hadoop-common-commits mailing list archives

From: t...@apache.org
Subject: svn commit: r1143559 [1/2] - in /hadoop/common/branches/HDFS-1073/common: ./ bin/ src/ src/docs/ src/java/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/fs/s3/ src/java/org/apache/hadoop/fs/shell/ src/java/org/apache/hadoop/http/ src/java/o...
Date: Wed, 06 Jul 2011 20:45:23 GMT
Author: todd
Date: Wed Jul  6 20:45:21 2011
New Revision: 1143559

URL: http://svn.apache.org/viewvc?rev=1143559&view=rev
Log:
Merge common from trunk into HDFS-1073

Added:
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/DataOutputOutputStream.java
      - copied unchanged from r1143556, hadoop/common/trunk/common/src/java/org/apache/hadoop/io/DataOutputOutputStream.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/compress/SnappyCodec.java
      - copied unchanged from r1143556, hadoop/common/trunk/common/src/java/org/apache/hadoop/io/compress/SnappyCodec.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/compress/snappy/
      - copied from r1143556, hadoop/common/trunk/common/src/java/org/apache/hadoop/io/compress/snappy/
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/compress/snappy/LoadSnappy.java
      - copied unchanged from r1143556, hadoop/common/trunk/common/src/java/org/apache/hadoop/io/compress/snappy/LoadSnappy.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java
      - copied unchanged from r1143556, hadoop/common/trunk/common/src/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java
      - copied unchanged from r1143556, hadoop/common/trunk/common/src/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/AnnotatedSecurityInfo.java
      - copied unchanged from r1143556, hadoop/common/trunk/common/src/java/org/apache/hadoop/security/AnnotatedSecurityInfo.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/SecurityInfo.java
      - copied unchanged from r1143556, hadoop/common/trunk/common/src/java/org/apache/hadoop/security/SecurityInfo.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/util/ProtoUtil.java
      - copied unchanged from r1143556, hadoop/common/trunk/common/src/java/org/apache/hadoop/util/ProtoUtil.java
    hadoop/common/branches/HDFS-1073/common/src/native/src/org/apache/hadoop/io/compress/snappy/
      - copied from r1143556, hadoop/common/trunk/common/src/native/src/org/apache/hadoop/io/compress/snappy/
    hadoop/common/branches/HDFS-1073/common/src/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
      - copied unchanged from r1143556, hadoop/common/trunk/common/src/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
    hadoop/common/branches/HDFS-1073/common/src/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
      - copied unchanged from r1143556, hadoop/common/trunk/common/src/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
    hadoop/common/branches/HDFS-1073/common/src/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h
      - copied unchanged from r1143556, hadoop/common/trunk/common/src/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h
    hadoop/common/branches/HDFS-1073/common/src/test/bin/smart-apply-patch.sh
      - copied unchanged from r1143556, hadoop/common/trunk/common/src/test/bin/smart-apply-patch.sh
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/TestObjectWritableProtos.java
      - copied unchanged from r1143556, hadoop/common/trunk/common/src/test/core/org/apache/hadoop/io/TestObjectWritableProtos.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/util/TestProtoUtil.java
      - copied unchanged from r1143556, hadoop/common/trunk/common/src/test/core/org/apache/hadoop/util/TestProtoUtil.java
Modified:
    hadoop/common/branches/HDFS-1073/common/   (props changed)
    hadoop/common/branches/HDFS-1073/common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-1073/common/bin/hadoop-config.sh
    hadoop/common/branches/HDFS-1073/common/build.xml
    hadoop/common/branches/HDFS-1073/common/src/docs/   (props changed)
    hadoop/common/branches/HDFS-1073/common/src/java/   (props changed)
    hadoop/common/branches/HDFS-1073/common/src/java/core-default.xml
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/ChecksumFs.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/FileSystem.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/FileUtil.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/LocalDirAllocator.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/Trash.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/s3/INode.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/shell/Command.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/http/HttpServer.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/BloomMapFile.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/BytesWritable.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/IOUtils.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/ObjectWritable.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/WritableUtils.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/jmx/JMXJsonServlet.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsIntValue.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/net/ScriptBasedMapping.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/net/SocketIOWithTimeout.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/SecurityUtil.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
    hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
    hadoop/common/branches/HDFS-1073/common/src/native/Makefile.am
    hadoop/common/branches/HDFS-1073/common/src/native/configure.ac
    hadoop/common/branches/HDFS-1073/common/src/native/packageNativeHadoop.sh
    hadoop/common/branches/HDFS-1073/common/src/saveVersion.sh
    hadoop/common/branches/HDFS-1073/common/src/test/bin/test-patch.sh
    hadoop/common/branches/HDFS-1073/common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/conf/TestConfiguration.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/http/TestHtmlQuoting.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/http/TestHttpServer.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/TestBytesWritable.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/TestIOUtils.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/TestSequenceFile.java   (props changed)
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/io/compress/TestCodec.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestAvroRpc.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestIPC.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestRPC.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/ipc/TestSaslRPC.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/jmx/TestJMXJsonServlet.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/security/TestUserGroupInformation.java
    hadoop/common/branches/HDFS-1073/common/src/test/core/org/apache/hadoop/security/token/delegation/TestDelegationToken.java

Propchange: hadoop/common/branches/HDFS-1073/common/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Wed Jul  6 20:45:21 2011
@@ -1,8 +1,4 @@
-build
-build-fi
-build.properties
-logs
 .classpath
-.externalToolBuilders
 .project
-.settings
+build
+build-fi

Propchange: hadoop/common/branches/HDFS-1073/common/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jul  6 20:45:21 2011
@@ -1 +1,2 @@
+/hadoop/common/trunk/common:1134995-1143556
 /hadoop/core/branches/branch-0.19/core:713112

Modified: hadoop/common/branches/HDFS-1073/common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/CHANGES.txt?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1073/common/CHANGES.txt Wed Jul  6 20:45:21 2011
@@ -44,6 +44,15 @@ Trunk (unreleased changes)
     HADOOP-7144. Expose JMX metrics via JSON servlet. (Robert Joseph Evans via
     cdouglas)
 
+    HADOOP-7379. Add the ability to serialize and deserialize protocol buffers
+    in ObjectWritable. (todd)
+
+    HADOOP-7206. Support Snappy compression. (Issei Yoshida and
+    Alejandro Abdelnur via eli)
+
+    HADOOP-7329. Add the capability of getting an individual attribute of a
+    mbean using JMXProxyServlet. (tanping)
+
   IMPROVEMENTS
 
     HADOOP-7042. Updates to test-patch.sh to include failed test names and
@@ -212,6 +221,24 @@ Trunk (unreleased changes)
     HADOOP-7374. Don't add tools.jar to the classpath when running Hadoop.
     (eli)
 
+    HADOOP-7106. Reorganize project SVN layout to "unsplit" the projects.
+    (todd, nigel)
+
+    HADOOP-6605. Add JAVA_HOME detection to hadoop-config. (eli)
+
+    HADOOP-7384. Allow test-patch to be more flexible about patch format. (todd)
+
+    HADOOP-6929. RPC should have a way to pass Security information other than 
+    protocol annotations. (sharad and omalley via mahadev)
+
+    HADOOP-7385. Remove StringUtils.stringifyException(ie) in logger functions.
+    (Bharath Mundlapudi via Tanping Wang).
+
+    HADOOP-310. Additional constructor requested in BytesWritable. (Brock
+    Noland via atm)
+
+    HADOOP-7429. Add another IOUtils#copyBytes method. (eli)
+
   OPTIMIZATIONS
   
     HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole
@@ -219,6 +246,9 @@ Trunk (unreleased changes)
 
   BUG FIXES
 
+    HADOOP-7327. FileSystem.listStatus() throws NullPointerException instead of
+    IOException upon access permission failure. (mattf)
+
     HADOOP-7015. RawLocalFileSystem#listStatus does not deal with a directory
     whose entries are changing (e.g. in a multi-thread or multi-process
     environment). (Sanjay Radia via eli)
@@ -299,6 +329,35 @@ Trunk (unreleased changes)
     HADOOP-7356. RPM packages broke bin/hadoop script in developer environment.
     (Eric Yang via todd)
 
+    HADOOP-7389. Use of TestingGroups by tests causes subsequent tests to fail.
+    (atm via tomwhite)
+
+    HADOOP-7390. VersionInfo not generated properly in git after unsplit. (todd
+    via atm)
+
+    HADOOP-7377. Fix command name handling affecting DFSAdmin. (Daryn Sharp
+    via mattf)
+
+    HADOOP-7402. TestConfiguration doesn't clean up after itself. (atm via eli)
+
+    HADOOP-7428. IPC connection is orphaned with null 'out' member.
+    (todd via eli)
+
+    HADOOP-7437. IOUtils.copybytes will suppress the stream closure exceptions.
+    (Uma Maheswara Rao G via szetszwo)
+
+    HADOOP-7090. Fix resource leaks in s3.INode, BloomMapFile, WritableUtils
+    and CBZip2OutputStream.  (Uma Maheswara Rao G via szetszwo)
+
+    HADOOP-7440. HttpServer.getParameterValues throws NPE for missing
+    parameters. (Uma Maheswara Rao G and todd via todd)
+
+    HADOOP-7442. Docs in core-default.xml still reference deprecated config
+    "topology.script.file.name" (atm)
+
+    HADOOP-7419. new hadoop-config.sh doesn't manage classpath for
+    HADOOP_CONF_DIR correctly. (Bing Zheng and todd via todd)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-1073/common/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jul  6 20:45:21 2011
@@ -1,3 +1,4 @@
+/hadoop/common/trunk/common/CHANGES.txt:1134995-1143556
 /hadoop/core/branches/branch-0.18/CHANGES.txt:727226
 /hadoop/core/branches/branch-0.19/CHANGES.txt:713112
 /hadoop/core/trunk/CHANGES.txt:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1073/common/bin/hadoop-config.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/bin/hadoop-config.sh?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/bin/hadoop-config.sh (original)
+++ hadoop/common/branches/HDFS-1073/common/bin/hadoop-config.sh Wed Jul  6 20:45:21 2011
@@ -107,18 +107,26 @@ fi
 # we use in Hadoop. Tune the variable down to prevent vmem explosion.
 export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
 
-# some Java parameters
-if [ "$JAVA_HOME" != "" ]; then
-  #echo "run java in $JAVA_HOME"
-  JAVA_HOME=$JAVA_HOME
-fi
-  
-if [ "$JAVA_HOME" = "" ]; then
-  echo "Error: JAVA_HOME is not set."
-  exit 1
+# Attempt to set JAVA_HOME if it is not set
+if [[ -z $JAVA_HOME ]]; then
+  # On OSX use java_home (or /Library for older versions)
+  if [ "Darwin" == "$(uname -s)" ]; then
+    if [ -x /usr/libexec/java_home ]; then
+      export JAVA_HOME=($(/usr/libexec/java_home))
+    else
+      export JAVA_HOME=(/Library/Java/Home)
+    fi
+  fi
+
+  # Bail if we did not detect it
+  if [[ -z $JAVA_HOME ]]; then
+    echo "Error: JAVA_HOME is not set and could not be found." 1>&2
+    exit 1
+  fi
 fi
 
 JAVA=$JAVA_HOME/bin/java
+# some Java parameters
 JAVA_HEAP_MAX=-Xmx1000m 
 
 # check envvars which might override default args
@@ -277,7 +285,7 @@ if [ -d "${HADOOP_HDFS_HOME}" ]; then
     CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME
   fi
   
-  if [ -d "${HADOOP_HDFS_HOME}/conf" ]; then
+  if [ ! -d "${HADOOP_CONF_DIR}" ] && [ -d "${HADOOP_HDFS_HOME}/conf" ]; then
     CLASSPATH=${CLASSPATH}:${HADOOP_HDFS_HOME}/conf
   fi
   
@@ -315,7 +323,7 @@ if [ -d "${HADOOP_MAPRED_HOME}" ]; then
     CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME
   fi
 
-  if [ -d "${HADOOP_MAPRED_HOME}/conf" ]; then
+  if [ ! -d "${HADOOP_CONF_DIR}" ] && [ -d "${HADOOP_MAPRED_HOME}/conf" ]; then
     CLASSPATH=${CLASSPATH}:${HADOOP_MAPRED_HOME}/conf
   fi
   

Modified: hadoop/common/branches/HDFS-1073/common/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/build.xml?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/build.xml (original)
+++ hadoop/common/branches/HDFS-1073/common/build.xml Wed Jul  6 20:45:21 2011
@@ -187,6 +187,9 @@
   <property name="build.dir.eclipse-test-classes" value="${build.dir.eclipse}/classes-test"/>
   <property name="build.dir.eclipse-test-generated-classes" value="${build.dir.eclipse}/classes-test-generated"/>
 
+  <!-- Use environment -->
+  <property environment="env" />
+
   <!-- check if clover reports should be generated -->
   <condition property="clover.enabled">
     <and>
@@ -210,6 +213,14 @@
   <property name="package.buildroot" value="/tmp/hadoop_package_build_${user.name}"/>
   <property name="package.build.dir" value="/tmp/hadoop_package_build_${user.name}/BUILD"/>
 
+  <!-- Indicate if the Snappy native library should be bundled with Hadoop or not -->
+  <property name="bundle.snappy" value="false"/>
+
+  <!-- Snappy native library location -->
+  <property name="snappy.prefix" value="/usr/local"/>
+  <property name="snappy.lib" value="${snappy.prefix}/lib"/>
+  <property name="snappy.include" value="${snappy.prefix}/include"/>
+
   <!-- the normal classpath -->
   <path id="classpath">
     <pathelement location="${build.classes}"/>
@@ -228,7 +239,7 @@
     <pathelement path="${clover.jar}"/>
     <path refid="ivy-common.classpath"/>
     <path refid="ivy-test.classpath"/>
-    <pathelement location="${build.classes}"/>
+    <pathelement location="${hadoop-common.jar}"/>
     <pathelement location="${test.conf.dir}"/>
   </path>
 <!--
@@ -401,12 +412,13 @@
   <target name="create-native-makefile" depends="check-native-makefile" if="need.native.makefile"> 
     <antcall target="create-native-configure"/>
     <mkdir dir="${build.native}"/>
-	<exec dir="${build.native}" executable="sh" failonerror="true">
-	  <env key="OS_NAME" value="${os.name}"/>
-	  <env key="OS_ARCH" value="${os.arch}"/>
-	  <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
-	  <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
-	  <arg line="${native.src.dir}/configure"/>
+
+    <exec dir="${build.native}" executable="sh" failonerror="true">
+      <env key="OS_NAME" value="${os.name}"/>
+      <env key="OS_ARCH" value="${os.arch}"/>
+      <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
+      <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
+      <arg line="${native.src.dir}/configure CPPFLAGS=-I${snappy.include} LDFLAGS=-L${snappy.lib}"/>
     </exec>
   </target>
 
@@ -416,6 +428,7 @@
   	
     <mkdir dir="${build.native}/lib"/>
     <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/>
+    <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/snappy"/>
     <mkdir dir="${build.native}/src/org/apache/hadoop/io/nativeio"/>
     <mkdir dir="${build.native}/src/org/apache/hadoop/security"/>
 
@@ -429,7 +442,17 @@
       <class name="org.apache.hadoop.io.compress.zlib.ZlibDecompressor" />
   	</javah>
 
-  	<javah
+    <javah
+      classpath="${build.classes}"
+      destdir="${build.native}/src/org/apache/hadoop/io/compress/snappy"
+      force="yes"
+      verbose="yes"
+      >
+      <class name="org.apache.hadoop.io.compress.snappy.SnappyCompressor"/>
+      <class name="org.apache.hadoop.io.compress.snappy.SnappyDecompressor"/>
+    </javah>
+
+    <javah
   	  classpath="${build.classes}"
   	  destdir="${build.native}/src/org/apache/hadoop/security"
       force="yes"
@@ -489,6 +512,10 @@
     <property name="jar.properties.list" value="commons-logging.properties, log4j.properties, hadoop-metrics.properties" />
     <jar jarfile="${build.dir}/${final.name}.jar"
          basedir="${build.classes}">
+      <service type="org.apache.hadoop.security.SecurityInfo">
+        <provider 
+           classname="org.apache.hadoop.security.AnnotatedSecurityInfo"/>
+      </service>
       <manifest>
         <section name="org/apache/hadoop">
           <attribute name="Implementation-Title" value="${ant.project.name}"/>
@@ -562,7 +589,7 @@
   <target name="-classes-compilation"
     depends="compile-core-classes, compile-core-test"/> 
 
-  <target name="compile-core-test" depends="compile-core-classes, ivy-retrieve-test, generate-test-records, generate-avro-records, generate-avro-protocols">
+  <target name="compile-core-test" depends="jar, ivy-retrieve-test, generate-test-records, generate-avro-records, generate-avro-protocols">
     <mkdir dir="${test.core.build.classes}"/>
     <javac 
      encoding="${build.encoding}" 
@@ -752,9 +779,10 @@
          <sysproperty key="java.security.krb5.conf" value="@{test.krb5.conf.filename}"/>
         <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml" />
         <sysproperty key="java.library.path"
-          value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
+          value="${build.native}/lib:${lib.dir}/native/${build.platform}:${snappy.lib}"/>
         <sysproperty key="java.security.egd" value="file:///dev/urandom" />
         <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
+
         <!-- set io.compression.codec.lzo.class in the child jvm only if it is set -->
         <syspropertyset dynamic="no">
           <propertyref name="io.compression.codec.lzo.class"/>
@@ -875,7 +903,6 @@
 
  <property name="findbugs.home" value=""/>
   <target name="findbugs" depends="check-for-findbugs, jar" if="findbugs.present" description="Run findbugs if present">
-    <property environment="env"/>
     <property name="findbugs.out.dir" value="${test.build.dir}/findbugs"/>
     <property name="findbugs.exclude.file" value="${test.src.dir}/findbugsExcludeFile.xml"/>
     <property name="findbugs.report.htmlfile" value="${findbugs.out.dir}/hadoop-findbugs-report.html"/>
@@ -1108,6 +1135,8 @@
 	  <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
 	  <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
 	  <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/>
+          <env key="BUNDLE_SNAPPY_LIB" value="${bundle.snappy}"/>
+          <env key="SNAPPY_LIB_DIR" value="${snappy.prefix}/lib"/>
 	  <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
     </exec>
 
@@ -1209,6 +1238,8 @@
 	  <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
 	  <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
 	  <env key="DIST_LIB_DIR" value="${dist.dir}/lib"/>
+          <env key="BUNDLE_SNAPPY_LIB" value="${bundle.snappy}"/>
+          <env key="SNAPPY_LIB_DIR" value="${snappy.prefix}/lib"/>
 	  <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
     </exec>
 

Propchange: hadoop/common/branches/HDFS-1073/common/src/docs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jul  6 20:45:21 2011
@@ -1 +1,2 @@
+/hadoop/common/trunk/common/src/docs:1134995-1143556
 /hadoop/core/branches/branch-0.19/src/docs:713112

Propchange: hadoop/common/branches/HDFS-1073/common/src/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Wed Jul  6 20:45:21 2011
@@ -1,2 +1,3 @@
+/hadoop/common/trunk/common/src/java:1134995-1143556
 /hadoop/core/branches/branch-0.19/core/src/java:713112
 /hadoop/core/trunk/src/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1073/common/src/java/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/core-default.xml?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/core-default.xml (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/core-default.xml Wed Jul  6 20:45:21 2011
@@ -1,4 +1,6 @@
 <?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
 <!--
    Licensed to the Apache Software Foundation (ASF) under one or more
    contributor license agreements.  See the NOTICE file distributed with
@@ -15,7 +17,6 @@
    See the License for the specific language governing permissions and
    limitations under the License.
 -->
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
 
 <!-- Do not modify this file directly.  Instead, copy entries that you -->
 <!-- wish to modify from this file into core-site.xml and change them -->
@@ -174,7 +175,7 @@
 
 <property>
   <name>io.compression.codecs</name>
-  <value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.DeflateCodec</value>
+  <value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.DeflateCodec,org.apache.hadoop.io.compress.SnappyCodec</value>
   <description>A list of the compression codec classes that can be used 
                for compression/decompression.</description>
 </property>
@@ -550,8 +551,8 @@
 	<name>net.topology.node.switch.mapping.impl</name>
   <value>org.apache.hadoop.net.ScriptBasedMapping</value>
   <description> The default implementation of the DNSToSwitchMapping. It
-    invokes a script specified in topology.script.file.name to resolve
-    node names. If the value for topology.script.file.name is not set, the
+    invokes a script specified in net.topology.script.file.name to resolve
+    node names. If the value for net.topology.script.file.name is not set, the
     default value of DEFAULT_RACK is returned for all node names.
   </description>
 </property>
@@ -569,7 +570,7 @@
   <name>net.topology.script.number.args</name>
   <value>100</value>
   <description> The max number of args that the script configured with 
-    topology.script.file.name should be run with. Each arg is an
+    net.topology.script.file.name should be run with. Each arg is an
     IP address.
   </description>
 </property>

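For reference, once org.apache.hadoop.io.compress.SnappyCodec is listed in io.compression.codecs it can be resolved by file extension through CompressionCodecFactory like the other codecs. A minimal sketch (the path is hypothetical and assumes the Snappy native library is loadable):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.CompressionCodecFactory;

    public class SnappyCodecLookup {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        CompressionCodecFactory factory = new CompressionCodecFactory(conf);
        // ".snappy" maps to SnappyCodec once it appears in io.compression.codecs.
        CompressionCodec codec = factory.getCodec(new Path("/tmp/part-00000.snappy"));
        System.out.println(codec == null ? "no codec" : codec.getClass().getName());
      }
    }
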
Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/ChecksumFileSystem.java Wed Jul  6 20:45:21 2011
@@ -151,8 +151,7 @@ public abstract class ChecksumFileSystem
         set(fs.verifyChecksum, null, 1, 0);
       } catch (IOException e) {                   // loudly ignore
         LOG.warn("Problem opening checksum file: "+ file + 
-                 ".  Ignoring exception: " + 
-                 StringUtils.stringifyException(e));
+                 ".  Ignoring exception: " , e); 
         set(fs.verifyChecksum, null, 1, 0);
       }
     }

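This hunk, and the similar ones in the files below, rely on commons-logging's two-argument warn(Object, Throwable) overload, which renders the stack trace itself instead of going through StringUtils.stringifyException (HADOOP-7385). A minimal sketch of the idiom:

    import java.io.IOException;
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class WarnWithThrowable {
      private static final Log LOG = LogFactory.getLog(WarnWithThrowable.class);

      static void demo() {
        try {
          throw new IOException("simulated checksum failure");
        } catch (IOException e) {
          // The Throwable is passed as a separate argument; the logger prints
          // the stack trace, so stringifyException is no longer needed.
          LOG.warn("Problem opening checksum file. Ignoring exception:", e);
        }
      }
    }
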
Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/ChecksumFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/ChecksumFs.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/ChecksumFs.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/ChecksumFs.java Wed Jul  6 20:45:21 2011
@@ -142,8 +142,7 @@ public abstract class ChecksumFs extends
         set(fs.verifyChecksum, null, 1, 0);
       } catch (IOException e) {                   // loudly ignore
         LOG.warn("Problem opening checksum file: "+ file + 
-                 ".  Ignoring exception: " + 
-                 StringUtils.stringifyException(e));
+                 ".  Ignoring exception: " , e); 
         set(fs.verifyChecksum, null, 1, 0);
       }
     }

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/CommonConfigurationKeys.java Wed Jul  6 20:45:21 2011
@@ -85,5 +85,13 @@ public class CommonConfigurationKeys ext
    */
   public static final String  NET_TOPOLOGY_CONFIGURED_NODE_MAPPING_KEY =
     "net.topology.configured.node.mapping";
+
+  /** Internal buffer size for Snappy compressor/decompressors */
+  public static final String IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY =
+      "io.compression.codec.snappy.buffersize";
+
+  /** Default value for IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY */
+  public static final int IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT =
+      256 * 1024;
 }
 

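The new key can be overridden in a site configuration or programmatically; a small sketch (the 64 KB value is only an example):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;

    public class SnappyBufferSize {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Override the 256 KB default declared above with a 64 KB buffer.
        conf.setInt(
            CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
            64 * 1024);
        System.out.println(conf.getInt(
            CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY,
            CommonConfigurationKeys.IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_DEFAULT));
      }
    }
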
Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/FileSystem.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/FileSystem.java Wed Jul  6 20:45:21 2011
@@ -1151,6 +1151,9 @@ public abstract class FileSystem extends
   private void listStatus(ArrayList<FileStatus> results, Path f,
       PathFilter filter) throws FileNotFoundException, IOException {
     FileStatus listing[] = listStatus(f);
+    if (listing == null) {
+      throw new IOException("Error accessing " + f);
+    }
 
     for (int i = 0; i < listing.length; i++) {
       if (filter.accept(listing[i].getPath())) {

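With the null check above (HADOOP-7327), a filtered listing of a directory the caller cannot access now surfaces as a catchable IOException rather than a NullPointerException. A minimal sketch with a hypothetical path:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.PathFilter;

    public class ListRestrictedDir {
      public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        try {
          // The filtered overload goes through the private helper patched above.
          FileStatus[] entries = fs.listStatus(new Path("/restricted/dir"),
              new PathFilter() {
                public boolean accept(Path p) { return true; }
              });
          System.out.println("entries: " + entries.length);
        } catch (IOException e) {
          // Previously this could fail with a NullPointerException instead.
          System.err.println("Error accessing directory: " + e.getMessage());
        }
      }
    }
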
Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/FileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/FileUtil.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/FileUtil.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/FileUtil.java Wed Jul  6 20:45:21 2011
@@ -652,9 +652,9 @@ public class FileUtil {
     try {
       shExec.execute();
     }catch(Exception e) {
-      if(LOG.isDebugEnabled()) {
-        LOG.debug("Error while changing permission : " + filename 
-            +" Exception: " + StringUtils.stringifyException(e));
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Error while changing permission : " + filename
+            + " Exception: ", e);
       }
     }
     return shExec.getExitCode();

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/LocalDirAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/LocalDirAllocator.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/LocalDirAllocator.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/LocalDirAllocator.java Wed Jul  6 20:45:21 2011
@@ -232,15 +232,14 @@ public class LocalDirAllocator {
                 dirs.add(localDirs[i]);
                 dfList.add(new DF(new File(localDirs[i]), 30000));
               } catch (DiskErrorException de) {
-                LOG.warn( localDirs[i] + "is not writable\n" +
-                    StringUtils.stringifyException(de));
+                LOG.warn( localDirs[i] + "is not writable\n", de);
               }
             } else {
               LOG.warn( "Failed to create " + localDirs[i]);
             }
           } catch (IOException ie) { 
             LOG.warn( "Failed to create " + localDirs[i] + ": " +
-                ie.getMessage() + "\n" + StringUtils.stringifyException(ie));
+                ie.getMessage() + "\n", ie);
           } //ignore
         }
         localDirs = dirs.toArray(new String[dirs.size()]);
@@ -261,7 +260,7 @@ public class LocalDirAllocator {
         DiskChecker.checkDir(new File(file.getParent().toUri().getPath()));
         return file;
       } catch (DiskErrorException d) {
-        LOG.warn(StringUtils.stringifyException(d));
+        LOG.warn("Disk Error Exception: ", d);
         return null;
       }
     }

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/Trash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/Trash.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/Trash.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/Trash.java Wed Jul  6 20:45:21 2011
@@ -327,15 +327,13 @@ public class Trash extends Configured {
             }
           }
         } catch (Exception e) {
-          LOG.warn("RuntimeException during Trash.Emptier.run() " + 
-                   StringUtils.stringifyException(e));
+          LOG.warn("RuntimeException during Trash.Emptier.run(): ", e); 
         }
       }
       try {
         fs.close();
       } catch(IOException e) {
-        LOG.warn("Trash cannot close FileSystem. " +
-            StringUtils.stringifyException(e));
+        LOG.warn("Trash cannot close FileSystem: ", e);
       }
     }
 

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/s3/INode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/s3/INode.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/s3/INode.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/s3/INode.java Wed Jul  6 20:45:21 2011
@@ -27,6 +27,7 @@ import java.io.InputStream;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.io.IOUtils;
 
 /**
  * Holds file metadata including type (regular file, or directory),
@@ -82,15 +83,20 @@ public class INode {
   public InputStream serialize() throws IOException {
     ByteArrayOutputStream bytes = new ByteArrayOutputStream();
     DataOutputStream out = new DataOutputStream(bytes);
-    out.writeByte(fileType.ordinal());
-    if (isFile()) {
-      out.writeInt(blocks.length);
-      for (int i = 0; i < blocks.length; i++) {
-        out.writeLong(blocks[i].getId());
-        out.writeLong(blocks[i].getLength());
+    try {
+      out.writeByte(fileType.ordinal());
+      if (isFile()) {
+        out.writeInt(blocks.length);
+        for (int i = 0; i < blocks.length; i++) {
+          out.writeLong(blocks[i].getId());
+          out.writeLong(blocks[i].getLength());
+        }
       }
+      out.close();
+      out = null;
+    } finally {
+      IOUtils.closeStream(out);
     }
-    out.close();
     return new ByteArrayInputStream(bytes.toByteArray());
   }
   

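The same close-then-null pattern (HADOOP-7090) appears in BloomMapFile, WritableUtils and CBZip2OutputStream below: close the stream on the success path, null the reference, and let IOUtils.closeStream in the finally block clean up quietly on the error path. A generic sketch of the idiom:

    import java.io.DataOutputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.io.IOUtils;

    public class CloseQuietlyIdiom {
      static void writeValue(String file, long value) throws IOException {
        DataOutputStream out = new DataOutputStream(new FileOutputStream(file));
        try {
          out.writeLong(value);
          out.close();   // success path: close explicitly so errors propagate
          out = null;    // mark as already closed for the finally block
        } finally {
          IOUtils.closeStream(out);  // error path: close quietly, avoid a leak
        }
      }
    }
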
Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/shell/Command.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/shell/Command.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/shell/Command.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/fs/shell/Command.java Wed Jul  6 20:45:21 2011
@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.shell;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.lang.reflect.Field;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.LinkedList;
@@ -378,7 +379,7 @@ abstract public class Command extends Co
   public String getName() {
     return (name == null)
       ? getCommandField("NAME")
-      : name.startsWith("-") ? name.substring(1) : name; // this is a historical method
+      : name.startsWith("-") ? name.substring(1) : name;
   }
 
   /**
@@ -433,7 +434,9 @@ abstract public class Command extends Co
   private String getCommandField(String field) {
     String value;
     try {
-      value = this.getClass().getField(field).get(this).toString();
+      Field f = this.getClass().getDeclaredField(field);
+      f.setAccessible(true);
+      value = f.get(this).toString();
     } catch (Exception e) {
       throw new RuntimeException(
           "failed to get " + this.getClass().getSimpleName()+"."+field, e);

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/http/HttpServer.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/http/HttpServer.java Wed Jul  6 20:45:21 2011
@@ -800,6 +800,9 @@ public class HttpServer implements Filte
       public String[] getParameterValues(String name) {
         String unquoteName = HtmlQuoting.unquoteHtmlChars(name);
         String[] unquoteValue = rawRequest.getParameterValues(unquoteName);
+        if (unquoteValue == null) {
+          return null;
+        }
         String[] result = new String[unquoteValue.length];
         for(int i=0; i < result.length; ++i) {
           result[i] = HtmlQuoting.quoteHtmlChars(unquoteValue[i]);

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/BloomMapFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/BloomMapFile.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/BloomMapFile.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/BloomMapFile.java Wed Jul  6 20:45:21 2011
@@ -31,7 +31,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.util.Options;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.bloom.DynamicBloomFilter;
 import org.apache.hadoop.util.bloom.Filter;
@@ -187,9 +186,14 @@ public class BloomMapFile {
     public synchronized void close() throws IOException {
       super.close();
       DataOutputStream out = fs.create(new Path(dir, BLOOM_FILE_NAME), true);
-      bloomFilter.write(out);
-      out.flush();
-      out.close();
+      try {
+        bloomFilter.write(out);
+        out.flush();
+        out.close();
+        out = null;
+      } finally {
+        IOUtils.closeStream(out);
+      }
     }
 
   }
@@ -225,15 +229,20 @@ public class BloomMapFile {
     
     private void initBloomFilter(Path dirName, 
                                  Configuration conf) {
+      
+      DataInputStream in = null;
       try {
         FileSystem fs = dirName.getFileSystem(conf);
-        DataInputStream in = fs.open(new Path(dirName, BLOOM_FILE_NAME));
+        in = fs.open(new Path(dirName, BLOOM_FILE_NAME));
         bloomFilter = new DynamicBloomFilter();
         bloomFilter.readFields(in);
         in.close();
+        in = null;
       } catch (IOException ioe) {
         LOG.warn("Can't open BloomFilter: " + ioe + " - fallback to MapFile.");
         bloomFilter = null;
+      } finally {
+        IOUtils.closeStream(in);
       }
     }
     

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/BytesWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/BytesWritable.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/BytesWritable.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/BytesWritable.java Wed Jul  6 20:45:21 2011
@@ -51,8 +51,19 @@ public class BytesWritable extends Binar
    * @param bytes This array becomes the backing storage for the object.
    */
   public BytesWritable(byte[] bytes) {
+    this(bytes, bytes.length);
+  }
+
+  /**
+   * Create a BytesWritable using the byte array as the initial value
+   * and length as the length. Use this constructor if the array is larger
+   * than the value it represents.
+   * @param bytes This array becomes the backing storage for the object.
+   * @param length The number of bytes to use from array.
+   */
+  public BytesWritable(byte[] bytes, int length) {
     this.bytes = bytes;
-    this.size = bytes.length;
+    this.size = length;
   }
   
   /**

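The new two-argument constructor (HADOOP-310) lets a caller wrap a partially filled buffer without copying it first; a minimal sketch:

    import org.apache.hadoop.io.BytesWritable;

    public class PartialBufferExample {
      public static void main(String[] args) {
        byte[] buffer = new byte[4096];
        int used = 10;  // only the first 10 bytes hold real data
        // The buffer becomes the backing array; only 'used' bytes count as the value.
        BytesWritable value = new BytesWritable(buffer, used);
        System.out.println(value.getLength());  // prints 10, not 4096
      }
    }
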
Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/IOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/IOUtils.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/IOUtils.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/IOUtils.java Wed Jul  6 20:45:21 2011
@@ -36,6 +36,7 @@ public class IOUtils {
 
   /**
    * Copies from one stream to another.
+   *
    * @param in InputStrem to read from
    * @param out OutputStream to write to
    * @param buffSize the size of the buffer 
@@ -44,7 +45,6 @@ public class IOUtils {
    */
   public static void copyBytes(InputStream in, OutputStream out, int buffSize, boolean close) 
     throws IOException {
-
     try {
       copyBytes(in, out, buffSize);
       if(close) {
@@ -70,7 +70,6 @@ public class IOUtils {
    */
   public static void copyBytes(InputStream in, OutputStream out, int buffSize) 
     throws IOException {
-
     PrintStream ps = out instanceof PrintStream ? (PrintStream)out : null;
     byte buf[] = new byte[buffSize];
     int bytesRead = in.read(buf);
@@ -82,9 +81,11 @@ public class IOUtils {
       bytesRead = in.read(buf);
     }
   }
+
   /**
    * Copies from one stream to another. <strong>closes the input and output streams 
    * at the end</strong>.
+   *
    * @param in InputStrem to read from
    * @param out OutputStream to write to
    * @param conf the Configuration object 
@@ -96,7 +97,8 @@ public class IOUtils {
   
   /**
    * Copies from one stream to another.
-   * @param in InputStrem to read from
+   *
+   * @param in InputStream to read from
    * @param out OutputStream to write to
    * @param conf the Configuration object
    * @param close whether or not close the InputStream and 
@@ -106,21 +108,64 @@ public class IOUtils {
     throws IOException {
     copyBytes(in, out, conf.getInt("io.file.buffer.size", 4096),  close);
   }
+
+  /**
+   * Copies count bytes from one stream to another.
+   *
+   * @param in InputStream to read from
+   * @param out OutputStream to write to
+   * @param count number of bytes to copy
+   * @param close whether to close the streams
+   * @throws IOException if bytes can not be read or written
+   */
+  public static void copyBytes(InputStream in, OutputStream out, long count,
+      boolean close) throws IOException {
+    byte buf[] = new byte[4096];
+    long bytesRemaining = count;
+    int bytesRead;
+
+    try {
+      while (bytesRemaining > 0) {
+        int bytesToRead = (int)
+          (bytesRemaining < buf.length ? bytesRemaining : buf.length);
+
+        bytesRead = in.read(buf, 0, bytesToRead);
+        if (bytesRead == -1)
+          break;
+
+        out.write(buf, 0, bytesRead);
+        bytesRemaining -= bytesRead;
+      }
+      if (close) {
+        out.close();
+        out = null;
+        in.close();
+        in = null;
+      }
+    } finally {
+      if (close) {
+        closeStream(out);
+        closeStream(in);
+      }
+    }
+  }
   
-  /** Reads len bytes in a loop.
-   * @param in The InputStream to read from
+  /**
+   * Reads len bytes in a loop.
+   *
+   * @param in InputStream to read from
    * @param buf The buffer to fill
    * @param off offset from the buffer
    * @param len the length of bytes to read
    * @throws IOException if it could not read requested number of bytes 
    * for any reason (including EOF)
    */
-  public static void readFully( InputStream in, byte buf[],
-      int off, int len ) throws IOException {
+  public static void readFully(InputStream in, byte buf[],
+      int off, int len) throws IOException {
     int toRead = len;
-    while ( toRead > 0 ) {
-      int ret = in.read( buf, off, toRead );
-      if ( ret < 0 ) {
+    while (toRead > 0) {
+      int ret = in.read(buf, off, toRead);
+      if (ret < 0) {
         throw new IOException( "Premature EOF from inputStream");
       }
       toRead -= ret;
@@ -128,16 +173,17 @@ public class IOUtils {
     }
   }
   
-  /** Similar to readFully(). Skips bytes in a loop.
+  /**
+   * Similar to readFully(). Skips bytes in a loop.
    * @param in The InputStream to skip bytes from
    * @param len number of bytes to skip.
    * @throws IOException if it could not skip requested number of bytes 
    * for any reason (including EOF)
    */
-  public static void skipFully( InputStream in, long len ) throws IOException {
-    while ( len > 0 ) {
-      long ret = in.skip( len );
-      if ( ret < 0 ) {
+  public static void skipFully(InputStream in, long len) throws IOException {
+    while (len > 0) {
+      long ret = in.skip(len);
+      if (ret < 0) {
         throw new IOException( "Premature EOF from inputStream");
       }
       len -= ret;
@@ -147,11 +193,12 @@ public class IOUtils {
   /**
    * Close the Closeable objects and <b>ignore</b> any {@link IOException} or 
    * null pointers. Must only be used for cleanup in exception handlers.
+   *
    * @param log the log to record problems to at debug level. Can be null.
    * @param closeables the objects to close
    */
   public static void cleanup(Log log, java.io.Closeable... closeables) {
-    for(java.io.Closeable c : closeables) {
+    for (java.io.Closeable c : closeables) {
       if (c != null) {
         try {
           c.close();
@@ -167,27 +214,29 @@ public class IOUtils {
   /**
    * Closes the stream ignoring {@link IOException}.
    * Must only be called in cleaning up from exception handlers.
+   *
    * @param stream the Stream to close
    */
-  public static void closeStream( java.io.Closeable stream ) {
+  public static void closeStream(java.io.Closeable stream) {
     cleanup(null, stream);
   }
   
   /**
-   * Closes the socket ignoring {@link IOException} 
+   * Closes the socket ignoring {@link IOException}
+   *
    * @param sock the Socket to close
    */
-  public static void closeSocket( Socket sock ) {
-    // avoids try { close() } dance
-    if ( sock != null ) {
+  public static void closeSocket(Socket sock) {
+    if (sock != null) {
       try {
-       sock.close();
-      } catch ( IOException ignored ) {
+        sock.close();
+      } catch (IOException ignored) {
       }
     }
   }
   
-  /** /dev/null of OutputStreams.
+  /**
+   * The /dev/null of OutputStreams.
    */
   public static class NullOutputStream extends OutputStream {
     public void write(byte[] b, int off, int len) throws IOException {

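The new copyBytes overload (HADOOP-7429) copies at most count bytes and, when close is true, closes both streams even if the copy fails part way. A small sketch with hypothetical file names:

    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.io.IOUtils;

    public class CopyFirstMegabyte {
      public static void main(String[] args) throws IOException {
        FileInputStream in = new FileInputStream("input.dat");
        FileOutputStream out = new FileOutputStream("first-megabyte.dat");
        // Copy at most 1 MB; 'true' closes both streams on success or failure.
        IOUtils.copyBytes(in, out, 1024L * 1024L, true);
      }
    }
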
Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/ObjectWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/ObjectWritable.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/ObjectWritable.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/ObjectWritable.java Wed Jul  6 20:45:21 2011
@@ -19,6 +19,8 @@
 package org.apache.hadoop.io;
 
 import java.lang.reflect.Array;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
 
 import java.io.*;
 import java.util.*;
@@ -26,6 +28,9 @@ import java.util.*;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.*;
+import org.apache.hadoop.util.ProtoUtil;
+
+import com.google.protobuf.Message;
 
 /** A polymorphic Writable that writes an instance with it's class name.
  * Handles arrays, strings and primitive types without a Writable wrapper.
@@ -191,6 +196,9 @@ public class ObjectWritable implements W
       UTF8.writeString(out, instance.getClass().getName());
       ((Writable)instance).write(out);
 
+    } else if (Message.class.isAssignableFrom(declaredClass)) {
+      ((Message)instance).writeDelimitedTo(
+          DataOutputOutputStream.constructOutputStream(out));
     } else {
       throw new IOException("Can't write: "+instance+" as "+declaredClass);
     }
@@ -261,6 +269,8 @@ public class ObjectWritable implements W
       instance = UTF8.readString(in);
     } else if (declaredClass.isEnum()) {         // enum
       instance = Enum.valueOf((Class<? extends Enum>) declaredClass, UTF8.readString(in));
+    } else if (Message.class.isAssignableFrom(declaredClass)) {
+      instance = tryInstantiateProtobuf(declaredClass, in);
     } else {                                      // Writable
       Class instanceClass = null;
       String str = UTF8.readString(in);
@@ -286,6 +296,67 @@ public class ObjectWritable implements W
   }
 
   /**
+   * Try to instantiate a protocol buffer of the given message class
+   * from the given input stream.
+   * 
+   * @param protoClass the class of the generated protocol buffer
+   * @param dataIn the input stream to read from
+   * @return the instantiated Message instance
+   * @throws IOException if an IO problem occurs
+   */
+  private static Message tryInstantiateProtobuf(
+      Class<?> protoClass,
+      DataInput dataIn) throws IOException {
+
+    try {
+      if (dataIn instanceof InputStream) {
+        // We can use the built-in parseDelimitedFrom and not have to re-copy
+        // the data
+        Method parseMethod = getStaticProtobufMethod(protoClass,
+            "parseDelimitedFrom", InputStream.class);
+        return (Message)parseMethod.invoke(null, (InputStream)dataIn);
+      } else {
+        // Have to read it into a buffer first, since protobuf doesn't deal
+        // with the DataInput interface directly.
+        
+        // Read the size delimiter that writeDelimitedTo writes
+        int size = ProtoUtil.readRawVarint32(dataIn);
+        if (size < 0) {
+          throw new IOException("Invalid size: " + size);
+        }
+      
+        byte[] data = new byte[size];
+        dataIn.readFully(data);
+        Method parseMethod = getStaticProtobufMethod(protoClass,
+            "parseFrom", byte[].class);
+        return (Message)parseMethod.invoke(null, data);
+      }
+    } catch (InvocationTargetException e) {
+      
+      if (e.getCause() instanceof IOException) {
+        throw (IOException)e.getCause();
+      } else {
+        throw new IOException(e.getCause());
+      }
+    } catch (IllegalAccessException iae) {
+      throw new AssertionError("Could not access parse method in " +
+          protoClass);
+    }
+  }
+
+  static Method getStaticProtobufMethod(Class<?> declaredClass, String method,
+      Class<?> ... args) {
+
+    try {
+      return declaredClass.getMethod(method, args);
+    } catch (Exception e) {
+      // This is a bug in Hadoop - protobufs should all have this static method
+      throw new AssertionError("Protocol buffer class " + declaredClass +
+          " does not have an accessible parseFrom(InputStream) method!");
+    }
+  }
+
+  /**
    * Find and load the class with given name <tt>className</tt> by first finding
    * it in the specified <tt>conf</tt>. If the specified <tt>conf</tt> is null,
    * try load it directly.

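With the Message branch added above (HADOOP-7379), a generated protocol buffer can round-trip through ObjectWritable much like a Writable. A sketch assuming a hypothetical protoc-generated class MyProtos.Record:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.ObjectWritable;

    public class ProtoRoundTrip {
      // MyProtos.Record is a hypothetical protoc-generated Message subclass.
      static MyProtos.Record roundTrip(MyProtos.Record record) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        ObjectWritable.writeObject(new DataOutputStream(buffer), record,
            MyProtos.Record.class, new Configuration());

        DataInputStream in =
            new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
        // readObject dispatches to tryInstantiateProtobuf for Message subclasses.
        return (MyProtos.Record) ObjectWritable.readObject(in, new Configuration());
      }
    }
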
Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/WritableUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/WritableUtils.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/WritableUtils.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/WritableUtils.java Wed Jul  6 20:45:21 2011
@@ -62,8 +62,13 @@ public final class WritableUtils  {
     if (bytes != null) {
       ByteArrayOutputStream bos =  new ByteArrayOutputStream();
       GZIPOutputStream gzout = new GZIPOutputStream(bos);
-      gzout.write(bytes, 0, bytes.length);
-      gzout.close();
+      try {
+        gzout.write(bytes, 0, bytes.length);
+        gzout.close();
+        gzout = null;
+      } finally {
+        IOUtils.closeStream(gzout);
+      }
       byte[] buffer = bos.toByteArray();
       int len = buffer.length;
       out.writeInt(len);

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java Wed Jul  6 20:45:21 2011
@@ -27,6 +27,8 @@ package org.apache.hadoop.io.compress.bz
 import java.io.OutputStream;
 import java.io.IOException;
 
+import org.apache.hadoop.io.IOUtils;
+
 /**
  * An output stream that compresses into the BZip2 format (without the file
  * header chars) into another stream.
@@ -727,8 +729,13 @@ public class CBZip2OutputStream extends 
   public void close() throws IOException {
     if (out != null) {
       OutputStream outShadow = this.out;
-      finish();
-      outShadow.close();
+      try {
+        finish();
+        outShadow.close();
+        outShadow = null;
+      } finally {
+        IOUtils.closeStream(outShadow);
+      }
     }
   }
   

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java Wed Jul  6 20:45:21 2011
@@ -61,7 +61,7 @@ class RetryInvocationHandler implements 
         if (!policy.shouldRetry(e, retries++)) {
           LOG.info("Exception while invoking " + method.getName()
                    + " of " + implementation.getClass() + ". Not retrying."
-                   + StringUtils.stringifyException(e));
+                   , e);
           if (!method.getReturnType().equals(Void.TYPE)) {
             throw e; // non-void methods can't fail without an exception
           }
@@ -70,7 +70,7 @@ class RetryInvocationHandler implements 
         if(LOG.isDebugEnabled()) {
           LOG.debug("Exception while invoking " + method.getName()
               + " of " + implementation.getClass() + ". Retrying."
-              + StringUtils.stringifyException(e));
+              , e);
         }
       }
     }
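
The RetryInvocationHandler hunks above, like several below (SerializationFactory, the metrics classes, ScriptBasedMapping, SocketIOWithTimeout, AbstractDelegationTokenSecretManager), replace concatenating StringUtils.stringifyException(e) into the message with the two-argument commons-logging call, which hands the Throwable to the logger so it can format the stack trace itself. A minimal before/after sketch, assuming only a commons-logging Log:

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import org.apache.hadoop.util.StringUtils;

    public class ThrowableLoggingExample {
      private static final Log LOG = LogFactory.getLog(ThrowableLoggingExample.class);

      void report(Exception e) {
        // Before: the stack trace is flattened into the message string.
        LOG.info("Exception while invoking foo. Not retrying. " + StringUtils.stringifyException(e));
        // After (this commit): the Throwable is passed to the logger as a separate argument.
        LOG.info("Exception while invoking foo. Not retrying.", e);
      }
    }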

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java Wed Jul  6 20:45:21 2011
@@ -71,8 +71,7 @@ public class SerializationFactory extend
       serializations.add((Serialization)
 	  ReflectionUtils.newInstance(serializionClass, getConf()));
     } catch (ClassNotFoundException e) {
-      LOG.warn("Serialization class not found: " +
-          StringUtils.stringifyException(e));
+      LOG.warn("Serialization class not found: ", e);
     }
   }
 

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/ipc/Client.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/ipc/Client.java Wed Jul  6 20:45:21 2011
@@ -19,10 +19,8 @@
 package org.apache.hadoop.ipc;
 
 import java.net.InetAddress;
-import java.net.NetworkInterface;
 import java.net.Socket;
 import java.net.InetSocketAddress;
-import java.net.SocketException;
 import java.net.SocketTimeoutException;
 import java.net.UnknownHostException;
 import java.net.ConnectException;
@@ -254,7 +252,7 @@ public class Client {
       Class<?> protocol = remoteId.getProtocol();
       this.useSasl = UserGroupInformation.isSecurityEnabled();
       if (useSasl && protocol != null) {
-        TokenInfo tokenInfo = protocol.getAnnotation(TokenInfo.class);
+        TokenInfo tokenInfo = SecurityUtil.getTokenInfo(protocol);
         if (tokenInfo != null) {
           TokenSelector<? extends TokenIdentifier> tokenSelector = null;
           try {
@@ -269,7 +267,7 @@ public class Client {
               .getHostAddress() + ":" + addr.getPort()), 
               ticket.getTokens());
         }
-        KerberosInfo krbInfo = protocol.getAnnotation(KerberosInfo.class);
+        KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol);
         if (krbInfo != null) {
           serverPrincipal = remoteId.getServerPrincipal();
           if (LOG.isDebugEnabled()) {
@@ -585,8 +583,12 @@ public class Client {
           start();
           return;
         }
-      } catch (IOException e) {
-        markClosed(e);
+      } catch (Throwable t) {
+        if (t instanceof IOException) {
+          markClosed((IOException)t);
+        } else {
+          markClosed(new IOException("Couldn't set up IO streams", t));
+        }
         close();
       }
     }
@@ -1283,7 +1285,7 @@ public class Client {
       if (!UserGroupInformation.isSecurityEnabled() || protocol == null) {
         return null;
       }
-      KerberosInfo krbInfo = protocol.getAnnotation(KerberosInfo.class);
+      KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol);
       if (krbInfo != null) {
         String serverKey = krbInfo.serverPrincipal();
         if (serverKey == null) {
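
The connection-setup hunk in Client above widens the catch from IOException to Throwable so that any failure still marks the connection closed, wrapping non-IOExceptions so callers continue to see an IOException with the original cause attached. A minimal sketch of that wrap-or-cast step in isolation:

    import java.io.IOException;

    public class WrapThrowableExample {
      /** Converts any failure into an IOException, preserving the original as the cause. */
      static IOException asIOException(Throwable t) {
        return (t instanceof IOException)
            ? (IOException) t
            : new IOException("Couldn't set up IO streams", t);
      }

      public static void main(String[] args) {
        System.out.println(asIOException(new RuntimeException("boom")));
      }
    }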

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/ipc/Server.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/ipc/Server.java Wed Jul  6 20:45:21 2011
@@ -1501,7 +1501,7 @@ public abstract class Server {
                   );
             }
           } catch (Throwable e) {
-            LOG.info(getName()+", call "+call+": error: " + e, e);
+            LOG.info(getName() + ", call: " + call + ", error: ", e);
             errorClass = e.getClass().getName();
             error = StringUtils.stringifyException(e);
             // Remove redundant error class name from the beginning of the stack trace

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/jmx/JMXJsonServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/jmx/JMXJsonServlet.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/jmx/JMXJsonServlet.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/jmx/JMXJsonServlet.java Wed Jul  6 20:45:21 2011
@@ -67,8 +67,20 @@ import org.codehaus.jackson.JsonGenerato
  * For example <code>http://.../jmx?qry=Hadoop:*</code> will return
  * all hadoop metrics exposed through JMX.
  * <p>
- * If the <code>qry</code> parameter is not formatted correctly then a
- * 400 BAD REQUEST http response code will be returned. 
+ * The optional <code>get</code> parameter is used to query a specific 
+ * attribute of a JMX bean.  The format of the URL is
+ * <code>http://.../jmx?get=MXBeanName::AttributeName</code>
+ * <p>
+ * For example 
+ * <code>
+ * http://.../jmx?get=Hadoop:service=NameNode,name=NameNodeInfo::ClusterId
+ * </code> will return the cluster id of the NameNode MXBean.
+ * <p>
+ * If the <code>qry</code> or the <code>get</code> parameter is not formatted 
+ * correctly then a 400 BAD REQUEST http response code will be returned. 
+ * <p>
+ * If a resource such as an MBean or attribute cannot be found, 
+ * a 404 SC_NOT_FOUND http response code will be returned. 
  * <p>
  * The return format is JSON and in the form
  * <p>
@@ -150,25 +162,49 @@ public class JMXJsonServlet extends Http
         jg.writeStringField("result", "ERROR");
         jg.writeStringField("message", "No MBeanServer could be found");
         jg.close();
+        LOG.error("No MBeanServer could be found.");
+        response.setStatus(HttpServletResponse.SC_NOT_FOUND);
         return;
       }
+      
+      // query per mbean attribute
+      String getmethod = request.getParameter("get");
+      if (getmethod != null) {
+        String[] splitStrings = getmethod.split("\\:\\:");
+        if (splitStrings.length != 2) {
+          jg.writeStringField("result", "ERROR");
+          jg.writeStringField("message", "query format is not as expected.");
+          jg.close();
+          response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
+          return;
+        }
+        listBeans(jg, new ObjectName(splitStrings[0]), splitStrings[1],
+            response);
+        jg.close();
+        return;
+      }
+
+      // query per mbean
       String qry = request.getParameter("qry");
       if (qry == null) {
         qry = "*:*";
       }
-      listBeans(jg, new ObjectName(qry));
+      listBeans(jg, new ObjectName(qry), null, response);
       jg.close();
-    } catch (IOException e) {
+
+    } catch ( IOException e ) {
       LOG.error("Caught an exception while processing JMX request", e);
       response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
-    } catch (MalformedObjectNameException e) {
+    } catch ( MalformedObjectNameException e ) {
       LOG.error("Caught an exception while processing JMX request", e);
       response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
     }
   }
 
   // --------------------------------------------------------- Private Methods
-  private void listBeans(JsonGenerator jg, ObjectName qry) throws IOException {
+  private void listBeans(JsonGenerator jg, ObjectName qry, String attribute, 
+      HttpServletResponse response) 
+  throws IOException {
     LOG.debug("Listing beans for "+qry);
     Set<ObjectName> names = null;
     names = mBeanServer.queryNames(qry, null);
@@ -178,62 +214,89 @@ public class JMXJsonServlet extends Http
     while (it.hasNext()) {
       ObjectName oname = it.next();
       MBeanInfo minfo;
-      String code;
+      String code = "";
+      Object attributeinfo = null;
       try {
         minfo = mBeanServer.getMBeanInfo(oname);
         code = minfo.getClassName();
+        String prs = "";
         try {
           if ("org.apache.commons.modeler.BaseModelMBean".equals(code)) {
-            code = (String) mBeanServer.getAttribute(oname, "modelerType");
+            prs = "modelerType";
+            code = (String) mBeanServer.getAttribute(oname, prs);
+          }
+          if (attribute!=null) {
+            prs = attribute;
+            attributeinfo = mBeanServer.getAttribute(oname, prs);
           }
         } catch (AttributeNotFoundException e) {
-          //Ignored the modelerType attribute was not found, so use the class name instead.
+          // If the modelerType attribute was not found, the class name is used
+          // instead.
+          LOG.error("getting attribute " + prs + " of " + oname
+              + " threw an exception", e);
         } catch (MBeanException e) {
-          //The code inside the attribute getter threw an exception so log it, and
-          // fall back on the class name
-          LOG.error("getting attribute modelerType of "+oname+" threw an exception", e);
+          // The code inside the attribute getter threw an exception so log it,
+          // and fall back on the class name
+          LOG.error("getting attribute " + prs + " of " + oname
+              + " threw an exception", e);
         } catch (RuntimeException e) {
-          //For some reason even with an MBeanException available to them Runtime exceptions
-          //can still find their way through, so treat them the same as MBeanException
-          LOG.error("getting attribute modelerType of "+oname+" threw an exception", e);
-        } catch (ReflectionException e) {
-          //This happens when the code inside the JMX bean (setter?? from the java docs)
-          //threw an exception, so log it and fall back on the class name
-          LOG.error("getting attribute modelerType of "+oname+" threw an exception", e);
+          // For some reason even with an MBeanException available to them
+          // Runtime exceptions can still find their way through, so treat them
+          // the same as MBeanException
+          LOG.error("getting attribute " + prs + " of " + oname
+              + " threw an exception", e);
+        } catch ( ReflectionException e ) {
+          // This happens when the code inside the JMX bean (setter?? from the
+          // java docs) threw an exception, so log it and fall back on the 
+          // class name
+          LOG.error("getting attribute " + prs + " of " + oname
+              + " threw an exception", e);
         }
       } catch (InstanceNotFoundException e) {
         //Ignored for some reason the bean was not found so don't output it
         continue;
-      } catch (IntrospectionException e) {
-        //This is an internal error, something odd happened with reflection so log it and
-        //don't output the bean.
-        LOG.error("Problem while trying to process JMX query: "+qry+" with MBean "+oname, e); 
+      } catch ( IntrospectionException e ) {
+        // This is an internal error, something odd happened with reflection so
+        // log it and don't output the bean.
+        LOG.error("Problem while trying to process JMX query: " + qry
+            + " with MBean " + oname, e);
         continue;
-      } catch (ReflectionException e) {
-        //This happens when the code inside the JMX bean threw an exception, so log it and
-        //don't output the bean.
-        LOG.error("Problem while trying to process JMX query: "+qry+" with MBean "+oname, e);
+      } catch ( ReflectionException e ) {
+        // This happens when the code inside the JMX bean threw an exception, so
+        // log it and don't output the bean.
+        LOG.error("Problem while trying to process JMX query: " + qry
+            + " with MBean " + oname, e);
         continue;
       }
 
       jg.writeStartObject();
       jg.writeStringField("name", oname.toString());
-      // can't be null - I think
-
+      
       jg.writeStringField("modelerType", code);
-
-      MBeanAttributeInfo attrs[] = minfo.getAttributes();
-      for (int i = 0; i < attrs.length; i++) {
-        writeAttribute(jg, oname, attrs[i]);
+      if ((attribute != null) && (attributeinfo == null)) {
+        jg.writeStringField("result", "ERROR");
+        jg.writeStringField("message", "No attribute with name " + attribute
+            + " was found.");
+        jg.writeEndObject();
+        jg.writeEndArray();
+        jg.close();
+        response.setStatus(HttpServletResponse.SC_NOT_FOUND);
+        return;
+      }
+      
+      if (attribute != null) {
+        writeAttribute(jg, attribute, attributeinfo);
+      } else {
+        MBeanAttributeInfo attrs[] = minfo.getAttributes();
+        for (int i = 0; i < attrs.length; i++) {
+          writeAttribute(jg, oname, attrs[i]);
+        }
       }
-      //  LOG.error("Caught Error writing value ",t);
-      //  ExceptionUtils.handleThrowable(t);
-      //}
       jg.writeEndObject();
     }
     jg.writeEndArray();
   }
-  
+
   private void writeAttribute(JsonGenerator jg, ObjectName oname, MBeanAttributeInfo attr) throws IOException {
     if (!attr.isReadable()) {
       return;
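
The JMXJsonServlet changes above add an optional get=MXBeanName::AttributeName query that returns a single attribute, alongside the existing qry parameter. The sketch below only illustrates how such a query string decomposes under the same splitting rule the servlet applies; the helper class is hypothetical and is not part of the servlet:

    import javax.management.ObjectName;

    public class JmxGetQueryExample {
      /** Splits "Bean::Attribute" as the servlet does and validates the bean name. */
      static String[] parse(String get) throws Exception {
        String[] parts = get.split("\\:\\:");
        if (parts.length != 2) {
          throw new IllegalArgumentException("expected MXBeanName::AttributeName, got: " + get);
        }
        new ObjectName(parts[0]);   // throws MalformedObjectNameException for a bad bean name
        return parts;
      }

      public static void main(String[] args) throws Exception {
        String[] p = parse("Hadoop:service=NameNode,name=NameNodeInfo::ClusterId");
        System.out.println("bean=" + p[0] + ", attribute=" + p[1]);
      }
    }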

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsIntValue.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsIntValue.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsIntValue.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsIntValue.java Wed Jul  6 20:45:21 2011
@@ -97,8 +97,7 @@ public class MetricsIntValue extends Met
       try {
         mr.setMetric(getName(), value);
       } catch (Exception e) {
-        LOG.info("pushMetric failed for " + getName() + "\n" +
-            StringUtils.stringifyException(e));
+        LOG.info("pushMetric failed for " + getName() + "\n", e);
       }
     }
     changed = false;

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java Wed Jul  6 20:45:21 2011
@@ -106,8 +106,7 @@ public class MetricsTimeVaryingInt exten
     try {
       mr.incrMetric(getName(), getPreviousIntervalValue());
     } catch (Exception e) {
-      LOG.info("pushMetric failed for " + getName() + "\n" +
-          StringUtils.stringifyException(e));
+      LOG.info("pushMetric failed for " + getName() + "\n", e);
     }
   }
   

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java Wed Jul  6 20:45:21 2011
@@ -102,8 +102,7 @@ public class MetricsTimeVaryingLong exte
     try {
       mr.incrMetric(getName(), getPreviousIntervalValue());
     } catch (Exception e) {
-      LOG.info("pushMetric failed for " + getName() + "\n" +
-          StringUtils.stringifyException(e));
+      LOG.info("pushMetric failed for " + getName() + "\n", e);
     }
   }
   

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java Wed Jul  6 20:45:21 2011
@@ -150,8 +150,7 @@ public class MetricsTimeVaryingRate exte
       mr.incrMetric(getName() + "_num_ops", getPreviousIntervalNumOps());
       mr.setMetric(getName() + "_avg_time", getPreviousIntervalAverageTime());
     } catch (Exception e) {
-      LOG.info("pushMetric failed for " + getName() + "\n" +
-          StringUtils.stringifyException(e));
+      LOG.info("pushMetric failed for " + getName() + "\n", e);
     }
   }
   

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/net/ScriptBasedMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/net/ScriptBasedMapping.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/net/ScriptBasedMapping.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/net/ScriptBasedMapping.java Wed Jul  6 20:45:21 2011
@@ -156,7 +156,7 @@ implements Configurable
         s.execute();
         allOutput.append(s.getOutput() + " ");
       } catch (Exception e) {
-        LOG.warn(StringUtils.stringifyException(e));
+        LOG.warn("Exception: ", e);
         return null;
       }
       loopCount++; 

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/net/SocketIOWithTimeout.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/net/SocketIOWithTimeout.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/net/SocketIOWithTimeout.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/net/SocketIOWithTimeout.java Wed Jul  6 20:45:21 2011
@@ -288,8 +288,7 @@ abstract class SocketIOWithTimeout {
           try {
             selector.close();
           } catch (IOException e) {
-            LOG.warn("Unexpected exception while closing selector : " +
-                     StringUtils.stringifyException(e));
+            LOG.warn("Unexpected exception while closing selector : ", e);
           }
         }
       }    
@@ -361,8 +360,7 @@ abstract class SocketIOWithTimeout {
         try {
           info.selector.selectNow();
         } catch (IOException e) {
-          LOG.info("Unexpected Exception while clearing selector : " +
-                   StringUtils.stringifyException(e));
+          LOG.info("Unexpected Exception while clearing selector : ", e);
           // don't put the selector back.
           info.close();
           return ret; 

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/SecurityUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/SecurityUtil.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/SecurityUtil.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/SecurityUtil.java Wed Jul  6 20:45:21 2011
@@ -22,6 +22,7 @@ import java.net.URI;
 import java.net.URL;
 import java.net.UnknownHostException;
 import java.security.AccessController;
+import java.util.ServiceLoader;
 import java.util.Set;
 
 import javax.security.auth.Subject;
@@ -33,8 +34,8 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.token.TokenInfo;
 
 import sun.security.jgss.krb5.Krb5Util;
 import sun.security.krb5.Credentials;
@@ -291,4 +292,62 @@ public class SecurityUtil {
   public static String getHostFromPrincipal(String principalName) {
     return new KerberosName(principalName).getHostName();
   }
+
+  private static ServiceLoader<SecurityInfo> securityInfoProviders = 
+    ServiceLoader.load(SecurityInfo.class);
+  private static SecurityInfo[] testProviders = new SecurityInfo[0];
+
+  /**
+   * Test setup method to register additional providers.
+   * @param providers a list of high priority providers to use
+   */
+  @InterfaceAudience.Private
+  public static void setSecurityInfoProviders(SecurityInfo... providers) {
+    testProviders = providers;
+  }
+  
+  /**
+   * Look up the KerberosInfo for a given protocol. It searches all known
+   * SecurityInfo providers.
+   * @param protocol the protocol class to get the information for
+   * @return the KerberosInfo or null if it has no KerberosInfo defined
+   */
+  public static KerberosInfo getKerberosInfo(Class<?> protocol) {
+    for(SecurityInfo provider: testProviders) {
+      KerberosInfo result = provider.getKerberosInfo(protocol);
+      if (result != null) {
+        return result;
+      }
+    }
+    for(SecurityInfo provider: securityInfoProviders) {
+      KerberosInfo result = provider.getKerberosInfo(protocol);
+      if (result != null) {
+        return result;
+      }
+    }
+    return null;
+  }
+ 
+  /**
+   * Look up the TokenInfo for a given protocol. It searches all known
+   * SecurityInfo providers.
+   * @param protocol The protocol class to get the information for.
+   * @return the TokenInfo or null if it has no TokenInfo defined
+   */
+  public static TokenInfo getTokenInfo(Class<?> protocol) {
+    for(SecurityInfo provider: testProviders) {
+      TokenInfo result = provider.getTokenInfo(protocol);
+      if (result != null) {
+        return result;
+      }      
+    }
+    for(SecurityInfo provider: securityInfoProviders) {
+      TokenInfo result = provider.getTokenInfo(protocol);
+      if (result != null) {
+        return result;
+      }
+    } 
+    return null;
+  }
+
 }
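
SecurityUtil now resolves KerberosInfo and TokenInfo through pluggable SecurityInfo providers: production providers are discovered with java.util.ServiceLoader (i.e. through a META-INF/services/org.apache.hadoop.security.SecurityInfo entry on the classpath), while tests can inject providers via setSecurityInfoProviders. The sketch below exercises the test hook; SecurityInfo's method signatures are assumed from the calls made in getKerberosInfo/getTokenInfo above, and MyTestProtocol and its principal value are placeholders:

    import org.apache.hadoop.security.KerberosInfo;
    import org.apache.hadoop.security.SecurityInfo;
    import org.apache.hadoop.security.SecurityUtil;
    import org.apache.hadoop.security.token.TokenInfo;

    public class SecurityInfoExample {
      /** Placeholder protocol carrying the annotation the provider should surface. */
      @KerberosInfo(serverPrincipal = "test.server.principal.key")
      interface MyTestProtocol {}

      public static void main(String[] args) {
        SecurityInfo provider = new SecurityInfo() {
          @Override
          public KerberosInfo getKerberosInfo(Class<?> protocol) {
            // Answer only for the protocol this provider knows; others return null.
            return protocol == MyTestProtocol.class
                ? MyTestProtocol.class.getAnnotation(KerberosInfo.class) : null;
          }
          @Override
          public TokenInfo getTokenInfo(Class<?> protocol) {
            return null;   // this sketch supplies no token information
          }
        };
        SecurityUtil.setSecurityInfoProviders(provider);        // test-only hook added above
        KerberosInfo info = SecurityUtil.getKerberosInfo(MyTestProtocol.class);
        System.out.println(info == null ? "none" : info.serverPrincipal());
      }
    }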

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/UserGroupInformation.java Wed Jul  6 20:45:21 2011
@@ -878,17 +878,21 @@ public class UserGroupInformation {
   private static class TestingGroups extends Groups {
     private final Map<String, List<String>> userToGroupsMapping = 
       new HashMap<String,List<String>>();
+    private Groups underlyingImplementation;
     
-    private TestingGroups() {
+    private TestingGroups(Groups underlyingImplementation) {
       super(new org.apache.hadoop.conf.Configuration());
+      this.underlyingImplementation = underlyingImplementation;
     }
     
     @Override
-    public List<String> getGroups(String user) {
+    public List<String> getGroups(String user) throws IOException {
       List<String> result = userToGroupsMapping.get(user);
+      
       if (result == null) {
-        result = new ArrayList<String>();
+        result = underlyingImplementation.getGroups(user);
       }
+
       return result;
     }
 
@@ -910,7 +914,7 @@ public class UserGroupInformation {
     UserGroupInformation ugi = createRemoteUser(user);
     // make sure that the testing object is setup
     if (!(groups instanceof TestingGroups)) {
-      groups = new TestingGroups();
+      groups = new TestingGroups(groups);
     }
     // add the user groups
     ((TestingGroups) groups).setUserGroups(ugi.getShortUserName(), userGroups);
@@ -936,7 +940,7 @@ public class UserGroupInformation {
     UserGroupInformation ugi = createProxyUser(user, realUser);
     // make sure that the testing object is setup
     if (!(groups instanceof TestingGroups)) {
-      groups = new TestingGroups();
+      groups = new TestingGroups(groups);
     }
     // add the user groups
     ((TestingGroups) groups).setUserGroups(ugi.getShortUserName(), userGroups);

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java Wed Jul  6 20:45:21 2011
@@ -41,8 +41,6 @@ import org.apache.hadoop.security.UserGr
 @InterfaceStability.Evolving
 public class ServiceAuthorizationManager {
   private static final String HADOOP_POLICY_FILE = "hadoop-policy.xml";
-  private static final Log LOG = LogFactory
-  .getLog(ServiceAuthorizationManager.class);
 
   private Map<Class<?>, AccessControlList> protocolToAcl =
     new IdentityHashMap<Class<?>, AccessControlList>();
@@ -86,7 +84,7 @@ public class ServiceAuthorizationManager
     }
     
     // get client principal key to verify (if available)
-    KerberosInfo krbInfo = protocol.getAnnotation(KerberosInfo.class);
+    KerberosInfo krbInfo = SecurityUtil.getKerberosInfo(protocol);
     String clientPrincipal = null; 
     if (krbInfo != null) {
       String clientKey = krbInfo.clientPrincipal();

Modified: hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java (original)
+++ hadoop/common/branches/HDFS-1073/common/src/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java Wed Jul  6 20:45:21 2011
@@ -373,8 +373,7 @@ extends AbstractDelegationTokenIdentifie
               rollMasterKey();
               lastMasterKeyUpdate = now;
             } catch (IOException e) {
-              LOG.error("Master key updating failed. "
-                  + StringUtils.stringifyException(e));
+              LOG.error("Master key updating failed: ", e);
             }
           }
           if (lastTokenCacheCleanup + tokenRemoverScanInterval < now) {

Modified: hadoop/common/branches/HDFS-1073/common/src/native/Makefile.am
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/native/Makefile.am?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/native/Makefile.am (original)
+++ hadoop/common/branches/HDFS-1073/common/src/native/Makefile.am Wed Jul  6 20:45:21 2011
@@ -34,6 +34,7 @@ export PLATFORM = $(shell echo $$OS_NAME
 ACLOCAL_AMFLAGS = -I m4 
 AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \
               -Isrc/org/apache/hadoop/io/compress/zlib \
+              -Isrc/org/apache/hadoop/io/compress/snappy \
               -Isrc/org/apache/hadoop/security \
               -Isrc/org/apache/hadoop/io/nativeio/
 AM_LDFLAGS = @JNI_LDFLAGS@
@@ -46,6 +47,8 @@ endif
 lib_LTLIBRARIES = libhadoop.la
 libhadoop_la_SOURCES = src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c \
                        src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \
+                       src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c \
+                       src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c \
                        src/org/apache/hadoop/security/getGroup.c \
                        src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c \
                        src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c \

Modified: hadoop/common/branches/HDFS-1073/common/src/native/configure.ac
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/native/configure.ac?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/native/configure.ac (original)
+++ hadoop/common/branches/HDFS-1073/common/src/native/configure.ac Wed Jul  6 20:45:21 2011
@@ -88,6 +88,9 @@ AC_SUBST([JNI_CPPFLAGS])
 dnl Check for zlib headers
 AC_CHECK_HEADERS([zlib.h zconf.h], AC_COMPUTE_NEEDED_DSO(z,HADOOP_ZLIB_LIBRARY), AC_MSG_ERROR(Zlib headers were not found... native-hadoop library needs zlib to build. Please install the requisite zlib development package.))
 
+dnl Check for snappy headers
+AC_CHECK_HEADERS([snappy-c.h], AC_COMPUTE_NEEDED_DSO(snappy,HADOOP_SNAPPY_LIBRARY), AC_MSG_WARN(Snappy headers were not found... building without snappy.))
+
 dnl Check for headers needed by the native Group resolution implementation
 AC_CHECK_HEADERS([fcntl.h stdlib.h string.h unistd.h], [], AC_MSG_ERROR(Some system headers not found... please ensure their presence on your platform.))
 

Modified: hadoop/common/branches/HDFS-1073/common/src/native/packageNativeHadoop.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1073/common/src/native/packageNativeHadoop.sh?rev=1143559&r1=1143558&r2=1143559&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1073/common/src/native/packageNativeHadoop.sh (original)
+++ hadoop/common/branches/HDFS-1073/common/src/native/packageNativeHadoop.sh Wed Jul  6 20:45:21 2011
@@ -62,4 +62,17 @@ then 
   done  
 fi
 
+if [ "${BUNDLE_SNAPPY_LIB}" = "true" ]
+then
+ if [ -d ${SNAPPY_LIB_DIR} ]
+ then
+   echo "Copying Snappy library in ${SNAPPY_LIB_DIR} to $DIST_LIB_DIR/"
+   cd ${SNAPPY_LIB_DIR}
+   $TAR . | (cd $DIST_LIB_DIR/; $UNTAR)
+ else
+   echo "Snappy lib directory ${SNAPPY_LIB_DIR} does not exist"
+   exit 1
+ fi
+fi
+
 #vim: ts=2: sw=2: et


