hadoop-common-commits mailing list archives

From omal...@apache.org
Subject svn commit: r1077554 - in /hadoop/common/branches/branch-0.20-security-patches: ./ src/hdfs/org/apache/hadoop/hdfs/ src/hdfs/org/apache/hadoop/hdfs/server/namenode/ src/test/aop/build/
Date Fri, 04 Mar 2011 04:28:35 GMT
Author: omalley
Date: Fri Mar  4 04:28:35 2011
New Revision: 1077554

URL: http://svn.apache.org/viewvc?rev=1077554&view=rev
Log:
commit d1bcf4d838078307eb7953e9393bde14cbd9de7e
Author: Tsz Wo Sze <tsz@ucdev29.inktomisearch.com>
Date:   Sat Jul 17 00:37:41 2010 +0000

    HDFS-1085 from https://issues.apache.org/jira/secure/attachment/12449740/h1085_20100716d_y0.20.1xx.patch
    
    +++ b/YAHOO-CHANGES.txt
    +    HDFS-1085. Check file length and bytes read when reading a file through
    +    hftp in order to detect failure.  (szetszwo)
    +
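
The essence of the patch is a byte-counting check on the hftp read path: record the Content-Length header sent by the server, count bytes as they are read, and throw if EOF arrives before the count reaches the advertised length. A minimal standalone sketch of that pattern (class and field names are illustrative, not the actual patch code):

    import java.io.FilterInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    /** Sketch: fail on premature EOF when the expected length is known. */
    class LengthCheckedInputStream extends FilterInputStream {
      private final long expectedLength; // -1 if no Content-Length was sent
      private long bytesRead = 0;

      LengthCheckedInputStream(InputStream in, long expectedLength) {
        super(in);
        this.expectedLength = expectedLength;
      }

      @Override
      public int read() throws IOException {
        final int b = super.read();
        update(b == -1, 1);
        return b;
      }

      @Override
      public int read(byte[] buf, int off, int len) throws IOException {
        final int n = super.read(buf, off, len);
        update(n == -1, n);
        return n;
      }

      /** Advance the count, or verify it once the stream reports EOF. */
      private void update(boolean isEOF, int n) throws IOException {
        if (!isEOF) {
          bytesRead += n;
        } else if (expectedLength >= 0 && bytesRead < expectedLength) {
          throw new IOException("Got EOF but bytesRead = " + bytesRead
              + " < expectedLength = " + expectedLength);
        }
      }
    }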

Modified:
    hadoop/common/branches/branch-0.20-security-patches/build.xml
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/aop/build/aop.xml

Modified: hadoop/common/branches/branch-0.20-security-patches/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/build.xml?rev=1077554&r1=1077553&r2=1077554&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/build.xml (original)
+++ hadoop/common/branches/branch-0.20-security-patches/build.xml Fri Mar  4 04:28:35 2011
@@ -26,7 +26,7 @@
   <property file="${user.home}/build.properties" />
   <property file="${basedir}/build.properties" />
  
-  <property name="Name" value="Hadoop"/>
+  <property name="Name" value="Yahoo! Distribution of Hadoop"/>
   <property name="name" value="hadoop"/>
   <property name="version" value="0.20.200.0-SNAPSHOT"/>
   <property name="final.name" value="${name}-${version}"/>
@@ -616,7 +616,6 @@
     <tar compression="gzip" destfile="${build.classes}/bin.tgz">
       <tarfileset dir="bin" mode="755"/>
     </tar>
-
     <property name="jar.properties.list"
       value="commons-logging.properties, log4j.properties, hadoop-metrics.properties"/>
     <jar jarfile="${build.dir}/${core.final.name}.jar"
@@ -625,7 +624,7 @@
         <section name="org/apache/hadoop">
           <attribute name="Implementation-Title" value="Hadoop"/>
           <attribute name="Implementation-Version" value="${version}"/>
-          <attribute name="Implementation-Vendor" value="Apache"/>
+          <attribute name="Implementation-Vendor" value="Yahoo!"/>
         </section>
       </manifest>
       <fileset dir="${conf.dir}" includes="${jar.properties.list}" />
@@ -640,13 +639,10 @@
   <!--                                                                    -->
   <!-- ================================================================== -->
  <target name="examples" depends="jar, compile-examples" description="Make the Hadoop examples jar.">
-    <jar jarfile="${build.dir}/${examples.final.name}.jar"
-         basedir="${build.examples}">
-      <manifest>
-        <attribute name="Main-Class" 
-                   value="org/apache/hadoop/examples/ExampleDriver"/>
-      </manifest>
-    </jar>
+    <macro-jar-examples
+      build.dir="${build.dir}"
+      basedir="${build.examples}">
+    </macro-jar-examples>
   </target>
 
   <macrodef name="macro-jar-examples">
@@ -793,7 +789,7 @@
           <section name="org/apache/hadoop">
             <attribute name="Implementation-Title" value="Hadoop"/>
             <attribute name="Implementation-Version" value="${version}"/>
-            <attribute name="Implementation-Vendor" value="Apache"/>
+            <attribute name="Implementation-Vendor" value="Yahoo!"/>
           </section>
          </manifest>
     </jar>
@@ -882,8 +878,6 @@
   <!-- ================================================================== -->
   <!-- Run unit tests                                                     --> 
   <!-- ================================================================== -->
-  
-  <!-- Define a macro that will be used for fast and all test runs -->
   <macrodef name="macro-test-runner">
     <attribute name="test.file" />
     <attribute name="classpath" />
@@ -895,14 +889,14 @@
     <attribute name="exclude.list.id" default="empty.exclude.list.id" />
     <sequential>
       <delete file="${test.build.dir}/testsfailed"/>
-      <delete dir="${test.build.data}"/>
-      <mkdir dir="${test.build.data}"/>
-      <delete dir="${test.log.dir}"/>
-      <mkdir dir="${test.log.dir}"/>
-      <copy file="${test.src.dir}/hadoop-policy.xml" 
-        todir="${test.build.extraconf}" />
+      <delete dir="@{test.dir}/data" />
+      <mkdir dir="@{test.dir}/data" />
+      <delete dir="@{test.dir}/logs" />
+      <mkdir dir="@{test.dir}/logs" />
+      <copy file="${test.src.dir}/hadoop-policy.xml"
+            todir="@{test.dir}/extraconf" />
       <copy file="${test.src.dir}/fi-site.xml"
-        todir="${test.build.extraconf}" />
+            todir="@{test.dir}/extraconf" />
       <junit showoutput="${test.output}"
              printsummary="${test.junit.printsummary}"
              haltonfailure="${test.junit.haltonfailure}"
@@ -928,7 +922,7 @@
                      value="@{test.krb5.conf.filename}"/>
         <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml" />
         <sysproperty key="java.library.path"
-             value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
+                     value="${build.native}/lib:${lib.dir}/native/${build.platform}" />
         <sysproperty key="install.c++.examples"
                      value="${install.c++.examples}" />
         <sysproperty key="testjar"
@@ -938,20 +932,20 @@
                      value="@{hadoop.conf.dir.deployed}" />
         <!-- set io.compression.codec.lzo.class in the child jvm only if it is set -->
         <syspropertyset dynamic="no">
-          <propertyref name="io.compression.codec.lzo.class"/>
+          <propertyref name="io.compression.codec.lzo.class" />
         </syspropertyset>
         <!-- set compile.c++ in the child jvm only if it is set -->
         <syspropertyset dynamic="no">
-          <propertyref name="compile.c++"/>
+          <propertyref name="compile.c++" />
         </syspropertyset>
-        <classpath refid="${test.classpath.id}"/>
+        <classpath refid="@{classpath}" />
         <syspropertyset id="FaultProbabilityProperties">
-          <propertyref regex="fi.*"/>
+          <propertyref regex="fi.*" />
         </syspropertyset>
         <formatter type="${test.junit.output.format}" />
-        <batchtest todir="${test.build.dir}" if="tests.notestcase">
-          <fileset dir="${test.src.dir}" 
-            excludes="**/${test.exclude}.java aop/**">
+        <batchtest todir="@{test.dir}" if="tests.notestcase">
+          <fileset dir="@{fileset.dir}"
+                   excludes="**/${test.exclude}.java aop/** system/**">
             <patternset>
               <includesfile name="@{test.file}"/>
             </patternset>
@@ -960,20 +954,20 @@
         </batchtest>
         <batchtest todir="${test.build.dir}" if="tests.notestcase.fi">
           <fileset dir="${test.src.dir}/aop"
-            includes="**/${test.include}.java"
-            excludes="**/${test.exclude}.java" />
+                   includes="**/${test.include}.java"
+                   excludes="**/${test.exclude}.java" />
         </batchtest>
-        <batchtest todir="${test.build.dir}" if="tests.testcase">
-          <fileset dir="${test.src.dir}"
-            includes="**/${testcase}.java" excludes="aop/**"/>
+        <batchtest todir="@{test.dir}" if="tests.testcase">
+          <fileset dir="@{fileset.dir}"
+            includes="**/${testcase}.java" excludes="aop/** system/**"/>
         </batchtest>
         <batchtest todir="${test.build.dir}" if="tests.testcase.fi">
-          <fileset dir="${test.src.dir}/aop" includes="**/${testcase}.java"/>
+          <fileset dir="${test.src.dir}/aop" includes="**/${testcase}.java" />
         </batchtest>
         <!--The following batch is for very special occasions only when
-         a non-FI tests are needed to be executed against FI-environment -->
+                a non-FI tests are needed to be executed against FI-environment -->
         <batchtest todir="${test.build.dir}" if="tests.testcaseonly">
-          <fileset dir="${test.src.dir}" includes="**/${testcase}.java"/>
+          <fileset dir="${test.src.dir}" includes="**/${testcase}.java" />
         </batchtest>
       </junit>
       <antcall target="checkfailure"/>
@@ -1190,7 +1184,7 @@
       use="true"
       windowtitle="${Name} ${version} API"
       doctitle="${Name} ${version} Developer API"
-      bottom="Copyright &amp;copy; ${year} The Apache Software Foundation"
+      bottom="This release is based on the Yahoo! Distribution of Hadoop, powering the largest Hadoop clusters in the Universe!&lt;br>Copyright &amp;copy; ${year} The Apache Software Foundation."
       maxmemory="${javadoc.maxmemory}"
       >
         <packageset dir="${core.src.dir}"/>
@@ -1233,7 +1227,7 @@
       use="true"
       windowtitle="${Name} ${version} API"
       doctitle="${Name} ${version} API"
-      bottom="Copyright &amp;copy; ${year} The Apache Software Foundation"
+      bottom="This release is based on the Yahoo! Distribution of Hadoop, powering the largest Hadoop clusters in the Universe!&lt;br>Copyright &amp;copy; ${year} The Apache Software Foundation."
       maxmemory="${javadoc.maxmemory}"
       >
         <packageset dir="${core.src.dir}"/>
@@ -1570,7 +1564,7 @@
   <!-- ================================================================== -->
   <!-- Clean.  Delete the build files, and their directories              -->
   <!-- ================================================================== -->
-  <target name="clean" depends="clean-contrib, clean-sign" description="Clean.  Delete the build files, and their directories">
+  <target name="clean" depends="clean-contrib, clean-sign, clean-fi" description="Clean.  Delete the build files, and their directories">
     <delete dir="${build.dir}"/>
     <delete dir="${docs.src}/build"/>
     <delete dir="${src.docs.cn}/build"/>
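
The macro-test-runner hunks above replace build-wide properties such as ${test.build.dir} and ${test.classpath.id} with macro attributes such as @{test.dir} and @{classpath}, which are expanded afresh at each call site rather than once per build; that is what lets aop.xml point the same runner at the system-test tree. A minimal Ant sketch of the property-vs-attribute difference, with illustrative target and attribute names:

    <project name="macro-demo" default="demo">
      <macrodef name="run-in-dir">
        <attribute name="test.dir"/>
        <sequential>
          <!-- @{test.dir} takes a fresh value on every invocation -->
          <delete dir="@{test.dir}/logs"/>
          <mkdir dir="@{test.dir}/logs"/>
          <echo message="logs go to @{test.dir}/logs"/>
        </sequential>
      </macrodef>

      <target name="demo">
        <run-in-dir test.dir="build/test"/>
        <run-in-dir test.dir="build-fi/system/test"/>
      </target>
    </project>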

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java?rev=1077554&r1=1077553&r2=1077554&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java Fri Mar  4 04:28:35 2011
@@ -1526,7 +1526,7 @@ public class DFSClient implements FSCons
    * DFSInputStream provides bytes from a named file.  It handles 
    * negotiation of the namenode and various datanodes as necessary.
    ****************************************************************/
-  class DFSInputStream extends FSInputStream {
+  public class DFSInputStream extends FSInputStream {
     private Socket s = null;
     private boolean closed = false;
 

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=1077554&r1=1077553&r2=1077554&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java Fri Mar  4 04:28:35 2011
@@ -52,6 +52,7 @@ import org.apache.hadoop.fs.permission.F
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.namenode.JspHelper;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.server.namenode.StreamFile;
 import org.apache.hadoop.hdfs.tools.DelegationTokenFetcher;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.RemoteException;
@@ -266,13 +267,33 @@ public class HftpFileSystem extends File
         "ugi=" + getUgiParameter());
     connection.setRequestMethod("GET");
     connection.connect();
+    final String cl = connection.getHeaderField(StreamFile.CONTENT_LENGTH);
+    final long filelength = cl == null? -1: Long.parseLong(cl);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("filelength = " + filelength);
+    }
     final InputStream in = connection.getInputStream();
     return new FSDataInputStream(new FSInputStream() {
+        long currentPos = 0;
+
+        private void update(final boolean isEOF, final int n
+            ) throws IOException {
+          if (!isEOF) {
+            currentPos += n;
+          } else if (currentPos < filelength) {
+            throw new IOException("Got EOF but byteread = " + currentPos
+                + " < filelength = " + filelength);
+          }
+        }
         public int read() throws IOException {
-          return in.read();
+          final int b = in.read();
+          update(b == -1, 1);
+          return b;
         }
         public int read(byte[] b, int off, int len) throws IOException {
-          return in.read(b, off, len);
+          final int n = in.read(b, off, len);
+          update(n == -1, n);
+          return n;
         }
 
         public void close() throws IOException {
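
The anonymous FSInputStream above applies the same check inline: update() advances currentPos on each successful read and compares it with filelength at EOF (filelength is -1, which disables the check, when the server sends no Content-Length header). A hedged end-to-end sketch of exercising such a check against any HTTP URL, using only standard HttpURLConnection calls and illustrative names:

    import java.io.IOException;
    import java.io.InputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class LengthCheckDemo {
      public static void main(String[] args) throws IOException {
        final HttpURLConnection conn =
            (HttpURLConnection)new URL(args[0]).openConnection();
        conn.setRequestMethod("GET");
        conn.connect();

        // The header the patched StreamFile servlet sets; -1 when absent.
        final String cl = conn.getHeaderField("Content-Length");
        final long filelength = cl == null? -1: Long.parseLong(cl);

        long count = 0;
        final InputStream in = conn.getInputStream();
        try {
          final byte[] buf = new byte[4096];
          int n;
          while ((n = in.read(buf)) != -1) {
            count += n;
          }
        } finally {
          in.close();
        }
        // As in the patch, a -1 filelength never trips the check.
        if (count < filelength) {
          throw new IOException("Got EOF but byteread = " + count
              + " < filelength = " + filelength);
        }
        System.out.println(count + " bytes read; Content-Length was " + cl);
      }
    }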

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java?rev=1077554&r1=1077553&r2=1077554&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/StreamFile.java Fri Mar  4 04:28:35 2011
@@ -27,12 +27,16 @@ import javax.servlet.http.HttpServletReq
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSInputStream;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.security.UserGroupInformation;
 
 public class StreamFile extends DfsServlet {
+  /** for java.io.Serializable */
+  private static final long serialVersionUID = 1L;
+
+  public static final String CONTENT_LENGTH = "Content-Length";
+
   static InetSocketAddress nameNodeAddr;
   static DataNode datanode = null;
   static {
@@ -63,21 +67,30 @@ public class StreamFile extends DfsServl
       return;
     }
     
-    FSInputStream in = dfs.open(filename);
+    final DFSClient.DFSInputStream in = dfs.open(filename);
     OutputStream os = response.getOutputStream();
     response.setHeader("Content-Disposition", "attachment; filename=\"" + 
                        filename + "\"");
     response.setContentType("application/octet-stream");
+    response.setHeader(CONTENT_LENGTH, "" + in.getFileLength());
     byte buf[] = new byte[4096];
     try {
       int bytesRead;
       while ((bytesRead = in.read(buf)) != -1) {
         os.write(buf, 0, bytesRead);
       }
+    } catch(IOException e) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("response.isCommitted()=" + response.isCommitted(), e);
+      }
+      throw e;
     } finally {
-      in.close();
-      os.close();
-      dfs.close();
+      try {
+        in.close();
+        os.close();
+      } finally {
+        dfs.close();
+      }
     }
   }
 }
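
Two details of this hunk are easy to miss. First, dfs.open() is now typed as the concrete DFSClient.DFSInputStream (made public in the DFSClient.java hunk above), apparently so the servlet can call getFileLength() when setting the Content-Length header. Second, the nested try/finally makes dfs.close() run even when closing the streams throws. A generic sketch of that close ordering, with illustrative names:

    import java.io.Closeable;
    import java.io.IOException;

    final class CloseUtil {
      /**
       * Close the streams first; client.close() is attempted even if a
       * stream close throws. As in the patch, os.close() is still skipped
       * when in.close() throws.
       */
      static void closeStreamsThenClient(Closeable in, Closeable os,
          Closeable client) throws IOException {
        try {
          in.close();
          os.close();
        } finally {
          client.close();
        }
      }
    }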

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/aop/build/aop.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/aop/build/aop.xml?rev=1077554&r1=1077553&r2=1077554&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/aop/build/aop.xml (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/aop/build/aop.xml Fri Mar  4 04:28:35 2011
@@ -123,22 +123,6 @@
                        classpath="test.system.classpath"
                        test.dir="${system-test-build-dir}/test"
                        fileset.dir="${test.src.dir}/system/java"
-                       hadoop.home="${hadoop.home}"
-                       hadoop.conf.dir="${hadoop.conf.dir}">
-    </macro-test-runner>
-  </target>
-  <target name="-test-system-deployed"
-    depends="ivy-retrieve-common, prepare-test-system" 
-    if="hadoop.conf.dir.deployed">
-    <macro-jar-examples
-      build.dir="${system-test-build-dir}"
-      basedir="${system-test-build-dir}/examples">
-    </macro-jar-examples>
-    <macro-test-runner classpath="test.system.classpath"
-                       test.dir="${system-test-build-dir}/test"
-                       fileset.dir="${test.src.dir}/system/java"
-                       hadoop.home="${hadoop.home}"
-                       hadoop.conf.dir="${hadoop.conf.dir}"
                        hadoop.conf.dir.deployed="${hadoop.conf.dir.deployed}">
     </macro-test-runner>
   </target>


