hadoop-common-commits mailing list archives

From: t...@apache.org
Subject: svn commit: r1159782 [2/3] - in /hadoop/common/branches/HDFS-1623: ./ dev-support/ hadoop-assemblies/src/main/resources/assemblies/ hadoop-common/ hadoop-common/src/main/docs/ hadoop-common/src/main/java/ hadoop-common/src/main/java/org/apache/hadoop/i...
Date: Fri, 19 Aug 2011 20:49:35 GMT

Propchange: hadoop/common/branches/HDFS-1623/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -1 +1 @@
-/hadoop/common/trunk:1152502-1158071
+/hadoop/common/trunk:1152502-1159756

Modified: hadoop/common/branches/HDFS-1623/.gitignore
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/.gitignore?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/.gitignore (original)
+++ hadoop/common/branches/HDFS-1623/.gitignore Fri Aug 19 20:47:40 2011
@@ -4,4 +4,6 @@
 .idea
 .svn
 .classpath
+.project
+.settings
 target

Modified: hadoop/common/branches/HDFS-1623/dev-support/smart-apply-patch.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/dev-support/smart-apply-patch.sh?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/dev-support/smart-apply-patch.sh (original)
+++ hadoop/common/branches/HDFS-1623/dev-support/smart-apply-patch.sh Fri Aug 19 20:47:40 2011
@@ -59,15 +59,15 @@ PREFIX_DIRS=$(cut -d '/' -f 1 $TMP | sor
 if [[ -d hadoop-common ]]; then
   echo Looks like this is being run at project root
 
-# if all of the lines start with hadoop-common/, hdfs/, or mapreduce/, this is
+# if all of the lines start with hadoop-common/, hadoop-hdfs/, or mapreduce/, this is
 # relative to the hadoop root instead of the subproject root, so we need
 # to chop off another layer
-elif [[ "$PREFIX_DIRS" =~ ^(hdfs|hadoop-common|mapreduce)$ ]]; then
+elif [[ "$PREFIX_DIRS" =~ ^(hadoop-hdfs|hadoop-common|mapreduce)$ ]]; then
 
   echo Looks like this is relative to project root. Increasing PLEVEL
   PLEVEL=$[$PLEVEL + 1]
 
-elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common\|hdfs\|mapreduce' ; then
+elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common\|hadoop-hdfs\|mapreduce' ; then
   echo Looks like this is a cross-subproject patch. Try applying from the project root
   exit 1
 fi

Modified: hadoop/common/branches/HDFS-1623/hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-assemblies/src/main/resources/assemblies/hadoop-bintar.xml Fri Aug 19 20:47:40 2011
@@ -24,9 +24,9 @@
     <fileSet>
       <directory>${basedir}/src/main/bin</directory>
       <outputDirectory>/bin</outputDirectory>
-      <includes>
-        <include>hadoop</include>
-      </includes>
+      <excludes>
+        <exclude>*.sh</exclude>
+      </excludes>
       <fileMode>0755</fileMode>
     </fileSet>
     <fileSet>
@@ -37,7 +37,7 @@
       <directory>${basedir}/src/main/bin</directory>
       <outputDirectory>/libexec</outputDirectory>
       <includes>
-        <include>hadoop-config.sh</include>
+        <include>*-config.sh</include>
       </includes>
       <fileMode>0755</fileMode>
     </fileSet>
@@ -68,7 +68,7 @@
       </includes>
     </fileSet>
     <fileSet>
-      <directory>${basedir}/src/main/webapps</directory>
+      <directory>${project.build.directory}/webapps</directory>
       <outputDirectory>/share/hadoop/${hadoop.component}/webapps</outputDirectory>
     </fileSet>
     <fileSet>
@@ -101,7 +101,7 @@
     <dependencySet>
       <outputDirectory>/share/hadoop/${hadoop.component}/lib</outputDirectory>
       <unpack>false</unpack>
-      <scope>compile</scope>
+      <scope>runtime</scope>
       <useProjectArtifact>false</useProjectArtifact>
       <excludes>
         <exclude>org.apache.ant:*:jar</exclude>

Modified: hadoop/common/branches/HDFS-1623/hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-assemblies/src/main/resources/assemblies/hadoop-tar.xml Fri Aug 19 20:47:40 2011
@@ -40,7 +40,7 @@
       <directory>${basedir}/src/main/bin</directory>
       <outputDirectory>/libexec</outputDirectory>
       <includes>
-        <include>hadoop-config.sh</include>
+        <include>*-config.sh</include>
       </includes>
       <fileMode>0755</fileMode>
     </fileSet>
@@ -51,6 +51,16 @@
     <fileSet>
       <directory>${basedir}/src/main/webapps</directory>
       <outputDirectory>/webapps</outputDirectory>
+      <excludes>
+        <exclude>proto-*-web.xml</exclude>
+      </excludes>
+    </fileSet>
+    <fileSet>
+      <directory>${project.build.directory}/webapps</directory>
+      <outputDirectory>/webapps</outputDirectory>
+      <excludes>
+        <exclude>proto-*-web.xml</exclude>
+      </excludes>
     </fileSet>
     <fileSet>
       <directory>${project.build.directory}/site</directory>
@@ -73,11 +83,11 @@
     <dependencySet>
       <outputDirectory>/lib</outputDirectory>
       <unpack>false</unpack>
-      <scope>compile</scope>
+      <scope>runtime</scope>
       <useProjectArtifact>false</useProjectArtifact>
       <excludes>
         <exclude>org.apache.ant:*:jar</exclude>
-        <exclude>org.apache.hadoop:hadoop-*:jar</exclude>
+        <exclude>org.apache.hadoop:hadoop-*:*:*:*</exclude>
         <exclude>jdiff:jdiff:jar</exclude>
       </excludes>
     </dependencySet>

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/CHANGES.txt?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/CHANGES.txt Fri Aug 19 20:47:40 2011
@@ -324,6 +324,13 @@ Trunk (unreleased changes)
 
     HADOOP-7531. Add servlet util methods for handling paths in requests. (eli)
 
+    HADOOP-7493. Add ShortWritable.  (Uma Maheswara Rao G via szetszwo)
+
+    HADOOP-7555. Add eclipse-generated files to .gitignore. (atm)
+
+    HADOOP-7264. Bump avro version to at least 1.4.1. (Alejandro Abdelnur via
+    tomwhite)
+
   OPTIMIZATIONS
   
     HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole
@@ -498,6 +505,9 @@ Trunk (unreleased changes)
     HADOOP-7545. Common -tests JAR should not include properties and configs.
     (todd)
 
+    HADOOP-7536. Correct the dependency version regressions introduced in
+    HADOOP-6671. (Alejandro Abdelnur via tomwhite)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -1,5 +1,5 @@
 /hadoop/common/branches/yahoo-merge/CHANGES.txt:1079157,1079163-1079164,1079167
-/hadoop/common/trunk/hadoop-common/CHANGES.txt:1153185-1158071
+/hadoop/common/trunk/hadoop-common/CHANGES.txt:1153185-1159756
 /hadoop/core/branches/branch-0.18/CHANGES.txt:727226
 /hadoop/core/branches/branch-0.19/CHANGES.txt:713112
 /hadoop/core/trunk/CHANGES.txt:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/pom.xml?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/pom.xml Fri Aug 19 20:47:40 2011
@@ -16,9 +16,9 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-project</artifactId>
+    <artifactId>hadoop-project-distro</artifactId>
     <version>0.23.0-SNAPSHOT</version>
-    <relativePath>../hadoop-project</relativePath>
+    <relativePath>../hadoop-project-distro</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-common</artifactId>
@@ -28,18 +28,12 @@
   <packaging>jar</packaging>
 
   <properties>
-    <test.build.data>${project.build.directory}/test/data</test.build.data>
-    <hadoop.log.dir>${project.build.directory}/log</hadoop.log.dir>
-    <test.build.webapps>${project.build.directory}/test-classes/webapps</test.build.webapps>
-    <test.cache.data>${project.build.directory}/test-classes</test.cache.data>
-    <test.build.classes>${project.build.directory}/test-classes</test.build.classes>
-
-    <build.platform>${os.name}-${os.arch}-${sun.arch.data.model}</build.platform>
     <snappy.prefix>/usr/local</snappy.prefix>
     <snappy.lib>${snappy.prefix}/lib</snappy.lib>
     <bundle.snappy>false</bundle.snappy>
     
     <hadoop.component>common</hadoop.component>
+    <is.hadoop.component>true</is.hadoop.component>
   </properties>
 
   <dependencies>
@@ -219,11 +213,16 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
+      <groupId>org.apache.avro</groupId>
       <artifactId>avro</artifactId>
       <scope>compile</scope>
     </dependency>
     <dependency>
+      <groupId>org.apache.avro</groupId>
+      <artifactId>avro-ipc</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
       <groupId>net.sf.kosmosfs</groupId>
       <artifactId>kfs</artifactId>
       <scope>compile</scope>
@@ -243,83 +242,20 @@
   <build>
     <plugins>
       <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <forkMode>always</forkMode>
-          <forkedProcessTimeoutInSeconds>600</forkedProcessTimeoutInSeconds>
-          <argLine>-Xmx1024m</argLine>
-          <environmentVariables>
-            <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib</LD_LIBRARY_PATH>
-          </environmentVariables>
-          <systemPropertyVariables>
-
-            <!-- TODO: all references in testcases should be updated to this default -->
-            <test.build.data>${test.build.data}</test.build.data>
-            <test.build.webapps>${test.build.webapps}</test.build.webapps>
-            <test.cache.data>${test.cache.data}</test.cache.data>
-            <hadoop.log.dir>${hadoop.log.dir}</hadoop.log.dir>
-            <test.build.classes>${test.build.classes}</test.build.classes>
-
-            <java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
-            <java.security.krb5.conf>${basedir}/src/test/resources/krb5.conf</java.security.krb5.conf>
-          </systemPropertyVariables>
-          <includes>
-            <include>**/Test*.java</include>
-          </includes>
-          <excludes>
-            <exclude>**/${test.exclude}.java</exclude>
-            <exclude>${test.exclude.pattern}</exclude>
-            <exclude>**/Test*$*.java</exclude>
-          </excludes>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-jar-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>prepare-jar</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>jar</goal>
-            </goals>
-          </execution>
-          <execution>
-            <id>prepare-test-jar</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>test-jar</goal>
-            </goals>
-            <configuration>
-              <includes>
-                <include>**/*.class</include>
-              </includes>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-source-plugin</artifactId>
+        <groupId>org.apache.avro</groupId>
+        <artifactId>avro-maven-plugin</artifactId>
         <executions>
           <execution>
-            <phase>prepare-package</phase>
+            <id>generate-avro-test-sources</id>
+            <phase>generate-test-sources</phase>
             <goals>
-              <goal>jar</goal>
-              <goal>test-jar</goal>
+              <goal>schema</goal>
+              <goal>protocol</goal>
             </goals>
           </execution>
         </executions>
         <configuration>
-          <attach>true</attach>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>findbugs-maven-plugin</artifactId>
-        <configuration>
-          <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
+          <testOutputDirectory>${project.build.directory}/generated-test-sources/java</testOutputDirectory>
         </configuration>
       </plugin>
       <plugin>
@@ -359,24 +295,6 @@
                 <recordcc destdir="${project.build.directory}/generated-test-sources/java">
                   <fileset dir="${basedir}/src/test/ddl" includes="**/*.jr"/>
                 </recordcc>
-
-                <taskdef name="schema" classname="org.apache.avro.specific.SchemaTask">
-                  <classpath refid="maven.test.classpath"/>
-                </taskdef>
-                <schema destdir="${project.build.directory}/generated-test-sources/java">
-                  <fileset dir="${basedir}/src/test">
-                    <include name="**/*.avsc"/>
-                  </fileset>
-                </schema>
-
-                <taskdef name="schema" classname="org.apache.avro.specific.ProtocolTask">
-                  <classpath refid="maven.test.classpath"/>
-                </taskdef>
-                <schema destdir="${project.build.directory}/generated-test-sources/java">
-                  <fileset dir="${basedir}/src/test">
-                    <include name="**/*.avpr"/>
-                  </fileset>
-                </schema>
               </target>
             </configuration>
           </execution>
@@ -434,17 +352,6 @@
         </executions>
       </plugin>
       <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-checkstyle-plugin</artifactId>
-        <configuration>
-          <configLocation>file://${basedir}/dev-support/checkstyle.xml</configLocation>
-          <failOnViolation>false</failOnViolation>
-          <format>xml</format>
-          <format>html</format>
-          <outputFile>${project.build.directory}/test/checkstyle-errors.xml</outputFile>
-        </configuration>
-      </plugin>
-      <plugin>
         <groupId>org.apache.rat</groupId>
         <artifactId>apache-rat-plugin</artifactId>
         <configuration>
@@ -463,43 +370,6 @@
           </excludes>
         </configuration>
       </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-javadoc-plugin</artifactId>
-        <configuration>
-          <linksource>true</linksource>
-          <quiet>true</quiet>
-          <verbose>false</verbose>
-          <source>${maven.compile.source}</source>
-          <charset>${maven.compile.encoding}</charset>
-          <reportOutputDirectory>${project.build.directory}/site</reportOutputDirectory>
-          <destDir>api</destDir>
-          <groups>
-            <group>
-              <title>${project.name} API</title>
-              <packages>org.apache.hadoop*</packages>
-            </group>
-          </groups>
-          <doclet>org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsStandardDoclet</doclet>
-          <docletArtifacts>
-            <docletArtifact>
-              <groupId>org.apache.hadoop</groupId>
-              <artifactId>hadoop-annotations</artifactId>
-              <version>${project.version}</version>
-            </docletArtifact>
-          </docletArtifacts>
-          <useStandardDocletOptions>true</useStandardDocletOptions>
-
-          <!-- switch on dependency-driven aggregation -->
-          <includeDependencySources>true</includeDependencySources>
-
-          <dependencySourceIncludes>
-            <!-- include ONLY dependencies I control -->
-            <dependencySourceInclude>org.apache.hadoop:hadoop-annotations</dependencySourceInclude>
-          </dependencySourceIncludes>
-
-        </configuration>
-      </plugin>
     </plugins>
   </build>
 
@@ -636,427 +506,5 @@
         </plugins>
       </build>
     </profile>
-
-    <profile>
-      <id>docs</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <properties>
-        <jdiff.stable.api>0.20.2</jdiff.stable.api>
-        <jdiff.stability>-unstable</jdiff.stability>
-        <jdiff.compatibility></jdiff.compatibility>
-        <jdiff.javadoc.maxmemory>512m</jdiff.javadoc.maxmemory>
-      </properties>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-javadoc-plugin</artifactId>
-            <executions>
-              <execution>
-                <goals>
-                  <goal>javadoc</goal>
-                </goals>
-                <phase>prepare-package</phase>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.codehaus.mojo</groupId>
-            <artifactId>findbugs-maven-plugin</artifactId>
-            <executions>
-              <execution>
-                <goals>
-                  <goal>findbugs</goal>
-                </goals>
-                <phase>prepare-package</phase>
-              </execution>
-            </executions>
-            <configuration>
-              <excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
-            </configuration>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-dependency-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>site</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>copy</goal>
-                </goals>
-                <configuration>
-                  <artifactItems>
-                    <artifactItem>
-                      <groupId>jdiff</groupId>
-                      <artifactId>jdiff</artifactId>
-                      <version>${jdiff.version}</version>
-                      <overWrite>false</overWrite>
-                      <outputDirectory>${project.build.directory}</outputDirectory>
-                      <destFileName>jdiff.jar</destFileName>
-                    </artifactItem>
-                    <artifactItem>
-                      <groupId>org.apache.hadoop</groupId>
-                      <artifactId>hadoop-annotations</artifactId>
-                      <version>${hadoop.annotations.version}</version>
-                      <overWrite>false</overWrite>
-                      <outputDirectory>${project.build.directory}</outputDirectory>
-                      <destFileName>hadoop-annotations.jar</destFileName>
-                    </artifactItem>
-                  </artifactItems>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>site</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-
-                    <mkdir dir="${project.build.directory}/docs-src"/>
-
-                    <copy todir="${project.build.directory}/docs-src">
-                      <fileset dir="${basedir}/src/main/docs"/>
-                    </copy>
-
-                    <!-- Docs -->
-                    <exec dir="${project.build.directory}/docs-src"
-                          executable="${env.FORREST_HOME}/bin/forrest"
-                          failonerror="true">
-                    </exec>
-                    <copy todir="${project.build.directory}/site">
-                      <fileset dir="${project.build.directory}/docs-src/build/site"/>
-                    </copy>
-                    <copy file="${project.build.directory}/docs-src/releasenotes.html"
-                          todir="${project.build.directory}/site"/>
-                    <style basedir="${basedir}/src/main/resources"
-                           destdir="${project.build.directory}/site"
-                           includes="core-default.xml"
-                           style="${basedir}/src/main/xsl/configuration.xsl"/>
-
-                    <!-- Convert 'CHANGES.txt' to 'changes.html" -->
-                    <exec executable="perl" input="${basedir}/../CHANGES.txt"
-                          output="${project.build.directory}/site/changes.html"
-                          failonerror="true">
-                      <arg value="${project.build.directory}/docs-src/changes/changes2html.pl"/>
-                    </exec>
-                    <copy todir="${project.build.directory}/site">
-                      <fileset dir="${project.build.directory}/docs-src/changes" includes="*.css"/>
-                    </copy>
-
-                    <!-- Jdiff -->
-                    <mkdir dir="${project.build.directory}/site/jdiff/xml"/>
-
-                    <javadoc maxmemory="${jdiff.javadoc.maxmemory}" verbose="yes">
-                      <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
-                              path="${project.build.directory}/hadoop-annotations.jar:${project.build.directory}/jdiff.jar">
-                        <param name="-apidir" value="${project.build.directory}/site/jdiff/xml"/>
-                        <param name="-apiname" value="hadoop-core ${project.version}"/>
-                        <param name="${jdiff.stability}"/>
-                      </doclet>
-                      <packageset dir="${basedir}/src/main/java"/>
-                      <classpath>
-                        <path refid="maven.compile.classpath"/>
-                      </classpath>
-                    </javadoc>
-                    <javadoc sourcepath="${basedir}/src/main/java"
-                             destdir="${project.build.directory}/site/jdiff/xml"
-                             sourceFiles="${basedir}/dev-support/jdiff/Null.java"
-                             maxmemory="${jdiff.javadoc.maxmemory}">
-                      <doclet name="org.apache.hadoop.classification.tools.ExcludePrivateAnnotationsJDiffDoclet"
-                              path="${project.build.directory}/hadoop-annotations.jar:${project.build.directory}/jdiff.jar">
-                        <param name="-oldapi" value="hadoop-core ${jdiff.stable.api}"/>
-                        <param name="-newapi" value="hadoop-core ${project.version}"/>
-                        <param name="-oldapidir" value="${basedir}/dev-support/jdiff"/>
-                        <param name="-newapidir" value="${project.build.directory}/site/jdiff/xml"/>
-                        <param name="-javadocold"
-                               value="http://hadoop.apache.org/docs/${jdiff.stable.api}/api/"/>
-                        <param name="-javadocnew" value="${project.build.directory}/site/api"/>
-                        <param name="-stats"/>
-                        <param name="${jdiff.stability}"/>
-                        <param name="${jdiff.compatibility}"/>
-                      </doclet>
-                      <classpath>
-                        <path refid="maven.compile.classpath"/>
-                      </classpath>
-                    </javadoc>
-
-                    <xslt style="${env.FINDBUGS_HOME}/src/xsl/default.xsl"
-                          in="${project.build.directory}/findbugsXml.xml"
-                          out="${project.build.directory}/site/findbugs.html"/>
-
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-
-    <profile>
-      <id>src</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-assembly-plugin</artifactId>
-            <dependencies>
-              <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-assemblies</artifactId>
-                <version>${hadoop.assemblies.version}</version>
-              </dependency>
-            </dependencies>
-            <executions>
-              <execution>
-                <id>pre-tar-src</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>single</goal>
-                </goals>
-                <configuration>
-                  <appendAssemblyId>false</appendAssemblyId>
-                  <attach>false</attach>
-                  <finalName>${project.artifactId}-${project.version}</finalName>
-                  <descriptorRefs>
-                    <descriptorRef>hadoop-src</descriptorRef>
-                  </descriptorRefs>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-
-    <profile>
-      <id>tar</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>pre-tar</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-                    <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/tar-copynativelibs.sh">
-
-                      which cygpath 2> /dev/null
-                      if [ $? = 1 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      TAR='tar cf -'
-                      UNTAR='tar xfBp -'
-                      LIB_DIR="${BUILD_DIR}/native/target/usr/local/lib"
-                      if [ -d $${LIB_DIR} ] ; then
-                      TARGET_DIR="${BUILD_DIR}/${project.artifactId}-${project.version}/lib/native/${build.platform}"
-                      mkdir -p $${TARGET_DIR}
-                      cd $${LIB_DIR}
-                      $$TAR *hadoop* | (cd $${TARGET_DIR}/; $$UNTAR)
-                      if [ "${bundle.snappy}" = "true" ] ; then
-                      cd ${snappy.lib}
-                      $$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
-                      fi
-                      fi
-                    </echo>
-                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./tar-copynativelibs.sh"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-              <execution>
-                <id>tar</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-                    <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/tar-maketar.sh">
-
-                      which cygpath 2> /dev/null
-                      if [ $? = 1 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      cd ${BUILD_DIR}
-                      tar czf ${project.artifactId}-${project.version}.tar.gz ${project.artifactId}-${project.version}
-                    </echo>
-                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./tar-maketar.sh"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-assembly-plugin</artifactId>
-            <dependencies>
-              <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-assemblies</artifactId>
-                <version>${hadoop.assemblies.version}</version>
-              </dependency>
-            </dependencies>
-            <executions>
-              <execution>
-                <id>pre-tar</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>single</goal>
-                </goals>
-                <configuration>
-                  <appendAssemblyId>false</appendAssemblyId>
-                  <attach>false</attach>
-                  <finalName>${project.artifactId}-${project.version}</finalName>
-                  <descriptorRefs>
-                    <descriptorRef>hadoop-tar</descriptorRef>
-                  </descriptorRefs>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-
-    <profile>
-      <id>bintar</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>pre-bintar</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-                    <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/bintar-copynativelibs.sh">
-
-                      which cygpath 2> /dev/null
-                      if [ $? = 1 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      TAR='tar cf -'
-                      UNTAR='tar xfBp -'
-                      LIB_DIR="${BUILD_DIR}/native/target/usr/local/lib"
-                      if [ -d $${LIB_DIR} ] ; then
-                      TARGET_DIR="${BUILD_DIR}/${project.artifactId}-${project.version}-bin/lib"
-                      mkdir -p $${TARGET_DIR}
-                      cd $${LIB_DIR}
-                      $$TAR *hadoop* | (cd $${TARGET_DIR}/; $$UNTAR)
-                      if [ "${bundle.snappy}" = "true" ] ; then
-                      cd ${snappy.lib}
-                      $$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
-                      fi
-                      fi
-                    </echo>
-                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./bintar-copynativelibs.sh"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-              <execution>
-                <id>bintar</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target>
-                    <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/bintar-maketar.sh">
-
-                      which cygpath 2> /dev/null
-                      if [ $? = 1 ]; then
-                        BUILD_DIR="${project.build.directory}"
-                      else
-                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                      fi
-                      cd ${BUILD_DIR}
-                      tar czf ${project.artifactId}-${project.version}-bin.tar.gz ${project.artifactId}-${project.version}-bin
-                    </echo>
-                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
-                      <arg line="./bintar-maketar.sh"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-assembly-plugin</artifactId>
-            <dependencies>
-              <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-assemblies</artifactId>
-                <version>${hadoop.assemblies.version}</version>
-              </dependency>
-            </dependencies>
-            <executions>
-              <execution>
-                <id>pre-bintar</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>single</goal>
-                </goals>
-                <configuration>
-                  <appendAssemblyId>false</appendAssemblyId>
-                  <attach>false</attach>
-                  <finalName>${project.artifactId}-${project.version}-bin</finalName>
-                  <descriptorRefs>
-                    <descriptorRef>hadoop-bintar</descriptorRef>
-                  </descriptorRefs>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
   </profiles>
 </project>

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -1,2 +1,2 @@
-/hadoop/common/trunk/hadoop-common/src/main/docs:1152502-1158071
+/hadoop/common/trunk/hadoop-common/src/main/docs:1152502-1159756
 /hadoop/core/branches/branch-0.19/src/docs:713112

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common/src/main/java:1152502-1158071
+/hadoop/common/trunk/hadoop-common/src/main/java:1152502-1159756
 /hadoop/core/branches/branch-0.19/core/src/java:713112
 /hadoop/core/trunk/src/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java Fri Aug 19 20:47:40 2011
@@ -28,6 +28,7 @@ import org.apache.avro.io.BinaryEncoder;
 import org.apache.avro.io.DatumReader;
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.EncoderFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configured;
@@ -93,7 +94,7 @@ public abstract class AvroSerialization<
     @Override
     public void open(OutputStream out) throws IOException {
       outStream = out;
-      encoder = new BinaryEncoder(out);
+      encoder = EncoderFactory.get().binaryEncoder(out, encoder);
     }
 
     @Override
@@ -127,7 +128,7 @@ public abstract class AvroSerialization<
     @Override
     public void open(InputStream in) throws IOException {
       inStream = in;
-      decoder = DecoderFactory.defaultFactory().createBinaryDecoder(in, null);
+      decoder = DecoderFactory.get().binaryDecoder(in, decoder);
     }
 
   }

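The two hunks above track Avro's 1.5 API change: the public BinaryEncoder and
BinaryDecoder constructors gave way to factory methods that take an optional
previous instance to reuse. A minimal round-trip sketch of the new pattern
(the schema and value are illustrative, not taken from this patch):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericDatumReader;
    import org.apache.avro.generic.GenericDatumWriter;
    import org.apache.avro.io.BinaryDecoder;
    import org.apache.avro.io.BinaryEncoder;
    import org.apache.avro.io.DecoderFactory;
    import org.apache.avro.io.EncoderFactory;

    public class AvroRoundTrip {
      public static void main(String[] args) throws IOException {
        Schema schema = Schema.create(Schema.Type.STRING);
        ByteArrayOutputStream out = new ByteArrayOutputStream();

        // Avro 1.5: ask the factory for an encoder. Passing a previous
        // instance (or null) as the second argument lets Avro reuse its
        // buffers, which is why open() above threads the old encoder back in.
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<Object>(schema).write("hello", encoder);
        encoder.flush(); // binaryEncoder() buffers; flush before using the bytes

        // Decoders follow the same factory-plus-reuse pattern.
        BinaryDecoder decoder =
            DecoderFactory.get().binaryDecoder(out.toByteArray(), null);
        Object result = new GenericDatumReader<Object>(schema).read(null, decoder);
        System.out.println(result); // hello
      }
    }

One behavioral note: binaryEncoder() returns a buffered encoder, so a flush is
needed before the bytes are consumed; directBinaryEncoder(), used in the
AvroTestUtil change below, writes straight through.
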
Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java Fri Aug 19 20:47:40 2011
@@ -34,9 +34,9 @@ import javax.net.SocketFactory;
 
 import org.apache.avro.ipc.Responder;
 import org.apache.avro.ipc.Transceiver;
-import org.apache.avro.reflect.ReflectRequestor;
-import org.apache.avro.reflect.ReflectResponder;
-import org.apache.avro.specific.SpecificRequestor;
+import org.apache.avro.ipc.reflect.ReflectRequestor;
+import org.apache.avro.ipc.reflect.ReflectResponder;
+import org.apache.avro.ipc.specific.SpecificRequestor;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceStability;

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java Fri Aug 19 20:47:40 2011
@@ -22,8 +22,8 @@ import java.io.IOException;
 
 import org.apache.avro.ipc.Responder;
 import org.apache.avro.ipc.Transceiver;
-import org.apache.avro.specific.SpecificRequestor;
-import org.apache.avro.specific.SpecificResponder;
+import org.apache.avro.ipc.specific.SpecificRequestor;
+import org.apache.avro.ipc.specific.SpecificResponder;
 import org.apache.hadoop.classification.InterfaceStability;
 
 /**

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/core/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common/src/test/core:1152502-1158071
+/hadoop/common/trunk/hadoop-common/src/test/core:1152502-1159756
 /hadoop/core/branches/branch-0.19/core/src/test/core:713112
 /hadoop/core/trunk/src/test/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java Fri Aug 19 20:47:40 2011
@@ -18,15 +18,16 @@
 
 package org.apache.hadoop.io;
 
-import java.io.IOException;
+import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.lang.reflect.Type;
 
 import org.apache.avro.Schema;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.EncoderFactory;
 import org.apache.avro.reflect.ReflectData;
 import org.apache.avro.reflect.ReflectDatumWriter;
 import org.apache.avro.reflect.ReflectDatumReader;
-import org.apache.avro.io.BinaryEncoder;
 import org.apache.avro.io.DecoderFactory;
 
 import static junit.framework.TestCase.assertEquals;
@@ -47,11 +48,11 @@ public class AvroTestUtil {
     // check that value is serialized correctly
     ReflectDatumWriter<Object> writer = new ReflectDatumWriter<Object>(s);
     ByteArrayOutputStream out = new ByteArrayOutputStream();
-    writer.write(value, new BinaryEncoder(out));
+    writer.write(value, EncoderFactory.get().directBinaryEncoder(out, null));
     ReflectDatumReader<Object> reader = new ReflectDatumReader<Object>(s);
     Object after =
-      reader.read(null, DecoderFactory.defaultFactory().createBinaryDecoder(
-          out.toByteArray(), null));
+      reader.read(null,
+                  DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
     assertEquals(value, after);
   }
 

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java:1153185-1158071
+/hadoop/common/trunk/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java:1153185-1159756
 /hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/io/TestSequenceFile.java:713112
 /hadoop/core/trunk/src/test/mapred/org/apache/hadoop/io/TestSequenceFile.java:776175-785643

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java Fri Aug 19 20:47:40 2011
@@ -18,10 +18,11 @@
 
 package org.apache.hadoop.io;
 
-import java.io.*;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
 import java.util.Random;
 
-import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -68,6 +69,10 @@ public class TestWritable extends TestCa
   public void testByteWritable() throws Exception {
     testWritable(new ByteWritable((byte)128));
   }
+  
+  public void testShortWritable() throws Exception {
+    testWritable(new ShortWritable((short)256));
+  }
 
   public void testDoubleWritable() throws Exception {
     testWritable(new DoubleWritable(1.0));
@@ -104,13 +109,13 @@ public class TestWritable extends TestCa
     }
   }
 
-  private static class Frob implements WritableComparable {
+  private static class Frob implements WritableComparable<Frob> {
     static {                                     // register default comparator
       WritableComparator.define(Frob.class, new FrobComparator());
     }
     @Override public void write(DataOutput out) throws IOException {}
     @Override public void readFields(DataInput in) throws IOException {}
-    @Override public int compareTo(Object o) { return 0; }
+    @Override public int compareTo(Frob o) { return 0; }
   }
 
   /** Test that comparator is defined. */
@@ -118,5 +123,31 @@ public class TestWritable extends TestCa
     assert(WritableComparator.get(Frob.class) instanceof FrobComparator);
   }
 
+  /**
+   * Test a user comparator that relies on deserializing both arguments for each
+   * compare.
+   */
+  public void testShortWritableComparator() throws Exception {
+    ShortWritable writable1 = new ShortWritable((short)256);
+    ShortWritable writable2 = new ShortWritable((short) 128);
+    ShortWritable writable3 = new ShortWritable((short) 256);
+    
+    final String SHOULD_NOT_MATCH_WITH_RESULT_ONE = "Result should be 1, should not match the writables";
+    assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_ONE,
+        writable1.compareTo(writable2) == 1);
+    assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_ONE, WritableComparator.get(
+        ShortWritable.class).compare(writable1, writable2) == 1);
+
+    final String SHOULD_NOT_MATCH_WITH_RESULT_MINUS_ONE = "Result should be -1, should not match the writables";
+    assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_MINUS_ONE, writable2
+        .compareTo(writable1) == -1);
+    assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_MINUS_ONE, WritableComparator.get(
+        ShortWritable.class).compare(writable2, writable1) == -1);
+
+    final String SHOULD_MATCH = "Result should be 0, should match the writables";
+    assertTrue(SHOULD_MATCH, writable1.compareTo(writable1) == 0);
+    assertTrue(SHOULD_MATCH, WritableComparator.get(ShortWritable.class)
+        .compare(writable1, writable3) == 0);
+  }
 
 }

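The comparator test above deliberately goes through deserialization on every
compare. For contrast, a registered comparator can also operate on the raw
serialized bytes; here is a sketch under the assumption of ShortWritable's
two-byte big-endian wire format (the ShortRawComparator class is illustrative,
not part of this commit):

    import org.apache.hadoop.io.ShortWritable;
    import org.apache.hadoop.io.WritableComparator;

    // Sketch of a raw-bytes comparator: no object deserialization per compare.
    public class ShortRawComparator extends WritableComparator {
      static { // make this the default comparator for ShortWritable
        WritableComparator.define(ShortWritable.class, new ShortRawComparator());
      }

      public ShortRawComparator() {
        super(ShortWritable.class);
      }

      @Override
      public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
        // readUnsignedShort reads two big-endian bytes; the cast restores sign
        short v1 = (short) readUnsignedShort(b1, s1);
        short v2 = (short) readUnsignedShort(b2, s2);
        return (v1 < v2) ? -1 : ((v1 == v2) ? 0 : 1);
      }
    }
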
Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/ipc/AvroTestProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/ipc/AvroTestProtocol.java?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/ipc/AvroTestProtocol.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/ipc/AvroTestProtocol.java Fri Aug 19 20:47:40 2011
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.ipc;
 
-import org.apache.avro.ipc.AvroRemoteException;
+import org.apache.avro.AvroRemoteException;
 
 @SuppressWarnings("serial")
 public interface AvroTestProtocol {

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAvroRpc.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAvroRpc.java?rev=1159782&r1=1159781&r2=1159782&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAvroRpc.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAvroRpc.java Fri Aug 19 20:47:40 2011
@@ -28,7 +28,7 @@ import javax.security.sasl.Sasl;
 import junit.framework.Assert;
 import junit.framework.TestCase;
 
-import org.apache.avro.ipc.AvroRemoteException;
+import org.apache.avro.AvroRemoteException;
 import org.apache.avro.util.Utf8;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -189,7 +189,7 @@ public class TestAvroRpc extends TestCas
         (AvroSpecificTestProtocol)RPC.getProxy(AvroSpecificTestProtocol.class, 
             0, addr, conf);
       
-      Utf8 echo = proxy.echo(new Utf8("hello world"));
+      CharSequence echo = proxy.echo("hello world");
       assertEquals("hello world", echo.toString());
 
       int intResult = proxy.add(1, 2);
@@ -210,7 +210,7 @@ public class TestAvroRpc extends TestCas
     }
 
     @Override
-    public Utf8 echo(Utf8 msg) throws AvroRemoteException {
+    public CharSequence echo(CharSequence msg) throws AvroRemoteException {
       return msg;
     }
     

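The Utf8-to-CharSequence changes follow from Avro 1.5's specific compiler,
which now generates CharSequence where 1.4 generated Utf8 for string-typed
messages. A sketch of the resulting interface shape (AvroSpecificTestProtocol
itself is generated from an .avpr file that is not part of this diff; the
names below are illustrative):

    import org.apache.avro.AvroRemoteException;

    // Roughly the shape Avro 1.5 generates. Under Avro 1.4 the parameter
    // and return types here would have been org.apache.avro.util.Utf8.
    interface EchoProtocolSketch {
      CharSequence echo(CharSequence msg) throws AvroRemoteException;
    }

    class EchoImpl implements EchoProtocolSketch {
      @Override
      public CharSequence echo(CharSequence msg) throws AvroRemoteException {
        return msg; // compare results via toString(), as the test above does
      }
    }
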
Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,10 @@
+build
+build-fi
+build.properties
+logs
+.classpath
+.externalToolBuilders
+.launches
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,5 @@
+/hadoop/common/trunk/hadoop-hdfs:1152502-1159756
+/hadoop/core/branches/branch-0.19/hdfs:713112
+/hadoop/hdfs/branches/HDFS-1052:987665-1095512
+/hadoop/hdfs/branches/HDFS-265:796829-820463
+/hadoop/hdfs/branches/branch-0.21:820487

Added: hadoop/common/branches/HDFS-1623/hadoop-hdfs/CHANGES.HDFS-1623.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/CHANGES.HDFS-1623.txt?rev=1159782&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs/CHANGES.HDFS-1623.txt (added)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/CHANGES.HDFS-1623.txt Fri Aug 19 20:47:40 2011
@@ -0,0 +1,9 @@
+Changes for HDFS-1623 branch.
+
+This change list will be merged into the trunk CHANGES.txt when the HDFS-1623
+branch is merged.
+------------------------------
+
+HDFS-2179. Add fencing framework and mechanisms for NameNode HA. (todd)
+
+HDFS-1974. Introduce active and standby states to the namenode. (suresh)

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs/ivy/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,4 @@
+hadoop-hdfs.xml
+hadoop-hdfs-test.xml
+ivy-*.jar
+maven-ant-tasks*.jar

Copied: hadoop/common/branches/HDFS-1623/hadoop-hdfs/pom.xml (from r1159756, hadoop/common/trunk/hadoop-hdfs/pom.xml)
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/pom.xml?p2=hadoop/common/branches/HDFS-1623/hadoop-hdfs/pom.xml&p1=hadoop/common/trunk/hadoop-hdfs/pom.xml&r1=1159756&r2=1159782&rev=1159782&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/pom.xml Fri Aug 19 20:47:40 2011
@@ -109,6 +109,10 @@
       <artifactId>ant</artifactId>
       <scope>provided</scope>
     </dependency>
+    <dependency>
+      <groupId>com.jcraft</groupId>
+      <artifactId>jsch</artifactId>
+    </dependency>
   </dependencies>
 
   <build>
@@ -277,6 +281,7 @@
         <configuration>
           <excludes>
             <exclude>CHANGES.txt</exclude>
+            <exclude>CHANGES.HDFS-1623.txt</exclude>
             <exclude>.idea/**</exclude>
             <exclude>src/main/conf/*</exclude>
             <exclude>src/main/docs/**</exclude>

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/conf/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1,10 @@
+masters
+slaves
+hadoop-env.sh
+hadoop-site.xml
+core-site.xml
+mapred-site.xml
+hdfs-site.xml
+hadoop-policy.xml
+capacity-scheduler.xml
+mapred-queue-acls.xml

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/docs/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Aug 19 20:47:40 2011
@@ -0,0 +1 @@
+build

Propchange: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (added)
+++ svn:mergeinfo Fri Aug 19 20:47:40 2011
@@ -0,0 +1,10 @@
+/hadoop/common/trunk/hadoop-hdfs/src/main/java:1158072-1159756
+/hadoop/core/branches/branch-0.19/hdfs/src/java:713112
+/hadoop/core/branches/branch-0.19/hdfs/src/main/java:713112
+/hadoop/core/trunk/src/hdfs:776175-785643,785929-786278
+/hadoop/hdfs/branches/HDFS-1052/src/java:987665-1095512
+/hadoop/hdfs/branches/HDFS-1052/src/main/java:987665-1095512
+/hadoop/hdfs/branches/HDFS-265/src/java:796829-820463
+/hadoop/hdfs/branches/HDFS-265/src/main/java:796829-820463
+/hadoop/hdfs/branches/branch-0.21/src/java:820487
+/hadoop/hdfs/branches/branch-0.21/src/main/java:820487

Copied: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java (from r1159756, hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java)
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java?p2=hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java&p1=hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java&r1=1159756&r2=1159782&rev=1159782&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java Fri Aug 19 20:47:40 2011
@@ -696,4 +696,14 @@ public class DFSUtil {
         ClientDatanodeProtocol.versionID, addr, ticket, confWithNoIpcIdle,
         NetUtils.getDefaultSocketFactory(conf), socketTimeout);
   }
+
+  /**
+   * Returns true if HA for namenode is configured.
+   * @param conf Configuration
+   * @return true if HA is configured in the configuration; else false.
+   */
+  public static boolean isHAEnabled(Configuration conf) {
+    // TODO:HA configuration changes pending
+    return false;
+  }
 }

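isHAEnabled() is a stub for now (the TODO notes that the HA configuration keys
have not landed on this branch yet), but it gives callers one place to branch
on HA mode. A hypothetical call site, purely for illustration:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSUtil;

    public class HaModeCheck {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Always false until the HA configuration work lands, but call
        // sites can already be written against the intended API.
        if (DFSUtil.isHAEnabled(conf)) {
          System.out.println("NameNode will start with HA enabled");
        } else {
          System.out.println("NameNode will start standalone");
        }
      }
    }
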
Copied: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java (from r1159756, hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java)
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java?p2=hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java&p1=hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java&r1=1159756&r2=1159782&rev=1159782&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java Fri Aug 19 20:47:40 2011
@@ -189,34 +189,6 @@ public class BackupNode extends NameNode
   }
 
   /////////////////////////////////////////////////////
-  // NamenodeProtocol implementation for backup node.
-  /////////////////////////////////////////////////////
-  @Override // NamenodeProtocol
-  public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size)
-  throws IOException {
-    throw new UnsupportedActionException("getBlocks");
-  }
-
-  // Only active name-node can register other nodes.
-  @Override // NamenodeProtocol
-  public NamenodeRegistration register(NamenodeRegistration registration
-  ) throws IOException {
-    throw new UnsupportedActionException("register");
-  }
-
-  @Override // NamenodeProtocol
-  public NamenodeCommand startCheckpoint(NamenodeRegistration registration)
-  throws IOException {
-    throw new UnsupportedActionException("startCheckpoint");
-  }
-
-  @Override // NamenodeProtocol
-  public void endCheckpoint(NamenodeRegistration registration,
-                            CheckpointSignature sig) throws IOException {
-    throw new UnsupportedActionException("endCheckpoint");
-  }  
-
-  /////////////////////////////////////////////////////
   // BackupNodeProtocol implementation for backup node.
   /////////////////////////////////////////////////////
 
@@ -224,6 +196,7 @@ public class BackupNode extends NameNode
   public void journal(NamenodeRegistration nnReg,
       long firstTxId, int numTxns,
       byte[] records) throws IOException {
+    checkOperation(OperationCategory.JOURNAL);
     verifyRequest(nnReg);
     if(!nnRpcAddress.equals(nnReg.getAddress()))
       throw new IOException("Journal request from unexpected name-node: "
@@ -234,6 +207,7 @@ public class BackupNode extends NameNode
   @Override
   public void startLogSegment(NamenodeRegistration registration, long txid)
       throws IOException {
+    checkOperation(OperationCategory.JOURNAL);
     verifyRequest(registration);
   
     getBNImage().namenodeStartedLogSegment(txid);
@@ -369,4 +343,14 @@ public class BackupNode extends NameNode
   String getClusterId() {
     return clusterId;
   }
+  
+  @Override // NameNode
+  protected void checkOperation(OperationCategory op)
+      throws UnsupportedActionException {
+    if (OperationCategory.JOURNAL != op) {
+      String msg = "Operation category " + op
+          + " is not supported at the BackupNode";
+      throw new UnsupportedActionException(msg);
+    }
+  }
 }

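The four NamenodeProtocol stubs deleted above (getBlocks, register, startCheckpoint, endCheckpoint) are subsumed by the new checkOperation override: the NameNode implementations now check the operation category up front (endCheckpoint, for example, checks CHECKPOINT in the NameNode diff below), and the BackupNode override rejects every category except JOURNAL. A self-contained sketch of the gate, mirroring the override but using a JDK exception so it runs standalone (not the actual class):

    public class JournalGateDemo {
      enum OperationCategory { READ, WRITE, CHECKPOINT, JOURNAL }

      // Mirrors BackupNode.checkOperation: only journal traffic is served.
      static void checkOperation(OperationCategory op) {
        if (OperationCategory.JOURNAL != op) {
          throw new UnsupportedOperationException("Operation category " + op
              + " is not supported at the BackupNode");
        }
      }

      public static void main(String[] args) {
        checkOperation(OperationCategory.JOURNAL);    // journal() path: passes
        checkOperation(OperationCategory.CHECKPOINT); // endCheckpoint() path: throws
      }
    }
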
Copied: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (from r1159756, hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java)
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java?p2=hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java&p1=hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java&r1=1159756&r2=1159782&rev=1159782&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java Fri Aug 19 20:47:40 2011
@@ -31,6 +31,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ha.HealthCheckFailedException;
+import org.apache.hadoop.ha.ServiceFailedException;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.CreateFlag;
@@ -72,11 +74,15 @@ import org.apache.hadoop.hdfs.server.com
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.common.IncorrectVersionException;
 import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
+import org.apache.hadoop.hdfs.server.namenode.ha.ActiveState;
+import org.apache.hadoop.hdfs.server.namenode.ha.HAState;
+import org.apache.hadoop.hdfs.server.namenode.ha.StandbyState;
 import org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics;
 import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeCommand;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
+import org.apache.hadoop.hdfs.server.protocol.JournalProtocol;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeCommand;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
@@ -151,6 +157,20 @@ public class NameNode implements Namenod
   }
   
   /**
+   * Categories of operations supported by the namenode.
+   */
+  public static enum OperationCategory {
+    /** Read operation that does not change the namespace state */
+    READ,
+    /** Write operation that changes the namespace state */
+    WRITE,
+    /** Operations related to checkpointing */
+    CHECKPOINT,
+    /** Operations related to {@link JournalProtocol} */
+    JOURNAL
+  }
+  
+  /**
    * HDFS federation configuration can have two types of parameters:
    * <ol>
    * <li>Parameter that is common for all the name services in the cluster.</li>
@@ -209,9 +229,15 @@ public class NameNode implements Namenod
 
   public static final Log LOG = LogFactory.getLog(NameNode.class.getName());
   public static final Log stateChangeLog = LogFactory.getLog("org.apache.hadoop.hdfs.StateChange");
+  public static final HAState ACTIVE_STATE = new ActiveState();
+  public static final HAState STANDBY_STATE = new StandbyState();
   
   protected FSNamesystem namesystem; 
   protected NamenodeRole role;
+  private HAState state;
+  private final boolean haEnabled;
+
+  
   /** RPC server. Package-protected for use in tests. */
   Server server;
   /** RPC server for HDFS Services communication.
@@ -407,6 +433,7 @@ public class NameNode implements Namenod
    * @param conf the configuration
    */
   protected void initialize(Configuration conf) throws IOException {
+    initializeGenericKeys(conf);
     InetSocketAddress socAddr = getRpcServerAddress(conf);
     UserGroupInformation.setConfiguration(conf);
     loginAsNameNodeUser(conf);
@@ -455,10 +482,6 @@ public class NameNode implements Namenod
     }
 
     activate(conf);
-    LOG.info(getRole() + " up at: " + rpcAddress);
-    if (serviceRPCAddress != null) {
-      LOG.info(getRole() + " service server is up at: " + serviceRPCAddress); 
-    }
   }
   
   /**
@@ -508,6 +531,10 @@ public class NameNode implements Namenod
         LOG.warn("ServicePlugin " + p + " could not be started", t);
       }
     }
+    LOG.info(getRole() + " up at: " + rpcAddress);
+    if (serviceRPCAddress != null) {
+      LOG.info(getRole() + " service server is up at: " + serviceRPCAddress); 
+    }
   }
 
   private void startTrashEmptier(Configuration conf) throws IOException {
@@ -561,8 +588,9 @@ public class NameNode implements Namenod
   protected NameNode(Configuration conf, NamenodeRole role) 
       throws IOException { 
     this.role = role;
+    this.haEnabled = DFSUtil.isHAEnabled(conf);
+    this.state = !haEnabled ? ACTIVE_STATE : STANDBY_STATE;
     try {
-      initializeGenericKeys(conf);
       initialize(conf);
     } catch (IOException e) {
       this.stop();
@@ -643,6 +671,7 @@ public class NameNode implements Namenod
   public void errorReport(NamenodeRegistration registration,
                           int errorCode, 
                           String msg) throws IOException {
+    checkOperation(OperationCategory.WRITE);
     verifyRequest(registration);
     LOG.info("Error report from " + registration + ": " + msg);
     if(errorCode == FATAL)
@@ -670,27 +699,28 @@ public class NameNode implements Namenod
   @Override // NamenodeProtocol
   public void endCheckpoint(NamenodeRegistration registration,
                             CheckpointSignature sig) throws IOException {
-    verifyRequest(registration);
-    if(!isRole(NamenodeRole.NAMENODE))
-      throw new IOException("Only an ACTIVE node can invoke endCheckpoint.");
+    checkOperation(OperationCategory.CHECKPOINT);
     namesystem.endCheckpoint(registration, sig);
   }
 
   @Override // ClientProtocol
   public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer)
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     return namesystem.getDelegationToken(renewer);
   }
 
   @Override // ClientProtocol
   public long renewDelegationToken(Token<DelegationTokenIdentifier> token)
       throws InvalidToken, IOException {
+    checkOperation(OperationCategory.WRITE);
     return namesystem.renewDelegationToken(token);
   }
 
   @Override // ClientProtocol
   public void cancelDelegationToken(Token<DelegationTokenIdentifier> token)
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     namesystem.cancelDelegationToken(token);
   }
   
@@ -699,6 +729,7 @@ public class NameNode implements Namenod
                                           long offset, 
                                           long length) 
       throws IOException {
+    checkOperation(OperationCategory.READ);
     metrics.incrGetBlockLocations();
     return namesystem.getBlockLocations(getClientMachine(), 
                                         src, offset, length);
@@ -717,6 +748,7 @@ public class NameNode implements Namenod
                      boolean createParent,
                      short replication,
                      long blockSize) throws IOException {
+    checkOperation(OperationCategory.WRITE);
     String clientMachine = getClientMachine();
     if (stateChangeLog.isDebugEnabled()) {
       stateChangeLog.debug("*DIR* NameNode.create: file "
@@ -737,6 +769,7 @@ public class NameNode implements Namenod
   @Override // ClientProtocol
   public LocatedBlock append(String src, String clientName) 
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     String clientMachine = getClientMachine();
     if (stateChangeLog.isDebugEnabled()) {
       stateChangeLog.debug("*DIR* NameNode.append: file "
@@ -749,6 +782,7 @@ public class NameNode implements Namenod
 
   @Override // ClientProtocol
   public boolean recoverLease(String src, String clientName) throws IOException {
+    checkOperation(OperationCategory.WRITE);
     String clientMachine = getClientMachine();
     return namesystem.recoverLease(src, clientName, clientMachine);
   }
@@ -756,18 +790,21 @@ public class NameNode implements Namenod
   @Override // ClientProtocol
   public boolean setReplication(String src, short replication) 
     throws IOException {  
+    checkOperation(OperationCategory.WRITE);
     return namesystem.setReplication(src, replication);
   }
     
   @Override // ClientProtocol
   public void setPermission(String src, FsPermission permissions)
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     namesystem.setPermission(src, permissions);
   }
 
   @Override // ClientProtocol
   public void setOwner(String src, String username, String groupname)
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     namesystem.setOwner(src, username, groupname);
   }
 
@@ -777,6 +814,7 @@ public class NameNode implements Namenod
                                ExtendedBlock previous,
                                DatanodeInfo[] excludedNodes)
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     if(stateChangeLog.isDebugEnabled()) {
       stateChangeLog.debug("*BLOCK* NameNode.addBlock: file "
           +src+" for "+clientName);
@@ -800,6 +838,7 @@ public class NameNode implements Namenod
       final DatanodeInfo[] existings, final DatanodeInfo[] excludes,
       final int numAdditionalNodes, final String clientName
       ) throws IOException {
+    checkOperation(OperationCategory.WRITE);
     if (LOG.isDebugEnabled()) {
       LOG.debug("getAdditionalDatanode: src=" + src
           + ", blk=" + blk
@@ -825,8 +864,10 @@ public class NameNode implements Namenod
   /**
    * The client needs to give up on the block.
    */
+  @Override // ClientProtocol
   public void abandonBlock(ExtendedBlock b, String src, String holder)
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     if(stateChangeLog.isDebugEnabled()) {
       stateChangeLog.debug("*BLOCK* NameNode.abandonBlock: "
           +b+" of file "+src);
@@ -839,6 +880,7 @@ public class NameNode implements Namenod
   @Override // ClientProtocol
   public boolean complete(String src, String clientName, ExtendedBlock last)
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     if(stateChangeLog.isDebugEnabled()) {
       stateChangeLog.debug("*DIR* NameNode.complete: "
           + src + " for " + clientName);
@@ -852,8 +894,9 @@ public class NameNode implements Namenod
    * mark the block as corrupt.  In the future we might 
    * check the blocks are actually corrupt. 
    */
-  @Override
+  @Override // ClientProtocol, DatanodeProtocol
   public void reportBadBlocks(LocatedBlock[] blocks) throws IOException {
+    checkOperation(OperationCategory.WRITE);
     stateChangeLog.info("*DIR* NameNode.reportBadBlocks");
     for (int i = 0; i < blocks.length; i++) {
       ExtendedBlock blk = blocks[i].getBlock();
@@ -868,6 +911,7 @@ public class NameNode implements Namenod
   @Override // ClientProtocol
   public LocatedBlock updateBlockForPipeline(ExtendedBlock block, String clientName)
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     return namesystem.updateBlockForPipeline(block, clientName);
   }
 
@@ -876,6 +920,7 @@ public class NameNode implements Namenod
   public void updatePipeline(String clientName, ExtendedBlock oldBlock,
       ExtendedBlock newBlock, DatanodeID[] newNodes)
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     namesystem.updatePipeline(clientName, oldBlock, newBlock, newNodes);
   }
   
@@ -884,6 +929,7 @@ public class NameNode implements Namenod
       long newgenerationstamp, long newlength,
       boolean closeFile, boolean deleteblock, DatanodeID[] newtargets)
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     namesystem.commitBlockSynchronization(block,
         newgenerationstamp, newlength, closeFile, deleteblock, newtargets);
   }
@@ -891,12 +937,14 @@ public class NameNode implements Namenod
   @Override // ClientProtocol
   public long getPreferredBlockSize(String filename) 
       throws IOException {
+    checkOperation(OperationCategory.READ);
     return namesystem.getPreferredBlockSize(filename);
   }
     
   @Deprecated
   @Override // ClientProtocol
   public boolean rename(String src, String dst) throws IOException {
+    checkOperation(OperationCategory.WRITE);
     if(stateChangeLog.isDebugEnabled()) {
       stateChangeLog.debug("*DIR* NameNode.rename: " + src + " to " + dst);
     }
@@ -913,12 +961,14 @@ public class NameNode implements Namenod
   
   @Override // ClientProtocol
   public void concat(String trg, String[] src) throws IOException {
+    checkOperation(OperationCategory.WRITE);
     namesystem.concat(trg, src);
   }
   
   @Override // ClientProtocol
   public void rename(String src, String dst, Options.Rename... options)
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     if(stateChangeLog.isDebugEnabled()) {
       stateChangeLog.debug("*DIR* NameNode.rename: " + src + " to " + dst);
     }
@@ -933,11 +983,13 @@ public class NameNode implements Namenod
   @Deprecated
   @Override // ClientProtocol
   public boolean delete(String src) throws IOException {
+    checkOperation(OperationCategory.WRITE);
     return delete(src, true);
   }
 
   @Override // ClientProtocol
   public boolean delete(String src, boolean recursive) throws IOException {
+    checkOperation(OperationCategory.WRITE);
     if (stateChangeLog.isDebugEnabled()) {
       stateChangeLog.debug("*DIR* Namenode.delete: src=" + src
           + ", recursive=" + recursive);
@@ -962,6 +1014,7 @@ public class NameNode implements Namenod
   @Override // ClientProtocol
   public boolean mkdirs(String src, FsPermission masked, boolean createParent)
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     if(stateChangeLog.isDebugEnabled()) {
       stateChangeLog.debug("*DIR* NameNode.mkdirs: " + src);
     }
@@ -976,13 +1029,14 @@ public class NameNode implements Namenod
 
   @Override // ClientProtocol
   public void renewLease(String clientName) throws IOException {
+    checkOperation(OperationCategory.WRITE);
     namesystem.renewLease(clientName);        
   }
 
   @Override // ClientProtocol
   public DirectoryListing getListing(String src, byte[] startAfter,
-      boolean needLocation)
-  throws IOException {
+      boolean needLocation) throws IOException {
+    checkOperation(OperationCategory.READ);
     DirectoryListing files = namesystem.getListing(
         src, startAfter, needLocation);
     if (files != null) {
@@ -994,12 +1048,14 @@ public class NameNode implements Namenod
 
   @Override // ClientProtocol
   public HdfsFileStatus getFileInfo(String src)  throws IOException {
+    checkOperation(OperationCategory.READ);
     metrics.incrFileInfoOps();
     return namesystem.getFileInfo(src, true);
   }
 
   @Override // ClientProtocol
   public HdfsFileStatus getFileLinkInfo(String src) throws IOException { 
+    checkOperation(OperationCategory.READ);
     metrics.incrFileInfoOps();
     return namesystem.getFileInfo(src, false);
   }
@@ -1012,6 +1068,7 @@ public class NameNode implements Namenod
   @Override // ClientProtocol
   public DatanodeInfo[] getDatanodeReport(DatanodeReportType type)
       throws IOException {
+    checkOperation(OperationCategory.READ);
     DatanodeInfo results[] = namesystem.datanodeReport(type);
     if (results == null ) {
       throw new IOException("Cannot find datanode report");
@@ -1021,6 +1078,7 @@ public class NameNode implements Namenod
     
   @Override // ClientProtocol
   public boolean setSafeMode(SafeModeAction action) throws IOException {
+    // TODO:HA decide on OperationCategory for this
     return namesystem.setSafeMode(action);
   }
 
@@ -1034,55 +1092,65 @@ public class NameNode implements Namenod
   @Override // ClientProtocol
   public boolean restoreFailedStorage(String arg) 
       throws AccessControlException {
+    // TODO:HA decide on OperationCategory for this
     return namesystem.restoreFailedStorage(arg);
   }
 
   @Override // ClientProtocol
   public void saveNamespace() throws IOException {
+    // TODO:HA decide on OperationCategory for this
     namesystem.saveNamespace();
   }
 
   @Override // ClientProtocol
   public void refreshNodes() throws IOException {
+    // TODO:HA decide on OperationCategory for this
     namesystem.getBlockManager().getDatanodeManager().refreshNodes(
         new HdfsConfiguration());
   }
 
   @Override // NamenodeProtocol
   public long getTransactionID() {
+    // TODO:HA decide on OperationCategory for this
     return namesystem.getTransactionID();
   }
 
   @Override // NamenodeProtocol
   public CheckpointSignature rollEditLog() throws IOException {
+    // TODO:HA decide on OperationCategory for this
     return namesystem.rollEditLog();
   }
   
-  @Override
+  @Override // NamenodeProtocol
   public RemoteEditLogManifest getEditLogManifest(long sinceTxId)
   throws IOException {
+    // TODO:HA decide on OperationCategory for this
     return namesystem.getEditLog().getEditLogManifest(sinceTxId);
   }
     
   @Override // ClientProtocol
   public void finalizeUpgrade() throws IOException {
+    // TODO:HA decide on OperationCategory for this
     namesystem.finalizeUpgrade();
   }
 
   @Override // ClientProtocol
   public UpgradeStatusReport distributedUpgradeProgress(UpgradeAction action)
       throws IOException {
+    // TODO:HA decide on OperationCategory for this
     return namesystem.distributedUpgradeProgress(action);
   }
 
   @Override // ClientProtocol
   public void metaSave(String filename) throws IOException {
+    // TODO:HA decide on OperationCategory for this
     namesystem.metaSave(filename);
   }
 
   @Override // ClientProtocol
   public CorruptFileBlocks listCorruptFileBlocks(String path, String cookie)
       throws IOException {
+    checkOperation(OperationCategory.READ);
     Collection<FSNamesystem.CorruptFileBlockInfo> fbs =
       namesystem.listCorruptFileBlocks(path, cookie);
     
@@ -1104,34 +1172,40 @@ public class NameNode implements Namenod
    */
   @Override // ClientProtocol
   public void setBalancerBandwidth(long bandwidth) throws IOException {
+    // TODO:HA decide on OperationCategory for this
     namesystem.getBlockManager().getDatanodeManager().setBalancerBandwidth(bandwidth);
   }
   
   @Override // ClientProtocol
   public ContentSummary getContentSummary(String path) throws IOException {
+    checkOperation(OperationCategory.READ);
     return namesystem.getContentSummary(path);
   }
 
   @Override // ClientProtocol
   public void setQuota(String path, long namespaceQuota, long diskspaceQuota) 
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     namesystem.setQuota(path, namespaceQuota, diskspaceQuota);
   }
   
   @Override // ClientProtocol
   public void fsync(String src, String clientName) throws IOException {
+    checkOperation(OperationCategory.WRITE);
     namesystem.fsync(src, clientName);
   }
 
   @Override // ClientProtocol
   public void setTimes(String src, long mtime, long atime) 
       throws IOException {
+    checkOperation(OperationCategory.WRITE);
     namesystem.setTimes(src, mtime, atime);
   }
 
   @Override // ClientProtocol
   public void createSymlink(String target, String link, FsPermission dirPerms,
       boolean createParent) throws IOException {
+    checkOperation(OperationCategory.WRITE);
     metrics.incrCreateSymlinkOps();
     /* We enforce the MAX_PATH_LENGTH limit even though a symlink target 
      * URI may refer to a non-HDFS file system. 
@@ -1151,6 +1225,7 @@ public class NameNode implements Namenod
 
   @Override // ClientProtocol
   public String getLinkTarget(String path) throws IOException {
+    checkOperation(OperationCategory.READ);
     metrics.incrGetLinkTargetOps();
     /* Resolves the first symlink in the given path, returning a
      * new path consisting of the target of the symlink and any 
@@ -1599,4 +1674,43 @@ public class NameNode implements Namenod
     }
     return clientMachine;
   }
+  
+  @Override // HAServiceProtocol
+  public synchronized void monitorHealth() throws HealthCheckFailedException {
+    if (!haEnabled) {
+      return; // no-op if HA is not enabled
+    }
+    // TODO:HA implement health check
+    return;
+  }
+  
+  @Override // HAServiceProtocol
+  public synchronized void transitionToActive() throws ServiceFailedException {
+    if (!haEnabled) {
+      throw new ServiceFailedException("HA for namenode is not enabled");
+    }
+    state.setState(this, ACTIVE_STATE);
+  }
+  
+  @Override // HAServiceProtocol
+  public synchronized void transitionToStandby() throws ServiceFailedException {
+    if (!haEnabled) {
+      throw new ServiceFailedException("HA for namenode is not enabled");
+    }
+    state.setState(this, STANDBY_STATE);
+  }
+  
+  /** Check if an operation of given category is allowed */
+  protected synchronized void checkOperation(final OperationCategory op)
+      throws UnsupportedActionException {
+    state.checkOperation(this, op);
+  }
+  
+  public synchronized HAState getState() {
+    return state;
+  }
+  
+  public synchronized void setState(final HAState s) {
+    state = s;
+  }
 }

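Taken together, the NameNode changes above wire in a small state machine (the State design pattern): each RPC is tagged with an OperationCategory, checkOperation() delegates to the current HAState, and the HAServiceProtocol methods transitionToActive()/transitionToStandby() ask the current state to switch. A self-contained sketch of the delegation, with names that mirror the commit (an illustration only; the real HAState hierarchy is added elsewhere in r1159782):

    public class NameNodeStateDemo {
      abstract static class State {
        abstract boolean allows(String category);
      }
      // Active serves everything; the standby policy is still TODO:HA upstream.
      static final State ACTIVE  = new State() { boolean allows(String c) { return true;  } };
      static final State STANDBY = new State() { boolean allows(String c) { return false; } };

      private final boolean haEnabled;
      private State state;

      NameNodeStateDemo(boolean haEnabled) {
        this.haEnabled = haEnabled;
        // Same rule as the constructor diff: non-HA nodes come up active.
        this.state = !haEnabled ? ACTIVE : STANDBY;
      }

      void transitionToActive() {
        if (!haEnabled) {
          throw new IllegalStateException("HA for namenode is not enabled");
        }
        state = ACTIVE;
      }

      void checkOperation(String category) {
        if (!state.allows(category)) {
          throw new UnsupportedOperationException(category + " is not allowed");
        }
      }

      public static void main(String[] args) {
        NameNodeStateDemo nn = new NameNodeStateDemo(true); // HA: starts standby
        nn.transitionToActive();
        nn.checkOperation("WRITE"); // permitted once active
      }
    }
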
Copied: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/UnsupportedActionException.java (from r1159756, hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/UnsupportedActionException.java)
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/UnsupportedActionException.java?p2=hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/UnsupportedActionException.java&p1=hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/UnsupportedActionException.java&r1=1159756&r2=1159782&rev=1159782&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/UnsupportedActionException.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/UnsupportedActionException.java Fri Aug 19 20:47:40 2011
@@ -32,8 +32,7 @@ public class UnsupportedActionException 
   /** for java.io.Serializable */
   private static final long serialVersionUID = 1L;
 
-  public UnsupportedActionException(String action) {
-    super("Action " + action + "() is not supported.");
+  public UnsupportedActionException(String msg) {
+    super(msg);
   }
-
 }

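With the wrapper text gone, callers now build the full message themselves, which is what makes the category-based wording in BackupNode.checkOperation possible. Before and after, using the register() stub removed earlier in this commit as the example:

    // Old style: the constructor appended boilerplate.
    throw new UnsupportedActionException("register");
    // -> "Action register() is not supported."

    // New style: the caller supplies the complete message.
    throw new UnsupportedActionException(
        "Operation category " + op + " is not supported at the BackupNode");
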
Added: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/ActiveState.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/ActiveState.java?rev=1159782&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/ActiveState.java (added)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/ActiveState.java Fri Aug 19 20:47:40 2011
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.ha;
+
+import org.apache.hadoop.ha.ServiceFailedException;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.server.namenode.NameNode.OperationCategory;
+import org.apache.hadoop.hdfs.server.namenode.UnsupportedActionException;
+
+/**
+ * Active state of the namenode. In this state, namenode provides the namenode
+ * service and handles operations of type {@link OperationCategory#WRITE} and
+ * {@link OperationCategory#READ}.
+ */
+public class ActiveState extends HAState {
+  public ActiveState() {
+    super("active");
+  }
+
+  @Override
+  public void checkOperation(NameNode nn, OperationCategory op)
+      throws UnsupportedActionException {
+    return; // All operations other than journal are allowed in active state
+  }
+  
+  @Override
+  public void setState(NameNode nn, HAState s) throws ServiceFailedException {
+    if (s == NameNode.STANDBY_STATE) {
+      setStateInternal(nn, s);
+      return;
+    }
+    super.setState(nn, s);
+  }
+
+  @Override
+  protected void enterState(NameNode nn) throws ServiceFailedException {
+    // TODO:HA
+  }
+
+  @Override
+  protected void exitState(NameNode nn) throws ServiceFailedException {
+    // TODO:HA
+  }
+}

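ActiveState's counterpart, StandbyState, is referenced by the NameNode diff above but added elsewhere in r1159782. A hypothetical mirror image, inferred only from the HAState contract visible in ActiveState (the actual class may differ, and the blanket rejection policy is an assumption, since standby behavior is still TODO:HA):

    package org.apache.hadoop.hdfs.server.namenode.ha;

    import org.apache.hadoop.ha.ServiceFailedException;
    import org.apache.hadoop.hdfs.server.namenode.NameNode;
    import org.apache.hadoop.hdfs.server.namenode.NameNode.OperationCategory;
    import org.apache.hadoop.hdfs.server.namenode.UnsupportedActionException;

    public class StandbyStateSketch extends HAState {
      public StandbyStateSketch() {
        super("standby");
      }

      @Override
      public void checkOperation(NameNode nn, OperationCategory op)
          throws UnsupportedActionException {
        // Assumed policy: a standby rejects everything until it becomes
        // active; the committed StandbyState may allow some categories.
        throw new UnsupportedActionException("Operation category " + op
            + " is not supported in standby state");
      }

      @Override
      public void setState(NameNode nn, HAState s) throws ServiceFailedException {
        if (s == NameNode.ACTIVE_STATE) {
          setStateInternal(nn, s); // inferred from ActiveState.setState
          return;
        }
        super.setState(nn, s);
      }

      @Override
      protected void enterState(NameNode nn) throws ServiceFailedException {
        // TODO:HA
      }

      @Override
      protected void exitState(NameNode nn) throws ServiceFailedException {
        // TODO:HA
      }
    }
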
Added: hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/BadFencingConfigurationException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/BadFencingConfigurationException.java?rev=1159782&view=auto
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/BadFencingConfigurationException.java (added)
+++ hadoop/common/branches/HDFS-1623/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/BadFencingConfigurationException.java Fri Aug 19 20:47:40 2011
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.ha;
+
+import java.io.IOException;
+
+/**
+ * Indicates that the operator has specified an invalid configuration
+ * for fencing methods.
+ */
+class BadFencingConfigurationException extends IOException {
+  private static final long serialVersionUID = 1L;
+
+  public BadFencingConfigurationException(String msg) {
+    super(msg);
+  }
+
+  public BadFencingConfigurationException(String msg, Throwable cause) {
+    super(msg, cause);
+  }
+}
\ No newline at end of file


