hadoop-hdfs-commits mailing list archives

From sur...@apache.org
Subject svn commit: r1446495 - in /hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs: ./ src/contrib/bkjournal/ src/main/java/ src/main/java/org/apache/hadoop/hdfs/ src/main/java/org/apache/hadoop/hdfs/server/namenode/ src/main/native/ sr...
Date Fri, 15 Feb 2013 10:18:07 GMT
Author: suresh
Date: Fri Feb 15 10:18:03 2013
New Revision: 1446495

URL: http://svn.apache.org/r1446495
Log:
Merged trunk to branch-trunk-win

Modified:
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/pom.xml
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/native/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/hdfs/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLease.java
    hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameEditsConfigs.java

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs:r1443752-1446489

Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1446495&r1=1446494&r2=1446495&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Fri Feb 15 10:18:03 2013
@@ -314,6 +314,13 @@ Release 2.0.4-beta - UNRELEASED
     HDFS-4470. Several HDFS tests attempt file operations on invalid HDFS
     paths when running on Windows. (Chris Nauroth via suresh)
 
+    HDFS-4471. Namenode WebUI file browsing does not work with wildcard
+    addresses configured. (Andrew Wang via atm)
+
+    HDFS-4342. Directories configured in dfs.namenode.edits.dir.required
+    but not in dfs.namenode.edits.dir are silently ignored.  (Arpit Agarwal
+    via szetszwo)
+
 Release 2.0.3-alpha - 2013-02-06
 
   INCOMPATIBLE CHANGES
@@ -2289,6 +2296,9 @@ Release 0.23.7 - UNRELEASED
 
     HDFS-4288. NN accepts incremental BR as IBR in safemode (daryn via kihwal)
 
+    HDFS-4495. Allow client-side lease renewal to be retried beyond soft-limit
+    (kihwal)
+
 Release 0.23.6 - UNRELEASED
 
   INCOMPATIBLE CHANGES
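
The HDFS-4342 entry above tightens the contract between
dfs.namenode.edits.dir.required and dfs.namenode.edits.dir. A minimal
sketch of the mis-configuration that used to be silently ignored (the
class, main method, and paths are illustrative, not part of the patch;
the key constants are the ones this patch touches in DFSConfigKeys):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;

    public class RequiredEditsDirExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Only edits1 is listed as an edits directory...
        conf.set(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
            "file:///data/edits1");
        // ...while edits2 is marked required but never listed. Before
        // this patch the NameNode silently ignored edits2; with the new
        // checkConfiguration (see the FSNamesystem hunk below) startup
        // throws an IllegalArgumentException naming both keys.
        conf.set(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_REQUIRED_KEY,
            "file:///data/edits2");
      }
    }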

Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/pom.xml?rev=1446495&r1=1446494&r2=1446495&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/pom.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/pom.xml Fri Feb 15 10:18:03 2013
@@ -209,7 +209,7 @@ http://maven.apache.org/xsd/maven-4.0.0.
             </goals>
             <configuration>
               <compile>false</compile>
-              <workingDirectory>${project.build.directory}/generated-src/main/jsp</workingDirectory>
+              <workingDirectory>${project.build.directory}/generated-sources/java</workingDirectory>
               <webFragmentFile>${project.build.directory}/hdfs-jsp-servlet-definitions.xml</webFragmentFile>
               <packageName>org.apache.hadoop.hdfs.server.namenode</packageName>
               <sources>
@@ -228,7 +228,7 @@ http://maven.apache.org/xsd/maven-4.0.0.
             </goals>
             <configuration>
               <compile>false</compile>
-              <workingDirectory>${project.build.directory}/generated-src/main/jsp</workingDirectory>
+              <workingDirectory>${project.build.directory}/generated-sources/java</workingDirectory>
               <webFragmentFile>${project.build.directory}/secondary-jsp-servlet-definitions.xml</webFragmentFile>
               <packageName>org.apache.hadoop.hdfs.server.namenode</packageName>
               <sources>
@@ -247,7 +247,7 @@ http://maven.apache.org/xsd/maven-4.0.0.
             </goals>
             <configuration>
               <compile>false</compile>
-              <workingDirectory>${project.build.directory}/generated-src/main/jsp</workingDirectory>
+              <workingDirectory>${project.build.directory}/generated-sources/java</workingDirectory>
               <webFragmentFile>${project.build.directory}/journal-jsp-servlet-definitions.xml</webFragmentFile>
               <packageName>org.apache.hadoop.hdfs.server.journalservice</packageName>
               <sources>
@@ -266,7 +266,7 @@ http://maven.apache.org/xsd/maven-4.0.0.
             </goals>
             <configuration>
               <compile>false</compile>
-              <workingDirectory>${project.build.directory}/generated-src/main/jsp</workingDirectory>
+              <workingDirectory>${project.build.directory}/generated-sources/java</workingDirectory>
               <webFragmentFile>${project.build.directory}/datanode-jsp-servlet-definitions.xml</webFragmentFile>
               <packageName>org.apache.hadoop.hdfs.server.datanode</packageName>
               <sources>
@@ -301,7 +301,7 @@ http://maven.apache.org/xsd/maven-4.0.0.
         <artifactId>build-helper-maven-plugin</artifactId>
         <executions>
           <execution>
-            <id>add-source</id>
+            <id>add-jsp-generated-sources-directory</id>
             <phase>generate-sources</phase>
             <goals>
               <goal>add-source</goal>
@@ -309,7 +309,6 @@ http://maven.apache.org/xsd/maven-4.0.0.
             <configuration>
               <sources>
                 <source>${project.build.directory}/generated-sources/java</source>
-                <source>${project.build.directory}/generated-src/main/jsp</source>
               </sources>
             </configuration>
           </execution>
@@ -323,14 +322,14 @@ http://maven.apache.org/xsd/maven-4.0.0.
         </configuration>
         <executions>
           <execution>
-            <id>create-protobuf-generated-sources-directory</id>
+            <id>create-jsp-generated-sources-directory</id>
             <phase>initialize</phase>
             <goals>
               <goal>run</goal>
             </goals>
             <configuration>
               <target>
-                <mkdir dir="target/generated-sources/java" />
+                <mkdir dir="${project.build.directory}/generated-sources/java" />
               </target>
             </configuration>
           </execution>
@@ -408,80 +407,96 @@ http://maven.apache.org/xsd/maven-4.0.0.
         </executions>
       </plugin>
       <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>exec-maven-plugin</artifactId>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-maven-plugins</artifactId>
         <executions>
           <execution>
-            <id>compile-proto</id>
+            <id>compile-protoc</id>
             <phase>generate-sources</phase>
             <goals>
-              <goal>exec</goal>
+              <goal>protoc</goal>
             </goals>
             <configuration>
-              <executable>protoc</executable>
-              <arguments>
-                <argument>-I../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
-                <argument>-Isrc/main/proto/</argument>
-                <argument>--java_out=target/generated-sources/java</argument>
-                <argument>src/main/proto/hdfs.proto</argument>
-                <argument>src/main/proto/GetUserMappingsProtocol.proto</argument>
-                <argument>src/main/proto/HAZKInfo.proto</argument>
-                <argument>src/main/proto/InterDatanodeProtocol.proto</argument>
-                <argument>src/main/proto/JournalProtocol.proto</argument>
-                <argument>src/main/proto/RefreshAuthorizationPolicyProtocol.proto</argument>
-                <argument>src/main/proto/RefreshUserMappingsProtocol.proto</argument>
-                <argument>src/main/proto/datatransfer.proto</argument>
-              </arguments>
+              <imports>
+                <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
+                <param>${basedir}/src/main/proto</param>
+              </imports>
+              <source>
+                <directory>${basedir}/src/main/proto</directory>
+                <includes>
+                  <include>GetUserMappingsProtocol.proto</include>
+                  <include>HAZKInfo.proto</include>
+                  <include>InterDatanodeProtocol.proto</include>
+                  <include>JournalProtocol.proto</include>
+                  <include>RefreshAuthorizationPolicyProtocol.proto</include>
+                  <include>RefreshUserMappingsProtocol.proto</include>
+                  <include>datatransfer.proto</include>
+                  <include>hdfs.proto</include>
+                </includes>
+              </source>
+              <output>${project.build.directory}/generated-sources/java</output>
             </configuration>
           </execution>
           <execution>
-            <id>compile-proto-datanode</id>
+            <id>compile-protoc-datanode</id>
             <phase>generate-sources</phase>
             <goals>
-              <goal>exec</goal>
+              <goal>protoc</goal>
             </goals>
             <configuration>
-              <executable>protoc</executable>
-              <arguments>
-                <argument>-I../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
-                <argument>-Isrc/main/proto/</argument>
-                <argument>--java_out=target/generated-sources/java</argument>
-                <argument>src/main/proto/ClientDatanodeProtocol.proto</argument>
-                <argument>src/main/proto/DatanodeProtocol.proto</argument>
-              </arguments>
+              <imports>
+                <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
+                <param>${basedir}/src/main/proto</param>
+              </imports>
+              <source>
+                <directory>${basedir}/src/main/proto</directory>
+                <includes>
+                  <include>ClientDatanodeProtocol.proto</include>
+                  <include>DatanodeProtocol.proto</include>
+                </includes>
+              </source>
+              <output>${project.build.directory}/generated-sources/java</output>
             </configuration>
           </execution>
           <execution>
-            <id>compile-proto-namenode</id>
+            <id>compile-protoc-namenode</id>
             <phase>generate-sources</phase>
             <goals>
-              <goal>exec</goal>
+              <goal>protoc</goal>
             </goals>
             <configuration>
-              <executable>protoc</executable>
-              <arguments>
-                <argument>-I../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
-                <argument>-Isrc/main/proto/</argument>
-                <argument>--java_out=target/generated-sources/java</argument>
-                <argument>src/main/proto/ClientNamenodeProtocol.proto</argument>
-                <argument>src/main/proto/NamenodeProtocol.proto</argument>
-              </arguments>
+              <imports>
+                <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
+                <param>${basedir}/src/main/proto</param>
+              </imports>
+              <source>
+                <directory>${basedir}/src/main/proto</directory>
+                <includes>
+                  <include>ClientNamenodeProtocol.proto</include>
+                  <include>NamenodeProtocol.proto</include>
+                </includes>
+              </source>
+              <output>${project.build.directory}/generated-sources/java</output>
             </configuration>
           </execution>
           <execution>
-            <id>compile-proto-qjournal</id>
+            <id>compile-protoc-qjournal</id>
             <phase>generate-sources</phase>
             <goals>
-              <goal>exec</goal>
+              <goal>protoc</goal>
             </goals>
             <configuration>
-              <executable>protoc</executable>
-              <arguments>
-                <argument>-I../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
-                <argument>-Isrc/main/proto/</argument>
-                <argument>--java_out=target/generated-sources/java</argument>
-                <argument>src/main/proto/QJournalProtocol.proto</argument>
-              </arguments>
+              <imports>
+                <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
+                <param>${basedir}/src/main/proto</param>
+              </imports>
+              <source>
+                <directory>${basedir}/src/main/proto</directory>
+                <includes>
+                  <include>QJournalProtocol.proto</include>
+                </includes>
+              </source>
+              <output>${project.build.directory}/generated-sources/java</output>
             </configuration>
           </execution>
         </executions>

Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml?rev=1446495&r1=1446494&r2=1446495&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml Fri Feb 15 10:18:03 2013
@@ -92,63 +92,28 @@ http://maven.apache.org/xsd/maven-4.0.0.
   <build>
     <plugins>
       <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>build-helper-maven-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>add-source</id>
-            <phase>generate-sources</phase>
-            <goals>
-              <goal>add-source</goal>
-            </goals>
-            <configuration>
-              <sources>
-                <source>${project.build.directory}/generated-sources/java</source>
-              </sources>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-antrun-plugin</artifactId>
-        <configuration>
-          <skipTests>false</skipTests>
-        </configuration>
-        <executions>
-          <execution>
-            <id>create-protobuf-generated-sources-directory</id>
-            <phase>initialize</phase>
-            <goals>
-              <goal>run</goal>
-            </goals>
-            <configuration>
-              <target>
-                <mkdir dir="target/generated-sources/java" />
-              </target>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>exec-maven-plugin</artifactId>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-maven-plugins</artifactId>
         <executions>
           <execution>
-            <id>compile-proto</id>
+            <id>compile-protoc</id>
             <phase>generate-sources</phase>
             <goals>
-              <goal>exec</goal>
+              <goal>protoc</goal>
             </goals>
             <configuration>
-              <executable>protoc</executable>
-              <arguments>
-                <argument>-I../../../../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
-                <argument>-Isrc/main/proto/</argument>
-                <argument>-I../../main/proto</argument>
-                <argument>--java_out=target/generated-sources/java</argument>
-                <argument>src/main/proto/bkjournal.proto</argument>
-              </arguments>
+              <imports>
+                <param>${basedir}/../../../../../hadoop-common-project/hadoop-common/src/main/proto</param>
+                <param>${basedir}/../../../../../hadoop-hdfs-project/hadoop-hdfs/src/main/proto</param>
+                <param>${basedir}/src/main/proto</param>
+              </imports>
+              <source>
+                <directory>${basedir}/src/main/proto</directory>
+                <includes>
+                  <include>bkjournal.proto</include>
+                </includes>
+              </source>
+              <output>${project.build.directory}/generated-sources/java</output>
             </configuration>
           </execution>
         </executions>

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java:r1443752-1446489

Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java?rev=1446495&r1=1446494&r2=1446495&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java Fri Feb 15 10:18:03 2013
@@ -620,10 +620,10 @@ public class DFSClient implements java.i
       } catch (IOException e) {
         // Abort if the lease has already expired. 
         final long elapsed = Time.now() - getLastLeaseRenewal();
-        if (elapsed > HdfsConstants.LEASE_SOFTLIMIT_PERIOD) {
+        if (elapsed > HdfsConstants.LEASE_HARDLIMIT_PERIOD) {
           LOG.warn("Failed to renew lease for " + clientName + " for "
               + (elapsed/1000) + " seconds (>= soft-limit ="
-              + (HdfsConstants.LEASE_SOFTLIMIT_PERIOD/1000) + " seconds.) "
+              + (HdfsConstants.LEASE_HARDLIMIT_PERIOD/1000) + " seconds.) "
               + "Closing all files being written ...", e);
           closeAllFilesBeingWritten(true);
         } else {
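
The hunk above is the branch-trunk-win pickup of HDFS-4495 from
CHANGES.txt: a failed renewLease() no longer forces the client to close
its writers once the soft limit has elapsed, only once the hard limit
has. (Note the unchanged context line still prints the words
"soft-limit"; only the compared and printed constants change.) Restated
as a standalone predicate (the class and method are hypothetical
scaffolding, not DFSClient API; the constant is the one used in the hunk):

    import org.apache.hadoop.hdfs.protocol.HdfsConstants;

    class LeaseAbortRule {
      // Abort (close all files being written) only past the hard limit
      // (one hour); between the soft limit (one minute) and the hard
      // limit the client now simply retries the renewal.
      static boolean shouldAbortWriters(long elapsedMsSinceRenewal) {
        return elapsedMsSinceRenewal > HdfsConstants.LEASE_HARDLIMIT_PERIOD;
      }
    }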

Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java?rev=1446495&r1=1446494&r2=1446495&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java Fri Feb 15 10:18:03 2013
@@ -235,6 +235,7 @@ public class DFSConfigKeys extends Commo
   public static final String  DFS_NAMENODE_SHARED_EDITS_DIR_KEY = "dfs.namenode.shared.edits.dir";
   public static final String  DFS_NAMENODE_EDITS_PLUGIN_PREFIX = "dfs.namenode.edits.journal-plugin";
   public static final String  DFS_NAMENODE_EDITS_DIR_REQUIRED_KEY = "dfs.namenode.edits.dir.required";
+  public static final String  DFS_NAMENODE_EDITS_DIR_DEFAULT = "file:///tmp/hadoop/dfs/name";
   public static final String  DFS_CLIENT_READ_PREFETCH_SIZE_KEY = "dfs.client.read.prefetch.size";

   public static final String  DFS_CLIENT_RETRY_WINDOW_BASE= "dfs.client.retry.window.base";
   public static final String  DFS_METRICS_SESSION_ID_KEY = "dfs.metrics.session-id";

Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=1446495&r1=1446494&r2=1446495&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Fri Feb 15 10:18:03 2013
@@ -127,6 +127,7 @@ import org.apache.hadoop.fs.permission.F
 import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
 import org.apache.hadoop.ha.ServiceFailedException;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -422,51 +423,73 @@ public class FSNamesystem implements Nam
   }
   
   /**
-
-  /**
-   * Instantiates an FSNamesystem loaded from the image and edits
-   * directories specified in the passed Configuration.
-   * 
-   * @param conf the Configuration which specifies the storage directories
-   *             from which to load
-   * @return an FSNamesystem which contains the loaded namespace
-   * @throws IOException if loading fails
+   * Check the supplied configuration for correctness.
+   * @param conf Supplies the configuration to validate.
+   * @throws IOException if the configuration could not be queried.
+   * @throws IllegalArgumentException if the configuration is invalid.
    */
-  public static FSNamesystem loadFromDisk(Configuration conf)
+  private static void checkConfiguration(Configuration conf)
       throws IOException {
-    Collection<URI> namespaceDirs = FSNamesystem.getNamespaceDirs(conf);
-    List<URI> namespaceEditsDirs = 
-      FSNamesystem.getNamespaceEditsDirs(conf);
-    return loadFromDisk(conf, namespaceDirs, namespaceEditsDirs);
-  }
 
-  /**
-   * Instantiates an FSNamesystem loaded from the image and edits
-   * directories passed.
-   * 
-   * @param conf the Configuration which specifies the storage directories
-   *             from which to load
-   * @param namespaceDirs directories to load the fsimages
-   * @param namespaceEditsDirs directories to load the edits from
-   * @return an FSNamesystem which contains the loaded namespace
-   * @throws IOException if loading fails
-   */
-  public static FSNamesystem loadFromDisk(Configuration conf,
-      Collection<URI> namespaceDirs, List<URI> namespaceEditsDirs)
-      throws IOException {
+    final Collection<URI> namespaceDirs =
+        FSNamesystem.getNamespaceDirs(conf);
+    final Collection<URI> editsDirs =
+        FSNamesystem.getNamespaceEditsDirs(conf);
+    final Collection<URI> requiredEditsDirs =
+        FSNamesystem.getRequiredNamespaceEditsDirs(conf);
+    final Collection<URI> sharedEditsDirs =
+        FSNamesystem.getSharedEditsDirs(conf);
+
+    for (URI u : requiredEditsDirs) {
+      if (u.toString().compareTo(
+              DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_DEFAULT) == 0) {
+        continue;
+      }
+
+      // Each required directory must also be in editsDirs or in
+      // sharedEditsDirs.
+      if (!editsDirs.contains(u) &&
+          !sharedEditsDirs.contains(u)) {
+        throw new IllegalArgumentException(
+            "Required edits directory " + u.toString() + " not present in " +
+            DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY + ". " +
+            DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY + "=" +
+            editsDirs.toString() + "; " +
+            DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_REQUIRED_KEY + "=" +
+            requiredEditsDirs.toString() + ". " +
+            DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY + "=" +
+            sharedEditsDirs.toString() + ".");
+      }
+    }
 
     if (namespaceDirs.size() == 1) {
       LOG.warn("Only one image storage directory ("
           + DFS_NAMENODE_NAME_DIR_KEY + ") configured. Beware of dataloss"
           + " due to lack of redundant storage directories!");
     }
-    if (namespaceEditsDirs.size() == 1) {
+    if (editsDirs.size() == 1) {
       LOG.warn("Only one namespace edits storage directory ("
           + DFS_NAMENODE_EDITS_DIR_KEY + ") configured. Beware of dataloss"
           + " due to lack of redundant storage directories!");
     }
+  }
+
+  /**
+   * Instantiates an FSNamesystem loaded from the image and edits
+   * directories specified in the passed Configuration.
+   *
+   * @param conf the Configuration which specifies the storage directories
+   *             from which to load
+   * @return an FSNamesystem which contains the loaded namespace
+   * @throws IOException if loading fails
+   */
+  public static FSNamesystem loadFromDisk(Configuration conf)
+      throws IOException {
 
-    FSImage fsImage = new FSImage(conf, namespaceDirs, namespaceEditsDirs);
+    checkConfiguration(conf);
+    FSImage fsImage = new FSImage(conf,
+        FSNamesystem.getNamespaceDirs(conf),
+        FSNamesystem.getNamespaceEditsDirs(conf));
     FSNamesystem namesystem = new FSNamesystem(conf, fsImage);
     StartupOption startOpt = NameNode.getStartupOption(conf);
     if (startOpt == StartupOption.RECOVER) {
@@ -913,7 +936,8 @@ public class FSNamesystem implements Nam
           "\n\t\t- use Backup Node as a persistent and up-to-date storage " +
           "of the file system meta-data.");
     } else if (dirNames.isEmpty()) {
-      dirNames = Collections.singletonList("file:///tmp/hadoop/dfs/name");
+      dirNames = Collections.singletonList(
+          DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_DEFAULT);
     }
     return Util.stringCollectionAsURIs(dirNames);
   }
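
checkConfiguration above enforces a single rule: every URI named in
dfs.namenode.edits.dir.required must also appear in dfs.namenode.edits.dir
or dfs.namenode.shared.edits.dir, with the built-in default exempt. A
condensed restatement (the helper class and signature are illustrative;
the patch's version additionally reports the values of all three keys in
the exception message):

    import java.net.URI;
    import java.util.Collection;

    class RequiredDirsRule {
      static void check(Collection<URI> required, Collection<URI> edits,
                        Collection<URI> shared, String defaultDir) {
        for (URI u : required) {
          if (u.toString().equals(defaultDir)) {
            continue;  // the hard-coded default never has to be re-listed
          }
          if (!edits.contains(u) && !shared.contains(u)) {
            throw new IllegalArgumentException(
                "Required edits directory " + u + " not configured");
          }
        }
      }
    }

The patch spells the default comparison as compareTo(...) == 0; the
equals(...) above is equivalent for this purpose.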

Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1446495&r1=1446494&r2=1446495&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java Fri Feb 15 10:18:03 2013
@@ -78,6 +78,7 @@ import org.apache.hadoop.util.ServicePlu
 import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 
@@ -781,6 +782,26 @@ public class NameNode {
   }
 
   /**
+   * Clone the supplied configuration but remove the shared edits dirs.
+   *
+   * @param conf Supplies the original configuration.
+   * @return Cloned configuration without the shared edit dirs.
+   * @throws IOException on failure to generate the configuration.
+   */
+  private static Configuration getConfigurationWithoutSharedEdits(
+      Configuration conf)
+      throws IOException {
+    List<URI> editsDirs = FSNamesystem.getNamespaceEditsDirs(conf, false);
+    String editsDirsString = Joiner.on(",").join(editsDirs);
+
+    Configuration confWithoutShared = new Configuration(conf);
+    confWithoutShared.unset(DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY);
+    confWithoutShared.setStrings(DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
+        editsDirsString);
+    return confWithoutShared;
+  }
+
+  /**
    * Format a new shared edits dir and copy in enough edit log segments so that
    * the standby NN can start up.
    * 
@@ -809,11 +830,8 @@ public class NameNode {
 
     NNStorage existingStorage = null;
     try {
-      Configuration confWithoutShared = new Configuration(conf);
-      confWithoutShared.unset(DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY);
-      FSNamesystem fsns = FSNamesystem.loadFromDisk(confWithoutShared,
-          FSNamesystem.getNamespaceDirs(conf),
-          FSNamesystem.getNamespaceEditsDirs(conf, false));
+      FSNamesystem fsns =
+          FSNamesystem.loadFromDisk(getConfigurationWithoutSharedEdits(conf));
       
       existingStorage = fsns.getFSImage().getStorage();
       NamespaceInfo nsInfo = existingStorage.getNamespaceInfo();
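
getConfigurationWithoutSharedEdits above packages the pattern the old
inline code spelled out: clone the configuration, drop the shared-edits
key, and rewrite the edits-dir key to the non-shared list, so that
loadFromDisk validates exactly the directories it will use. The general
shape of that pattern (the class and parameter names are illustrative;
Joiner is the Guava class imported in the hunk):

    import java.util.List;
    import org.apache.hadoop.conf.Configuration;
    import com.google.common.base.Joiner;

    class ConfigCloneSketch {
      static Configuration cloneWithoutKey(Configuration conf,
          String keyToDrop, String keyToRewrite, List<String> values) {
        Configuration copy = new Configuration(conf);  // copy constructor
        copy.unset(keyToDrop);                 // e.g. the shared-edits key
        copy.set(keyToRewrite, Joiner.on(",").join(values));
        return copy;
      }
    }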

Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java?rev=1446495&r1=1446494&r2=1446495&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeJspHelper.java Fri Feb 15 10:18:03 2013
@@ -25,6 +25,7 @@ import java.lang.management.MemoryMXBean
 import java.lang.management.MemoryUsage;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
+import java.net.URI;
 import java.net.URLEncoder;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
@@ -453,7 +454,13 @@ class NamenodeJspHelper {
       nodeToRedirect = nn.getHttpAddress().getHostName();
       redirectPort = nn.getHttpAddress().getPort();
     }
-    String addr = nn.getNameNodeAddressHostPortString();
+
+    InetSocketAddress rpcAddr = nn.getNameNodeAddress();
+    String rpcHost = rpcAddr.getAddress().isAnyLocalAddress()
+      ? URI.create(request.getRequestURL().toString()).getHost()
+      : rpcAddr.getAddress().getHostAddress();
+    String addr = rpcHost + ":" + rpcAddr.getPort();
+
     String fqdn = InetAddress.getByName(nodeToRedirect).getCanonicalHostName();
     redirectLocation = HttpConfig.getSchemePrefix() + fqdn + ":" + redirectPort
         + "/browseDirectory.jsp?namenodeInfoPort="

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/native/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/native:r1443752-1446489

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/datanode:r1443752-1446489

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs:r1443752-1446489

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary:r1443752-1446489

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/hdfs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/hdfs:r1443752-1446489

Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLease.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLease.java?rev=1446495&r1=1446494&r2=1446495&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLease.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLease.java Fri Feb 15 10:18:03 2013
@@ -41,6 +41,7 @@ import org.apache.hadoop.fs.Options;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
+import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
@@ -93,9 +94,26 @@ public class TestLease {
 
       // We don't need to wait the lease renewer thread to act.
       // call renewLease() manually.
-      // make it look like lease has already expired.
+      // make it look like the soft limit has been exceeded.
       LeaseRenewer originalRenewer = dfs.getLeaseRenewer();
-      dfs.lastLeaseRenewal = Time.now() - 300000;
+      dfs.lastLeaseRenewal = Time.now()
+      - HdfsConstants.LEASE_SOFTLIMIT_PERIOD - 1000;
+      try {
+        dfs.renewLease();
+      } catch (IOException e) {}
+
+      // Things should continue to work until it passes the hard limit
+      // without renewing.
+      try {
+        d_out.write(buf, 0, 1024);
+        LOG.info("Write worked beyond the soft limit as expected.");
+      } catch (IOException e) {
+        Assert.fail("Write failed.");
+      }
+
+      // make it look like the hard limit has been exceeded.
+      dfs.lastLeaseRenewal = Time.now()
+      - HdfsConstants.LEASE_HARDLIMIT_PERIOD - 1000;
       dfs.renewLease();
 
       // this should not work.

Modified: hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameEditsConfigs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameEditsConfigs.java?rev=1446495&r1=1446494&r2=1446495&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameEditsConfigs.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameEditsConfigs.java Fri Feb 15 10:18:03 2013
@@ -309,6 +309,88 @@ public class TestNameEditsConfigs {
   }
 
   /**
+   * Test edits.dir.required configuration options.
+   * 1. Directory present in dfs.namenode.edits.dir.required but not in
+   *    dfs.namenode.edits.dir. Expected to fail.
+   * 2. Directory present in both dfs.namenode.edits.dir.required and
+   *    dfs.namenode.edits.dir. Expected to succeed.
+   * 3. Directory present only in dfs.namenode.edits.dir. Expected to
+   *    succeed.
+   */
+  @Test
+  public void testNameEditsRequiredConfigs() throws IOException {
+    MiniDFSCluster cluster = null;
+    File nameAndEditsDir = new File(base_dir, "name_and_edits");
+    File nameAndEditsDir2 = new File(base_dir, "name_and_edits2");
+
+    // 1
+    // Bad configuration. Add a directory to dfs.namenode.edits.dir.required
+    // without adding it to dfs.namenode.edits.dir.
+    try {
+      Configuration conf = new HdfsConfiguration();
+      conf.set(
+          DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_REQUIRED_KEY,
+          nameAndEditsDir2.toURI().toString());
+      conf.set(
+          DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
+          nameAndEditsDir.toURI().toString());
+      cluster = new MiniDFSCluster.Builder(conf)
+          .numDataNodes(NUM_DATA_NODES)
+          .manageNameDfsDirs(false)
+          .build();
+      fail("Successfully started cluster but should not have been able to.");
+    } catch (IllegalArgumentException iae) { // expect to fail
+      LOG.info("EXPECTED: cluster start failed due to bad configuration" + iae);
+    } finally {
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+      cluster = null;
+    }
+
+    // 2
+    // Good configuration. Add a directory to both dfs.namenode.edits.dir.required
+    // and dfs.namenode.edits.dir.
+    try {
+      Configuration conf = new HdfsConfiguration();
+      conf.setStrings(
+          DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
+          nameAndEditsDir.toURI().toString(),
+          nameAndEditsDir2.toURI().toString());
+      conf.set(
+          DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_REQUIRED_KEY,
+          nameAndEditsDir2.toURI().toString());
+      cluster = new MiniDFSCluster.Builder(conf)
+          .numDataNodes(NUM_DATA_NODES)
+          .manageNameDfsDirs(false)
+          .build();
+    } finally {
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+
+    // 3
+    // Good configuration. Adds a directory to dfs.namenode.edits.dir but not to
+    // dfs.namenode.edits.dir.required.
+    try {
+      Configuration conf = new HdfsConfiguration();
+      conf.setStrings(
+          DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY,
+          nameAndEditsDir.toURI().toString(),
+          nameAndEditsDir2.toURI().toString());
+      cluster = new MiniDFSCluster.Builder(conf)
+          .numDataNodes(NUM_DATA_NODES)
+          .manageNameDfsDirs(false)
+          .build();
+    } finally {
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
+
+  /**
    * Test various configuration options of dfs.namenode.name.dir and dfs.namenode.edits.dir
    * This test tries to simulate failure scenarios.
    * 1. Start cluster with shared name and edits dir


