hadoop-common-commits mailing list archives

From a..@apache.org
Subject svn commit: r1179484 [1/2] - in /hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common: ./ src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/conf/ src/main/java/org/apache/hadoop/http/ src/main/java/org/apache/hadoop/io/retry...
Date Thu, 06 Oct 2011 01:16:56 GMT
Author: atm
Date: Thu Oct  6 01:16:48 2011
New Revision: 1179484

URL: http://svn.apache.org/viewvc?rev=1179484&view=rev
Log:
Merging trunk to HDFS-1623 branch.

Added:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-applications.sh
      - copied unchanged from r1179483, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-applications.sh
Modified:
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/hadoop.control/preinst
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/spec/hadoop.spec
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/core-site.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-env.sh
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/log4j.properties
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAvroRpc.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
    hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt Thu Oct  6 01:16:48 2011
@@ -4,10 +4,6 @@ Trunk (unreleased changes)
 
   INCOMPATIBLE CHANGES
    
-   HADOOP-7542. Change Configuration XML format to 1.1 to add support for
-                serializing additional characters. This requires XML1.1
-                support in the XML parser (Christopher Egner via harsh)
-
   IMPROVEMENTS
 
     HADOOP-7595. Upgrade dependency to Avro 1.5.3. (Alejandro Abdelnur via atm)
@@ -21,10 +17,19 @@ Trunk (unreleased changes)
                  close (atm)
     
     HADOOP-7668. Add a NetUtils method that can tell if an InetAddress 
-    belongs to local host. (suresh)
+                 belongs to local host. (suresh)
 
     HADOOP-7687 Make getProtocolSignature public  (sanjay)
 
+    HADOOP-7693. Enhance AvroRpcEngine to support the new #addProtocol
+                 interface introduced in HADOOP-7524.  (cutting)
+
+    HADOOP-7716. RPC protocol registration on SS does not log the protocol name
+                 (only the class which may be different) (sanjay)
+
+    HADOOP-7717. Move handling of concurrent client fail-overs to
+                 RetryInvocationHandler (atm)
+
   BUGS
 
     HADOOP-7606. Upgrade Jackson to version 1.7.1 to match the version required
@@ -42,7 +47,12 @@ Trunk (unreleased changes)
     
     HADOOP-6220. HttpServer wraps InterruptedExceptions by IOExceptions if interrupted 
                  in startup (stevel)
-                 
+
+    HADOOP-7703. Improved exception handling when shutting down the web server.
+    (Devaraj K via Eric Yang)
+
+    HADOOP-7704. Reduce the number of objects created by JMXJsonServlet.
+    (Devaraj K via Eric Yang)
 
 Release 0.23.0 - Unreleased
 
@@ -413,6 +423,18 @@ Release 0.23.0 - Unreleased
     HADOOP-7575. Enhanced LocalDirAllocator to support fully-qualified
     paths. (Jonathan Eagles via vinodkv)
 
+    HADOOP-7469. Add a standard handler for socket connection problems which
+                 improves diagnostics. (Uma Maheswara Rao G and stevel via stevel)
+
+    HADOOP-7710. Added hadoop-setup-applications.sh for creating
+    application directory (Arpit Gupta via Eric Yang)
+
+    HADOOP-7707. Added toggle for dfs.support.append, webhdfs and hadoop proxy
+    user to setup config script. (Arpit Gupta via Eric Yang)
+
+    HADOOP-7720. Added parameter for HBase user to setup config script.
+    (Arpit Gupta via Eric Yang)
+
   OPTIMIZATIONS
   
     HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole
@@ -634,6 +656,31 @@ Release 0.23.0 - Unreleased
     HADOOP-7662. Fixed logs servlet to use the pathspec '/*' instead of '/'
     for correct filtering. (Thomas Graves via vinodkv)
 
+    HADOOP-7691. Fixed conflict uid for install packages. (Eric Yang)
+
+    HADOOP-7603. Set hdfs, mapred uid, and hadoop uid to fixed numbers. 
+    (Eric Yang)
+
+    HADOOP-7658. Fixed HADOOP_SECURE_DN_USER environment variable in 
+    hadoop-env.sh (Eric Yang)
+
+    HADOOP-7684. Added init.d script for jobhistory server and
+    secondary namenode. (Eric Yang)
+
+    HADOOP-7715. Removed unnecessary security logger configuration. (Eric Yang)
+
+    HADOOP-7685. Improved directory ownership check function in 
+    hadoop-setup-conf.sh. (Eric Yang)
+
+    HADOOP-7711. Fixed recursive sourcing of HADOOP_OPTS environment
+    variables (Arpit Gupta via Eric Yang)
+
+    HADOOP-7681. Fixed security and hdfs audit log4j properties
+    (Arpit Gupta via Eric Yang)
+
+    HADOOP-7708. Fixed hadoop-setup-conf.sh to handle config files
+    consistently.  (Eric Yang)
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Oct  6 01:16:48 2011
@@ -1,5 +1,5 @@
 /hadoop/common/branches/yahoo-merge/CHANGES.txt:1079157,1079163-1079164,1079167
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1177128
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:1161333-1179483
 /hadoop/core/branches/branch-0.18/CHANGES.txt:727226
 /hadoop/core/branches/branch-0.19/CHANGES.txt:713112
 /hadoop/core/trunk/CHANGES.txt:776175-785643,785929-786278

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Oct  6 01:16:48 2011
@@ -1,2 +1,2 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1177128
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:1152502-1179483
 /hadoop/core/branches/branch-0.19/src/docs:713112

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Oct  6 01:16:48 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1177128
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:1152502-1179483
 /hadoop/core/branches/branch-0.19/core/src/java:713112
 /hadoop/core/trunk/src/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Thu Oct  6 01:16:48 2011
@@ -1632,10 +1632,6 @@ public class Configuration implements It
     try {
       doc =
         DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
-
-      // Allow a broader set of control characters to appear in job confs.
-      // cf https://issues.apache.org/jira/browse/MAPREDUCE-109 
-      doc.setXmlVersion( "1.1" );
     } catch (ParserConfigurationException pe) {
       throw new IOException(pe);
     }

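For context: this hunk reverts HADOOP-7542, matching the entry removed from CHANGES.txt earlier in this commit. With the setXmlVersion("1.1") call gone, newDocument() produces an XML 1.0 document again, so the extra control characters that XML 1.1 permits can no longer be serialized into configuration values. A minimal sketch of the post-revert path, using only standard JAXP (the class name is illustrative):

    import java.io.IOException;
    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.parsers.ParserConfigurationException;
    import org.w3c.dom.Document;

    public class ConfDocumentSketch {
      // Mirrors the reverted code path: no setXmlVersion("1.1"), so the DOM
      // defaults to XML 1.0 when the configuration is later written out.
      public static Document newConfDocument() throws IOException {
        try {
          return DocumentBuilderFactory.newInstance()
              .newDocumentBuilder().newDocument();
        } catch (ParserConfigurationException pe) {
          throw new IOException(pe);
        }
      }
    }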
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java Thu Oct  6 01:16:48 2011
@@ -210,7 +210,7 @@ public class HttpServer implements Filte
     webServer.setHandler(contexts);
 
     webAppContext = new WebAppContext();
-    webAppContext.setDisplayName("WepAppsContext");
+    webAppContext.setDisplayName(name);
     webAppContext.setContextPath("/");
     webAppContext.setWar(appDir + "/" + name);
     webAppContext.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
@@ -696,8 +696,44 @@ public class HttpServer implements Filte
    * stop the server
    */
   public void stop() throws Exception {
-    listener.close();
-    webServer.stop();
+    MultiException exception = null;
+    try {
+      listener.close();
+    } catch (Exception e) {
+      LOG.error("Error while stopping listener for webapp"
+          + webAppContext.getDisplayName(), e);
+      exception = addMultiException(exception, e);
+    }
+
+    try {
+      // clear & stop webAppContext attributes to avoid memory leaks.
+      webAppContext.clearAttributes();
+      webAppContext.stop();
+    } catch (Exception e) {
+      LOG.error("Error while stopping web app context for webapp "
+          + webAppContext.getDisplayName(), e);
+      exception = addMultiException(exception, e);
+    }
+    try {
+      webServer.stop();
+    } catch (Exception e) {
+      LOG.error("Error while stopping web server for webapp "
+          + webAppContext.getDisplayName(), e);
+      exception = addMultiException(exception, e);
+    }
+
+    if (exception != null) {
+      exception.ifExceptionThrow();
+    }
+
+  }
+
+  private MultiException addMultiException(MultiException exception, Exception e) {
+    if(exception == null){
+      exception = new MultiException();
+    }
+    exception.add(e);
+    return exception;
   }
 
   public void join() throws InterruptedException {

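The rewritten stop() above is an accumulate-then-throw shutdown: the listener, the webAppContext, and the webServer are each stopped even if an earlier step fails, every failure is logged and folded into Jetty's MultiException via addMultiException(), and a single exception is thrown at the end. A self-contained sketch of the same pattern, with hypothetical names and a plain list standing in for MultiException:

    import java.util.ArrayList;
    import java.util.List;

    public class StopAllSketch {
      interface Stoppable { void stop() throws Exception; }

      // Stop every component, remember failures, throw once at the end.
      static void stopAll(List<Stoppable> components) throws Exception {
        List<Exception> failures = new ArrayList<Exception>();
        for (Stoppable c : components) {
          try {
            c.stop();
          } catch (Exception e) {
            failures.add(e); // keep going so later resources are still released
          }
        }
        if (!failures.isEmpty()) {
          throw failures.get(0); // the patch aggregates these with MultiException
        }
      }
    }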
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java Thu Oct  6 01:16:48 2011
@@ -24,6 +24,7 @@ import java.lang.reflect.InvocationTarge
 import java.lang.reflect.Method;
 import java.util.Collections;
 import java.util.Map;
+import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -32,6 +33,11 @@ import org.apache.hadoop.io.retry.RetryP
 class RetryInvocationHandler implements InvocationHandler, Closeable {
   public static final Log LOG = LogFactory.getLog(RetryInvocationHandler.class);
   private FailoverProxyProvider proxyProvider;
+
+  /**
+   * The number of times the associated proxyProvider has ever been failed over.
+   */
+  private long proxyProviderFailoverCount = 0;
   
   private RetryPolicy defaultPolicy;
   private Map<String,RetryPolicy> methodNameToPolicyMap;
@@ -60,16 +66,24 @@ class RetryInvocationHandler implements 
       policy = defaultPolicy;
     }
     
-    int failovers = 0;
+    // The number of times this method invocation has been failed over.
+    int invocationFailoverCount = 0;
     int retries = 0;
     while (true) {
+      // The number of times this invocation handler has ever been failed over,
+      // before this method invocation attempt. Used to prevent concurrent
+      // failed method invocations from triggering multiple failover attempts.
+      long invocationAttemptFailoverCount;
+      synchronized (proxyProvider) {
+        invocationAttemptFailoverCount = proxyProviderFailoverCount;
+      }
       try {
         return invokeMethod(method, args);
       } catch (Exception e) {
         boolean isMethodIdempotent = proxyProvider.getInterface()
             .getMethod(method.getName(), method.getParameterTypes())
             .isAnnotationPresent(Idempotent.class);
-        RetryAction action = policy.shouldRetry(e, retries++, failovers,
+        RetryAction action = policy.shouldRetry(e, retries++, invocationFailoverCount,
             isMethodIdempotent);
         if (action == RetryAction.FAIL) {
           LOG.warn("Exception while invoking " + method.getName()
@@ -81,10 +95,24 @@ class RetryInvocationHandler implements 
         } else if (action == RetryAction.FAILOVER_AND_RETRY) {
           LOG.warn("Exception while invoking " + method.getName()
               + " of " + currentProxy.getClass()
-              + ". Trying to fail over.", e);
-          failovers++;
-          proxyProvider.performFailover(currentProxy);
+              + " after " + invocationFailoverCount + " fail over attempts."
+              + " Trying to fail over.", e);
+          // Make sure that concurrent failed method invocations only cause a
+          // single actual fail over.
+          synchronized (proxyProvider) {
+            if (invocationAttemptFailoverCount == proxyProviderFailoverCount) {
+              proxyProvider.performFailover(currentProxy);
+              proxyProviderFailoverCount++;
+            } else {
+              LOG.warn("A failover has occurred since the start of this method"
+                  + " invocation attempt.");
+            }
+          }
+          // Strictly, getProxy() only needs to be called when performFailover()
+          // actually ran, but it is invoked unconditionally here for the
+          // purpose of testing.
           currentProxy = proxyProvider.getProxy();
+          invocationFailoverCount++;
         }
         if(LOG.isDebugEnabled()) {
           LOG.debug("Exception while invoking " + method.getName()

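The concurrency change above is a snapshot-and-compare gate on a shared counter: each invocation records proxyProviderFailoverCount before its attempt, and on failure it performs a failover only if the counter still holds the recorded value, so N concurrently failing invocations trigger one failover instead of N. A stripped-down sketch of that gate (names are hypothetical, not from the patch):

    public class FailoverGateSketch {
      private final Object lock = new Object();
      private long failoverCount = 0; // plays the role of proxyProviderFailoverCount

      // Before an attempt: record how many failovers have happened so far.
      public long snapshot() {
        synchronized (lock) {
          return failoverCount;
        }
      }

      // After a failure: fail over only if no other thread beat us to it.
      public void maybeFailover(long observed, Runnable performFailover) {
        synchronized (lock) {
          if (observed == failoverCount) {
            performFailover.run(); // the first failed invocation does the work
            failoverCount++;
          }
          // else: a failover already happened since our snapshot; just retry
        }
      }
    }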
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AvroRpcEngine.java Thu Oct  6 01:16:48 2011
@@ -29,6 +29,8 @@ import java.net.InetSocketAddress;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
+import java.util.HashMap;
 
 import javax.net.SocketFactory;
 
@@ -54,7 +56,7 @@ import org.apache.hadoop.security.token.
 public class AvroRpcEngine implements RpcEngine {
   private static final Log LOG = LogFactory.getLog(RPC.class);
 
-  private static int VERSION = 0;
+  private static int VERSION = 1;
 
   // the implementation we tunnel through
   private static final RpcEngine ENGINE = new WritableRpcEngine();
@@ -62,9 +64,10 @@ public class AvroRpcEngine implements Rp
   /** Tunnel an Avro RPC request and response through Hadoop's RPC. */
   private static interface TunnelProtocol extends VersionedProtocol {
     //WritableRpcEngine expects a versionID in every protocol.
-    public static final long versionID = 0L;
+    public static final long versionID = VERSION;
     /** All Avro methods and responses go through this. */
-    BufferListWritable call(BufferListWritable request) throws IOException;
+    BufferListWritable call(String protocol, BufferListWritable request)
+      throws IOException;
   }
 
   /** A Writable that holds a List<ByteBuffer>, The Avro RPC Transceiver's
@@ -103,23 +106,25 @@ public class AvroRpcEngine implements Rp
   private static class ClientTransceiver extends Transceiver {
     private TunnelProtocol tunnel;
     private InetSocketAddress remote;
+    private String protocol;
   
     public ClientTransceiver(InetSocketAddress addr,
                              UserGroupInformation ticket,
                              Configuration conf, SocketFactory factory,
-                             int rpcTimeout)
+                             int rpcTimeout, String protocol)
       throws IOException {
       this.tunnel = ENGINE.getProxy(TunnelProtocol.class, VERSION,
                                         addr, ticket, conf, factory,
                                         rpcTimeout).getProxy();
       this.remote = addr;
+      this.protocol = protocol;
     }
 
     public String getRemoteName() { return remote.toString(); }
 
     public List<ByteBuffer> transceive(List<ByteBuffer> request)
       throws IOException {
-      return tunnel.call(new BufferListWritable(request)).buffers;
+      return tunnel.call(protocol, new BufferListWritable(request)).buffers;
     }
 
     public List<ByteBuffer> readBuffers() throws IOException {
@@ -159,7 +164,8 @@ public class AvroRpcEngine implements Rp
                    UserGroupInformation ticket, Configuration conf,
                    SocketFactory factory,
                    int rpcTimeout) throws IOException {
-      this.tx = new ClientTransceiver(addr, ticket, conf, factory, rpcTimeout);
+      this.tx = new ClientTransceiver(addr, ticket, conf, factory, rpcTimeout,
+                                      protocol.getName());
       this.requestor = createRequestor(protocol, tx);
     }
     @Override public Object invoke(Object proxy, Method method, Object[] args) 
@@ -182,9 +188,11 @@ public class AvroRpcEngine implements Rp
 
   /** An Avro RPC Responder that can process requests passed via Hadoop RPC. */
   private class TunnelResponder implements TunnelProtocol {
-    private Responder responder;
-    public TunnelResponder(Class<?> iface, Object impl) {
-      responder = createResponder(iface, impl);
+    private Map<String, Responder> responders =
+      new HashMap<String, Responder>();
+
+    public void addProtocol(Class<?> iface, Object impl) {
+      responders.put(iface.getName(), createResponder(iface, impl));
     }
 
     @Override
@@ -197,13 +205,18 @@ public class AvroRpcEngine implements Rp
     public ProtocolSignature getProtocolSignature(
         String protocol, long version, int clientMethodsHashCode)
       throws IOException {
-      return new ProtocolSignature(VERSION, null);
+      return ProtocolSignature.getProtocolSignature
+        (clientMethodsHashCode, VERSION, TunnelProtocol.class);
     }
 
-    public BufferListWritable call(final BufferListWritable request)
+    public BufferListWritable call(String protocol, BufferListWritable request)
       throws IOException {
+      Responder responder = responders.get(protocol);
+      if (responder == null)
+        throw new IOException("No responder for: "+protocol);
       return new BufferListWritable(responder.respond(request.buffers));
     }
+
   }
 
   public Object[] call(Method method, Object[][] params,
@@ -212,6 +225,32 @@ public class AvroRpcEngine implements Rp
     throw new UnsupportedOperationException();
   }
 
+  private class Server extends WritableRpcEngine.Server {
+    private TunnelResponder responder = new TunnelResponder();
+
+    public Server(Class<?> iface, Object impl, String bindAddress,
+                  int port, int numHandlers, int numReaders,
+                  int queueSizePerHandler, boolean verbose,
+                  Configuration conf, 
+                  SecretManager<? extends TokenIdentifier> secretManager
+                  ) throws IOException {
+      super((Class)null, new Object(), conf,
+            bindAddress, port, numHandlers, numReaders,
+            queueSizePerHandler, verbose, secretManager);
+      super.addProtocol(TunnelProtocol.class, responder);
+      responder.addProtocol(iface, impl);
+    }
+
+
+    @Override
+    public <PROTO, IMPL extends PROTO> Server
+      addProtocol(Class<PROTO> protocolClass, IMPL protocolImpl)
+        throws IOException {
+      responder.addProtocol(protocolClass, protocolImpl);
+      return this;
+    }
+  }
+
   /** Construct a server for a protocol implementation instance listening on a
    * port and address. */
   public RPC.Server getServer(Class<?> iface, Object impl, String bindAddress,
@@ -220,10 +259,9 @@ public class AvroRpcEngine implements Rp
                               Configuration conf, 
                        SecretManager<? extends TokenIdentifier> secretManager
                               ) throws IOException {
-    return ENGINE.getServer(TunnelProtocol.class,
-                            new TunnelResponder(iface, impl),
-                            bindAddress, port, numHandlers, numReaders,
-                            queueSizePerHandler, verbose, conf, secretManager);
+    return new Server
+      (iface, impl, bindAddress, port, numHandlers, numReaders,
+       queueSizePerHandler, verbose, conf, secretManager);
   }
 
 }

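The shape of this change is protocol multiplexing over one tunnel: call() now carries the protocol's fully-qualified interface name, TunnelResponder keeps a Map<String, Responder> instead of a single Responder, and the new inner Server registers additional protocols through addProtocol(). Because the wire signature of call() changed, VERSION is bumped from 0 to 1. A reduced sketch of the dispatch, with placeholder types in place of Avro's:

    import java.io.IOException;
    import java.util.HashMap;
    import java.util.Map;

    public class TunnelDispatchSketch {
      interface Handler { byte[] respond(byte[] request) throws IOException; }

      private final Map<String, Handler> handlers = new HashMap<String, Handler>();

      // Server side: one handler per protocol interface name.
      public void addProtocol(String protocolName, Handler h) {
        handlers.put(protocolName, h);
      }

      // Each tunneled call names its protocol, so one port can host many.
      public byte[] call(String protocol, byte[] request) throws IOException {
        Handler h = handlers.get(protocol);
        if (h == null) {
          throw new IOException("No responder for: " + protocol);
        }
        return h.respond(request);
      }
    }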
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Thu Oct  6 01:16:48 2011
@@ -23,8 +23,6 @@ import java.net.Socket;
 import java.net.InetSocketAddress;
 import java.net.SocketTimeoutException;
 import java.net.UnknownHostException;
-import java.net.ConnectException;
-
 import java.io.IOException;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
@@ -235,8 +233,11 @@ public class Client {
       this.remoteId = remoteId;
       this.server = remoteId.getAddress();
       if (server.isUnresolved()) {
-        throw new UnknownHostException("unknown host: " + 
-                                       remoteId.getAddress().getHostName());
+        throw NetUtils.wrapException(remoteId.getAddress().getHostName(),
+            remoteId.getAddress().getPort(),
+            null,
+            0,
+            new UnknownHostException());
       }
       this.rpcTimeout = remoteId.getRpcTimeout();
       this.maxIdleTime = remoteId.getMaxIdleTime();
@@ -1084,7 +1085,12 @@ public class Client {
           call.error.fillInStackTrace();
           throw call.error;
         } else { // local exception
-          throw wrapException(remoteId.getAddress(), call.error);
+          InetSocketAddress address = remoteId.getAddress();
+          throw NetUtils.wrapException(address.getHostName(),
+                  address.getPort(),
+                  NetUtils.getHostname(),
+                  0,
+                  call.error);
         }
       } else {
         return call.value;
@@ -1093,37 +1099,6 @@ public class Client {
   }
 
   /**
-   * Take an IOException and the address we were trying to connect to
-   * and return an IOException with the input exception as the cause.
-   * The new exception provides the stack trace of the place where 
-   * the exception is thrown and some extra diagnostics information.
-   * If the exception is ConnectException or SocketTimeoutException, 
-   * return a new one of the same type; Otherwise return an IOException.
-   * 
-   * @param addr target address
-   * @param exception the relevant exception
-   * @return an exception to throw
-   */
-  private IOException wrapException(InetSocketAddress addr,
-                                         IOException exception) {
-    if (exception instanceof ConnectException) {
-      //connection refused; include the host:port in the error
-      return (ConnectException)new ConnectException(
-           "Call to " + addr + " failed on connection exception: " + exception)
-                    .initCause(exception);
-    } else if (exception instanceof SocketTimeoutException) {
-      return (SocketTimeoutException)new SocketTimeoutException(
-           "Call to " + addr + " failed on socket timeout exception: "
-                      + exception).initCause(exception);
-    } else {
-      return (IOException)new IOException(
-           "Call to " + addr + " failed on local exception: " + exception)
-                                 .initCause(exception);
-
-    }
-  }
-
-  /** 
    * @deprecated Use {@link #call(Writable[], InetSocketAddress[], 
    * Class, UserGroupInformation, Configuration)} instead 
    */

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java Thu Oct  6 01:16:48 2011
@@ -605,7 +605,7 @@ public class RPC {
      * @param protocolImpl - the impl of the protocol that will be called
      * @return the server (for convenience)
      */
-    public <PROTO extends VersionedProtocol, IMPL extends PROTO>
+    public <PROTO, IMPL extends PROTO>
       Server addProtocol(Class<PROTO> protocolClass, IMPL protocolImpl
     ) throws IOException {
       throw new IOException("addProtocol Not Implemented");

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Thu Oct  6 01:16:48 2011
@@ -51,8 +51,6 @@ import java.util.Random;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
 
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
@@ -70,6 +68,7 @@ import org.apache.hadoop.io.WritableUtil
 import org.apache.hadoop.ipc.RPC.VersionMismatch;
 import org.apache.hadoop.ipc.metrics.RpcDetailedMetrics;
 import org.apache.hadoop.ipc.metrics.RpcMetrics;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
@@ -227,20 +226,11 @@ public abstract class Server {
                           int backlog) throws IOException {
     try {
       socket.bind(address, backlog);
-    } catch (BindException e) {
-      BindException bindException = new BindException("Problem binding to " + address
-                                                      + " : " + e.getMessage());
-      bindException.initCause(e);
-      throw bindException;
     } catch (SocketException e) {
-      // If they try to bind to a different host's address, give a better
-      // error message.
-      if ("Unresolved address".equals(e.getMessage())) {
-        throw new UnknownHostException("Invalid hostname for server: " + 
-                                       address.getHostName());
-      } else {
-        throw e;
-      }
+      throw NetUtils.wrapException(null,
+          0,
+          address.getHostName(),
+          address.getPort(), e);
     }
   }
   

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java Thu Oct  6 01:16:48 2011
@@ -388,8 +388,9 @@ public class WritableRpcEngine implement
       }
       protocolImplMap.put(new ProtoNameVer(protocolName, version),
           new ProtoClassProtoImpl(protocolClass, protocolImpl)); 
-      LOG.info("ProtocolImpl=" + protocolImpl.getClass().getName() + 
-          " protocolClass=" + protocolClass.getName() + " version=" + version);
+      LOG.info("Protocol Name = " + protocolName +  " version=" + version +
+          " ProtocolImpl=" + protocolImpl.getClass().getName() + 
+          " protocolClass=" + protocolClass.getName());
     }
     
     private static class VerProtocolImpl {
@@ -555,7 +556,7 @@ public class WritableRpcEngine implement
 
  
     @Override
-    public <PROTO extends VersionedProtocol, IMPL extends PROTO> Server
+    public <PROTO, IMPL extends PROTO> Server
       addProtocol(
         Class<PROTO> protocolClass, IMPL protocolImpl) throws IOException {
       registerProtocolAndImpl(protocolClass, protocolImpl);

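Here, as in RPC.java above, addProtocol() drops the PROTO extends VersionedProtocol bound so that interfaces which do not extend VersionedProtocol (such as AvroRpcEngine's tunneled protocols) can be registered; the log line now also records the protocol name, which may differ from the implementation class. A tiny illustration of what the looser bound permits (types are hypothetical):

    public class AddProtocolBoundSketch {
      // An interface with no VersionedProtocol supertype.
      interface AvroOnlyProtocol { String ping(); }

      // The old signature required PROTO extends VersionedProtocol; this does not.
      static <PROTO, IMPL extends PROTO> void addProtocol(
          Class<PROTO> protocolClass, IMPL protocolImpl) {
        System.out.println("Protocol Name = " + protocolClass.getName()
            + " ProtocolImpl=" + protocolImpl.getClass().getName());
      }

      public static void main(String[] args) {
        // Would not compile under the old bound; fine under <PROTO, IMPL extends PROTO>.
        addProtocol(AvroOnlyProtocol.class, new AvroOnlyProtocol() {
          public String ping() { return "pong"; }
        });
      }
    }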
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java Thu Oct  6 01:16:48 2011
@@ -117,13 +117,15 @@ public class JMXJsonServlet extends Http
 
   private static final long serialVersionUID = 1L;
 
-  // ----------------------------------------------------- Instance Variables
   /**
    * MBean server.
    */
-  protected transient MBeanServer mBeanServer = null;
+  protected transient MBeanServer mBeanServer;
 
-  // --------------------------------------------------------- Public Methods
+  /**
+   * JsonFactory used to create JSON generators for writing objects in JSON format.
+   */
+  protected transient JsonFactory jsonFactory;
   /**
    * Initialize this servlet.
    */
@@ -131,6 +133,7 @@ public class JMXJsonServlet extends Http
   public void init() throws ServletException {
     // Retrieve the MBean server
     mBeanServer = ManagementFactory.getPlatformMBeanServer();
+    jsonFactory = new JsonFactory();
   }
 
   /**
@@ -149,53 +152,45 @@ public class JMXJsonServlet extends Http
           response)) {
         return;
       }
+      JsonGenerator jg = null;
+      try {
+        response.setContentType("application/json; charset=utf8");
 
-      response.setContentType("application/json; charset=utf8");
-
-      PrintWriter writer = response.getWriter();
+        PrintWriter writer = response.getWriter();
+        jg = jsonFactory.createJsonGenerator(writer);
+        jg.useDefaultPrettyPrinter();
+        jg.writeStartObject();
 
-      JsonFactory jsonFactory = new JsonFactory();
-      JsonGenerator jg = jsonFactory.createJsonGenerator(writer);
-      jg.useDefaultPrettyPrinter();
-      jg.writeStartObject();
-      if (mBeanServer == null) {
-        jg.writeStringField("result", "ERROR");
-        jg.writeStringField("message", "No MBeanServer could be found");
-        jg.close();
-        LOG.error("No MBeanServer could be found.");
-        response.setStatus(HttpServletResponse.SC_NOT_FOUND);
-        return;
-      }
-      
-      // query per mbean attribute
-      String getmethod = request.getParameter("get");
-      if (getmethod != null) {
-        String[] splitStrings = getmethod.split("\\:\\:");
-        if (splitStrings.length != 2) {
-          jg.writeStringField("result", "ERROR");
-          jg.writeStringField("message", "query format is not as expected.");
-          jg.close();
-          response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
+        // query per mbean attribute
+        String getmethod = request.getParameter("get");
+        if (getmethod != null) {
+          String[] splitStrings = getmethod.split("\\:\\:");
+          if (splitStrings.length != 2) {
+            jg.writeStringField("result", "ERROR");
+            jg.writeStringField("message", "query format is not as expected.");
+            response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
+            return;
+          }
+          listBeans(jg, new ObjectName(splitStrings[0]), splitStrings[1],
+              response);
           return;
         }
-        listBeans(jg, new ObjectName(splitStrings[0]), splitStrings[1],
-            response);
-        jg.close();
-        return;
-      }
 
-      // query per mbean
-      String qry = request.getParameter("qry");
-      if (qry == null) {
-        qry = "*:*";
+        // query per mbean
+        String qry = request.getParameter("qry");
+        if (qry == null) {
+          qry = "*:*";
+        }
+        listBeans(jg, new ObjectName(qry), null, response);
+      } finally {
+        if (jg != null) {
+          jg.close();
+        }
       }
-      listBeans(jg, new ObjectName(qry), null, response);
-      jg.close();
-
-    } catch ( IOException e ) {
+    } catch (IOException e) {
       LOG.error("Caught an exception while processing JMX request", e);
       response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
-    } catch ( MalformedObjectNameException e ) {
+    } catch (MalformedObjectNameException e) {
       LOG.error("Caught an exception while processing JMX request", e);
       response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
     }

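Two allocation and cleanup fixes land above: the JsonFactory becomes a field built once in init() instead of a per-request local (the HADOOP-7704 object-churn fix), and the JsonGenerator is now closed in a finally block on every exit path, including the early returns. The query syntax itself is unchanged; a runnable sketch of the "get" parameter handling (the sample value is made up):

    public class JmxGetQuerySketch {
      public static void main(String[] args) {
        // Hypothetical value of the servlet's "get" parameter, e.g. from a
        // request like /jmx?get=java.lang:type=Memory::HeapMemoryUsage
        String getmethod = "java.lang:type=Memory::HeapMemoryUsage";
        String[] splitStrings = getmethod.split("\\:\\:"); // same split as above
        if (splitStrings.length != 2) {
          System.err.println("query format is not as expected.");
          return;
        }
        // splitStrings[0] is the ObjectName, splitStrings[1] the attribute.
        System.out.println("ObjectName: " + splitStrings[0]);
        System.out.println("Attribute:  " + splitStrings[1]);
      }
    }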
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java Thu Oct  6 01:16:48 2011
@@ -20,12 +20,15 @@ package org.apache.hadoop.net;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.net.BindException;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.NetworkInterface;
+import java.net.NoRouteToHostException;
 import java.net.Socket;
 import java.net.SocketAddress;
 import java.net.SocketException;
+import java.net.SocketTimeoutException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.UnknownHostException;
@@ -54,6 +57,13 @@ public class NetUtils {
   
   private static Map<String, String> hostToResolved = 
                                      new HashMap<String, String>();
+  /** text to point users elsewhere: {@value} */
+  private static final String FOR_MORE_DETAILS_SEE
+      = " For more details see:  ";
+  /** text included in wrapped exceptions if the host is null: {@value} */
+  public static final String UNKNOWN_HOST = "(unknown)";
+  /** Base URL of the Hadoop Wiki: {@value} */
+  public static final String HADOOP_WIKI = "http://wiki.apache.org/hadoop/";
 
   /**
    * Get the socket factory for the given class according to its
@@ -537,4 +547,119 @@ public class NetUtils {
     }
     return local;
   }
+
+  /**
+   * Take an IOException plus the local and remote host/port details, and
+   * return an IOException with the input exception as the cause that also
+   * includes the host details. The new exception provides the stack trace of
+   * the place where the exception is thrown and some extra diagnostic
+   * information. If the exception is a BindException, ConnectException,
+   * UnknownHostException, SocketTimeoutException or NoRouteToHostException,
+   * return a new one of the same type; otherwise return an IOException.
+   *
+   * @param destHost target host (nullable)
+   * @param destPort target port
+   * @param localHost local host (nullable)
+   * @param localPort local port
+   * @param exception the caught exception.
+   * @return an exception to throw
+   */
+  public static IOException wrapException(final String destHost,
+                                          final int destPort,
+                                          final String localHost,
+                                          final int localPort,
+                                          final IOException exception) {
+    if (exception instanceof BindException) {
+      return new BindException(
+          "Problem binding to ["
+              + localHost
+              + ":"
+              + localPort
+              + "] "
+              + exception
+              + ";"
+              + see("BindException"));
+    } else if (exception instanceof ConnectException) {
+      // connection refused; include the host:port in the error
+      return (ConnectException) new ConnectException(
+          "Call From "
+              + localHost
+              + " to "
+              + destHost
+              + ":"
+              + destPort
+              + " failed on connection exception: "
+              + exception
+              + ";"
+              + see("ConnectionRefused"))
+          .initCause(exception);
+    } else if (exception instanceof UnknownHostException) {
+      return (UnknownHostException) new UnknownHostException(
+          "Invalid host name: "
+              + getHostDetailsAsString(destHost, destPort, localHost)
+              + exception
+              + ";"
+              + see("UnknownHost"))
+          .initCause(exception);
+    } else if (exception instanceof SocketTimeoutException) {
+      return (SocketTimeoutException) new SocketTimeoutException(
+          "Call From "
+              + localHost + " to " + destHost + ":" + destPort
+              + " failed on socket timeout exception: " + exception
+              + ";"
+              + see("SocketTimeout"))
+          .initCause(exception);
+    } else if (exception instanceof NoRouteToHostException) {
+      return (NoRouteToHostException) new NoRouteToHostException(
+          "No Route to Host from  "
+              + localHost + " to " + destHost + ":" + destPort
+              + " failed on socket timeout exception: " + exception
+              + ";"
+              + see("NoRouteToHost"))
+          .initCause(exception);
+    }
+    else {
+      return (IOException) new IOException("Failed on local exception: "
+                                               + exception
+                                               + "; Host Details : "
+                                               + getHostDetailsAsString(destHost, destPort, localHost))
+          .initCause(exception);
+
+    }
+  }
+
+  private static String see(final String entry) {
+    return FOR_MORE_DETAILS_SEE + HADOOP_WIKI + entry;
+  }
+
+  /**
+   * Get the host details as a string
+   * @param destHost destination host (nullable)
+   * @param destPort destination port
+   * @param localHost local host (nullable)
+   * @return a string describing the destination host:port and the local host
+   */
+  private static String getHostDetailsAsString(final String destHost,
+                                               final int destPort,
+                                               final String localHost) {
+    StringBuilder hostDetails = new StringBuilder(27);
+    hostDetails.append("local host is: ")
+        .append(quoteHost(localHost))
+        .append("; ");
+    hostDetails.append("destination host is: \"").append(quoteHost(destHost))
+        .append(":")
+        .append(destPort).append("; ");
+    return hostDetails.toString();
+  }
+
+  /**
+   * Quote a hostname if it is not null
+   * @param hostname the hostname; nullable
+   * @return a quoted hostname or {@link #UNKNOWN_HOST} if the hostname is null
+   */
+  private static String quoteHost(final String hostname) {
+    return (hostname != null) ?
+        ("\"" + hostname + "\"")
+        : UNKNOWN_HOST;
+  }
 }

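This helper is what the Client.java and Server.java hunks above now call in place of their own ad-hoc wrappers: it preserves the concrete exception type so existing catch blocks keep working, names both endpoints, keeps the original exception as the cause, and appends a wiki pointer. A usage sketch against the method as added above (host names are made up):

    import java.io.IOException;
    import java.net.ConnectException;

    import org.apache.hadoop.net.NetUtils;

    public class WrapExceptionSketch {
      public static void main(String[] args) {
        IOException wrapped = NetUtils.wrapException(
            "namenode.example.com", 8020, // destination host and port
            "client.example.com", 0,      // local host and port
            new ConnectException("Connection refused"));
        // Same concrete type as the input, so callers can still catch it:
        System.out.println(wrapped.getClass().getName()); // java.net.ConnectException
        // The message now names both endpoints and links the wiki, roughly:
        //   Call From client.example.com to namenode.example.com:8020 failed
        //   on connection exception: ...; For more details see:
        //   http://wiki.apache.org/hadoop/ConnectionRefused
        System.out.println(wrapped.getMessage());
      }
    }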
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/hadoop.control/preinst
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/hadoop.control/preinst?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/hadoop.control/preinst (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/deb/hadoop.control/preinst Thu Oct  6 01:16:48 2011
@@ -15,4 +15,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-getent group hadoop 2>/dev/null >/dev/null || /usr/sbin/groupadd -r hadoop
+getent group hadoop 2>/dev/null >/dev/null || /usr/sbin/groupadd -g 123 -r hadoop

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/hadoop-setup-conf.sh Thu Oct  6 01:16:48 2011
@@ -51,6 +51,10 @@ usage: $0 <parameters>
      --taskscheduler=org.apache.hadoop.mapred.JobQueueTaskScheduler  Set task scheduler
      --datanodes=hostname1,hostname2,...                             SET the datanodes
      --tasktrackers=hostname1,hostname2,...                          SET the tasktrackers
+     --dfs-webhdfs-enabled=false|true                                Enable webhdfs
+     --dfs-support-append=false|true                                 Enable append
+     --hadoop-proxy-users='user1:groups:hosts;user2:groups:hosts'    Set up proxy users for hadoop
+     --hbase-user=hbase                                              User that HBase runs as. Defaults to hbase
   "
   exit 1
 }
@@ -60,9 +64,11 @@ check_permission() {
   OWNER="0"
   RESULT=0
   while [ "$TARGET" != "/" ]; do
-    PARENT=`dirname $TARGET`
-    NAME=`basename $TARGET`
-    OWNER=`ls -ln $PARENT | grep $NAME| awk '{print $3}'`
+    if [ "`uname`" = "Darwin" ]; then
+      OWNER=`stat -f %u $TARGET`
+    else
+      OWNER=`stat -c %u $TARGET`
+    fi
     if [ "$OWNER" != "0" ]; then
       RESULT=1
       break
@@ -74,6 +80,9 @@ check_permission() {
 
 template_generator() {
   REGEX='(\$\{[a-zA-Z_][a-zA-Z_0-9]*\})'
+  if [ -e $2 ]; then
+    mv -f $2 "$2.bak"
+  fi
   cat $1 |
   while read line ; do
     while [[ "$line" =~ $REGEX ]] ; do
@@ -85,6 +94,78 @@ template_generator() {
   done
 }
 
+#########################################
+# Function to add a property to an xml config file
+# Params: $1 file (full path); $2 property name; $3 new value; $4 optional description; $5 optional final flag
+#########################################
+function addPropertyToXMLConf
+{
+  #read the file name with full path
+  local file=$1
+  #get the property name
+  local property=$2
+  #get what value should be set for that
+  local propValue=$3
+  #get the description
+  local desc=$4
+  #get the value for the final tag
+  local finalVal=$5
+
+  #create the property text, make sure the / are escaped
+  propText="<property>\n<name>$property<\/name>\n<value>$propValue<\/value>"
+  #if description is not empty add it
+  if [ ! -z "$desc" ]
+  then
+    propText="${propText}<description>$desc<\/description>\n"
+  fi
+  
+  #if final is not empty add it
+  if [ ! -z "$finalVal" ]
+  then
+    propText="${propText}final>$finalVal<\/final>\n"
+  fi
+
+  #add the ending tag
+  propText="${propText}<\/property>\n"
+
+  #add the property to the file
+  endText="<\/configuration>"
+  #add the text using sed at the end of the file
+  sed -i "s|$endText|$propText$endText|" $file
+}
+
+##########################################
+# Function to set up the proxy user settings
+#########################################
+function setupProxyUsers
+{
+  #if hadoop proxy users are set, set up the hadoop proxy configuration
+  if [ ! -z "$HADOOP_PROXY_USERS" ]
+  then
+    oldIFS=$IFS
+    IFS=';'
+    #process each proxy config
+    for proxy in $HADOOP_PROXY_USERS
+    do
+      #get the user, group and hosts information for each proxy
+      IFS=':'
+      arr=($proxy)
+      user="${arr[0]}"
+      groups="${arr[1]}"
+      hosts="${arr[2]}"
+      #determine the property names and values
+      proxy_groups_property="hadoop.proxyuser.${user}.groups"
+      proxy_groups_val="$groups"
+      addPropertyToXMLConf "${HADOOP_CONF_DIR}/hdfs-site.xml" "$proxy_groups_property" "$proxy_groups_val"
+      proxy_hosts_property="hadoop.proxyuser.${user}.hosts"
+      proxy_hosts_val="$hosts"
+      addPropertyToXMLConf "${HADOOP_CONF_DIR}/hdfs-site.xml" "$proxy_hosts_property" "$proxy_hosts_val"
+      IFS=';'
+    done
+    IFS=$oldIFS
+  fi
+}
+
 OPTS=$(getopt \
   -n $0 \
   -o '' \
@@ -113,6 +194,10 @@ OPTS=$(getopt \
   -l 'kinit-location:' \
   -l 'datanodes:' \
   -l 'tasktrackers:' \
+  -l 'dfs-webhdfs-enabled:' \
+  -l 'hadoop-proxy-users:' \
+  -l 'dfs-support-append:' \
+  -l 'hbase-user:' \
   -o 'h' \
   -- "$@") 
   
@@ -232,6 +317,22 @@ while true ; do
       AUTOMATED=1
       TASKTRACKERS=$(echo $TASKTRACKERS | tr ',' ' ')
       ;;
+    --dfs-webhdfs-enabled)
+      DFS_WEBHDFS_ENABLED=$2; shift 2
+      AUTOMATED=1
+      ;;
+    --hadoop-proxy-users)
+      HADOOP_PROXY_USERS=$2; shift 2
+      AUTOMATED=1
+      ;;
+    --dfs-support-append)
+      DFS_SUPPORT_APPEND=$2; shift 2
+      AUTOMATED=1
+      ;;
+    --hbase-user)
+      HBASE_USER=$2; shift 2
+      AUTOMATED=1
+      ;;
     --)
       shift ; break
       ;;
@@ -247,6 +348,7 @@ AUTOSETUP=${AUTOSETUP:-1}
 JAVA_HOME=${JAVA_HOME:-/usr/java/default}
 HADOOP_GROUP=${HADOOP_GROUP:-hadoop}
 HADOOP_NN_HOST=${HADOOP_NN_HOST:-`hostname`}
+HADOOP_SNN_HOST=${HADOOP_SNN_HOST:-`hostname`}
 HADOOP_NN_DIR=${HADOOP_NN_DIR:-/var/lib/hadoop/hdfs/namenode}
 HADOOP_DN_DIR=${HADOOP_DN_DIR:-/var/lib/hadoop/hdfs/datanode}
 HADOOP_JT_HOST=${HADOOP_JT_HOST:-`hostname`}
@@ -259,9 +361,14 @@ HADOOP_REPLICATION=${HADOOP_RELICATION:-
 HADOOP_TASK_SCHEDULER=${HADOOP_TASK_SCHEDULER:-org.apache.hadoop.mapred.JobQueueTaskScheduler}
 HADOOP_HDFS_USER=${HADOOP_HDFS_USER:-hdfs}
 HADOOP_MR_USER=${HADOOP_MR_USER:-mr}
+DFS_WEBHDFS_ENABLED=${DFS_WEBHDFS_ENABLED:-false}
+DFS_SUPPORT_APPEND=${DFS_SUPPORT_APPEND:-false}
+HBASE_USER=${HBASE_USER:-hbase}
 KEYTAB_DIR=${KEYTAB_DIR:-/etc/security/keytabs}
 HDFS_KEYTAB=${HDFS_KEYTAB:-/home/hdfs/hdfs.keytab}
 MR_KEYTAB=${MR_KEYTAB:-/home/mr/mr.keytab}
 KERBEROS_REALM=${KERBEROS_REALM:-KERBEROS.EXAMPLE.COM}
 SECURITY_TYPE=${SECURITY_TYPE:-simple}
 KINIT=${KINIT:-/usr/kerberos/bin/kinit}
@@ -270,13 +377,18 @@ if [ "${SECURITY_TYPE}" = "kerberos" ]; 
   HADOOP_DN_ADDR="0.0.0.0:1019"
   HADOOP_DN_HTTP_ADDR="0.0.0.0:1022"
   SECURITY="true"
+  HADOOP_SECURE_DN_USER=${HADOOP_HDFS_USER}
 else
   TASK_CONTROLLER="org.apache.hadoop.mapred.DefaultTaskController"
-  HADDOP_DN_ADDR="0.0.0.0:50010"
+  HADOOP_DN_ADDR="0.0.0.0:50010"
   HADOOP_DN_HTTP_ADDR="0.0.0.0:50075"
   SECURITY="false"
+  HADOOP_SECURE_DN_USER=""
 fi
 
+#unset env vars
+unset HADOOP_CLIENT_OPTS HADOOP_NAMENODE_OPTS HADOOP_JOBTRACKER_OPTS HADOOP_TASKTRACKER_OPTS HADOOP_DATANODE_OPTS HADOOP_SECONDARYNAMENODE_OPTS HADOOP_JAVA_PLATFORM_OPTS
+
 if [ "${AUTOMATED}" != "1" ]; then
   echo "Setup Hadoop Configuration"
   echo
@@ -383,46 +495,6 @@ if [ "${AUTOSETUP}" == "1" -o "${AUTOSET
   chmod 755 ${HADOOP_LOG_DIR}/${HADOOP_MR_USER}
   chown ${HADOOP_MR_USER}:${HADOOP_GROUP} ${HADOOP_LOG_DIR}/${HADOOP_MR_USER}
 
-  if [ -e ${HADOOP_CONF_DIR}/core-site.xml ]; then
-    mv -f ${HADOOP_CONF_DIR}/core-site.xml ${HADOOP_CONF_DIR}/core-site.xml.bak
-  fi
-  if [ -e ${HADOOP_CONF_DIR}/hdfs-site.xml ]; then
-    mv -f ${HADOOP_CONF_DIR}/hdfs-site.xml ${HADOOP_CONF_DIR}/hdfs-site.xml.bak
-  fi
-  if [ -e ${HADOOP_CONF_DIR}/mapred-site.xml ]; then
-    mv -f ${HADOOP_CONF_DIR}/mapred-site.xml ${HADOOP_CONF_DIR}/mapred-site.xml.bak
-  fi
-  if [ -e ${HADOOP_CONF_DIR}/hadoop-env.sh ]; then
-    mv -f ${HADOOP_CONF_DIR}/hadoop-env.sh ${HADOOP_CONF_DIR}/hadoop-env.sh.bak
-  fi
-  if [ -e ${HADOOP_CONF_DIR}/hadoop-policy.xml ]; then
-    mv -f ${HADOOP_CONF_DIR}/hadoop-policy.xml ${HADOOP_CONF_DIR}/hadoop-policy.xml.bak
-  fi
-  if [ -e ${HADOOP_CONF_DIR}/mapred-queue-acls.xml ]; then
-    mv -f ${HADOOP_CONF_DIR}/mapred-queue-acls.xml ${HADOOP_CONF_DIR}/mapred-queue-acls.xml.bak
-  fi
-  if [ -e ${HADOOP_CONF_DIR}/commons-logging.properties ]; then
-    mv -f ${HADOOP_CONF_DIR}/commons-logging.properties ${HADOOP_CONF_DIR}/commons-logging.properties.bak
-  fi
-  if [ -e ${HADOOP_CONF_DIR}/taskcontroller.cfg  ]; then
-    mv -f ${HADOOP_CONF_DIR}/taskcontroller.cfg  ${HADOOP_CONF_DIR}/taskcontroller.cfg.bak
-  fi
-  if [ -e ${HADOOP_CONF_DIR}/slaves  ]; then
-    mv -f ${HADOOP_CONF_DIR}/slaves  ${HADOOP_CONF_DIR}/slaves.bak
-  fi
-  if [ -e ${HADOOP_CONF_DIR}/dfs.include  ]; then
-    mv -f ${HADOOP_CONF_DIR}/dfs.include  ${HADOOP_CONF_DIR}/dfs.include.bak
-  fi
-  if [ -e ${HADOOP_CONF_DIR}/dfs.exclude  ]; then
-    mv -f ${HADOOP_CONF_DIR}/dfs.exclude  ${HADOOP_CONF_DIR}/dfs.exclude.bak
-  fi
-  if [ -e ${HADOOP_CONF_DIR}/mapred.include  ]; then
-    mv -f ${HADOOP_CONF_DIR}/mapred.include  ${HADOOP_CONF_DIR}/mapred.include.bak
-  fi
-  if [ -e ${HADOOP_CONF_DIR}/mapred.exclude  ]; then
-    mv -f ${HADOOP_CONF_DIR}/mapred.exclude  ${HADOOP_CONF_DIR}/mapred.exclude.bak
-  fi
-
   template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/core-site.xml ${HADOOP_CONF_DIR}/core-site.xml
   template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/hdfs-site.xml ${HADOOP_CONF_DIR}/hdfs-site.xml
   template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/mapred-site.xml ${HADOOP_CONF_DIR}/mapred-site.xml
@@ -431,7 +503,13 @@ if [ "${AUTOSETUP}" == "1" -o "${AUTOSET
   template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/commons-logging.properties ${HADOOP_CONF_DIR}/commons-logging.properties
   template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/mapred-queue-acls.xml ${HADOOP_CONF_DIR}/mapred-queue-acls.xml
   template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/taskcontroller.cfg ${HADOOP_CONF_DIR}/taskcontroller.cfg
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/capacity-scheduler.xml ${HADOOP_CONF_DIR}/capacity-scheduler.xml
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/log4j.properties ${HADOOP_CONF_DIR}/log4j.properties
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/hadoop-metrics2.properties ${HADOOP_CONF_DIR}/hadoop-metrics2.properties
 
+  # set up the proxy users
+  setupProxyUsers
+  
   #set the owner of the hadoop dir to root
   chown root ${HADOOP_PREFIX}
   chown root:${HADOOP_GROUP} ${HADOOP_CONF_DIR}/hadoop-env.sh
@@ -474,15 +552,12 @@ else
   template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/mapred-queue-acls.xml ${HADOOP_CONF_DIR}/mapred-queue-acls.xml
   template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/taskcontroller.cfg ${HADOOP_CONF_DIR}/taskcontroller.cfg
   template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/hadoop-metrics2.properties ${HADOOP_CONF_DIR}/hadoop-metrics2.properties
-  if [ ! -e ${HADOOP_CONF_DIR}/capacity-scheduler.xml ]; then
-    template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/capacity-scheduler.xml ${HADOOP_CONF_DIR}/capacity-scheduler.xml
-  fi
-  if [ ! -e ${HADOOP_CONF_DIR}/hadoop-metrics2.properties ]; then
-    cp ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/hadoop-metrics2.properties ${HADOOP_CONF_DIR}/hadoop-metrics2.properties
-  fi
-  if [ ! -e ${HADOOP_CONF_DIR}/log4j.properties ]; then
-    cp ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/log4j.properties ${HADOOP_CONF_DIR}/log4j.properties
-  fi
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/capacity-scheduler.xml ${HADOOP_CONF_DIR}/capacity-scheduler.xml
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/log4j.properties ${HADOOP_CONF_DIR}/log4j.properties
+  template_generator ${HADOOP_PREFIX}/share/hadoop/common/templates/conf/hadoop-metrics2.properties ${HADOOP_CONF_DIR}/hadoop-metrics2.properties
+  
+  # set up the proxy users
+  setupProxyUsers
   
   chown root:${HADOOP_GROUP} ${HADOOP_CONF_DIR}/hadoop-env.sh
   chmod 755 ${HADOOP_CONF_DIR}/hadoop-env.sh
@@ -515,6 +590,12 @@ else
   echo "${HADOOP_CONF_DIR}/hdfs-site.xml"
   echo "${HADOOP_CONF_DIR}/mapred-site.xml"
   echo "${HADOOP_CONF_DIR}/hadoop-env.sh"
+  echo "${HADOOP_CONF_DIR}/hadoop-policy.xml"
+  echo "${HADOOP_CONF_DIR}/commons-logging.properties"
+  echo "${HADOOP_CONF_DIR}/taskcontroller.cfg"
+  echo "${HADOOP_CONF_DIR}/capacity-scheduler.xml"
+  echo "${HADOOP_CONF_DIR}/log4j.properties"
+  echo "${HADOOP_CONF_DIR}/hadoop-metrics2.properties"
   echo
   echo " to ${HADOOP_CONF_DIR} on all nodes, and proceed to run hadoop-setup-hdfs.sh on namenode."
 fi

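The hunks above simplify the setup script: the per-file .bak shuffle is gone, template_generator now regenerates capacity-scheduler.xml, log4j.properties and hadoop-metrics2.properties unconditionally instead of copying them only when absent, and setupProxyUsers runs in both the automated and interactive branches. template_generator itself is a shell function that fills ${VAR} placeholders from the environment; the sketch below shows the same substitution idea in Java (TemplateExpander and its keep-unknown-tokens behavior are illustrative, not part of Hadoop):

    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Illustrative sketch: expand ${VAR} tokens from environment
    // variables, the way the packaging templates are filled in
    // before being written to HADOOP_CONF_DIR.
    public class TemplateExpander {
      private static final Pattern TOKEN =
          Pattern.compile("\\$\\{([A-Za-z_][A-Za-z0-9_]*)\\}");

      public static String expand(String template, Map<String, String> env) {
        Matcher m = TOKEN.matcher(template);
        StringBuffer out = new StringBuffer();
        while (m.find()) {
          String value = env.get(m.group(1));
          // Leave unknown tokens in place rather than failing.
          m.appendReplacement(out,
              Matcher.quoteReplacement(value != null ? value : m.group(0)));
        }
        m.appendTail(out);
        return out.toString();
      }

      public static void main(String[] args) {
        System.out.println(
            expand("<value>${HADOOP_HDFS_USER}</value>", System.getenv()));
      }
    }
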
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/spec/hadoop.spec
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/spec/hadoop.spec?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/spec/hadoop.spec (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/rpm/spec/hadoop.spec Thu Oct  6 01:16:48 2011
@@ -132,7 +132,7 @@ mv ${RPM_BUILD_DIR}/%{_final_name}/share
 rm -rf ${RPM_BUILD_DIR}/%{_final_name}/etc
 
 %pre
-getent group hadoop 2>/dev/null >/dev/null || /usr/sbin/groupadd -r hadoop
+getent group hadoop 2>/dev/null >/dev/null || /usr/sbin/groupadd -g 123 -r hadoop
 
 %post
 bash ${RPM_INSTALL_PREFIX0}/sbin/update-hadoop-env.sh \

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/core-site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/core-site.xml?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/core-site.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/core-site.xml Thu Oct  6 01:16:48 2011
@@ -58,6 +58,8 @@
         RULE:[2:$1@$0](hdfs@.*${KERBEROS_REALM})s/.*/${HADOOP_HDFS_USER}/
         RULE:[2:$1@$0](mapredqa@.*${KERBEROS_REALM})s/.*/${HADOOP_MR_USER}/
         RULE:[2:$1@$0](hdfsqa@.*${KERBEROS_REALM})s/.*/${HADOOP_HDFS_USER}/
+        RULE:[2:$1@$0](hm@.*${KERBEROS_REALM})s/.*/${HBASE_USER}/
+        RULE:[2:$1@$0](rs@.*${KERBEROS_REALM})s/.*/${HBASE_USER}/
         DEFAULT
     </value>
     <description></description>

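The two added rules map the HBase master (hm/host@realm) and regionserver (rs/host@realm) principals onto ${HBASE_USER}. A rule of the form RULE:[2:$1@$0](pattern)s/.../user/ applies only to two-component principals: it formats the name as $1@$0 (first component plus realm), matches that against the pattern, then applies the sed-style substitution. A self-contained sketch of that evaluation in plain regex (not the actual Hadoop KerberosName code; realm and user names are examples):

    import java.util.regex.Pattern;

    // Illustrative sketch of evaluating
    //   RULE:[2:$1@$0](hm@.*EXAMPLE.COM)s/.*/hbase/
    // against the HBase master principal.
    public class AuthToLocalSketch {
      public static void main(String[] args) {
        String principal = "hm/master-host@EXAMPLE.COM";
        String[] nameAndRealm = principal.split("@", 2);
        String[] components = nameAndRealm[0].split("/");
        if (components.length == 2) {                              // the [2:...] arity check
          String formatted = components[0] + "@" + nameAndRealm[1]; // $1@$0
          if (Pattern.matches("hm@.*EXAMPLE\\.COM", formatted)) {
            System.out.println(formatted.replaceFirst(".*", "hbase")); // s/.*/hbase/ -> "hbase"
          }
        }
      }
    }
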
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-env.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-env.sh?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-env.sh (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hadoop-env.sh Thu Oct  6 01:16:48 2011
@@ -41,22 +41,22 @@ done
 #export HADOOP_NAMENODE_INIT_HEAPSIZE=""
 
 # Extra Java runtime options.  Empty by default.
-export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true $HADOOP_OPTS"
+export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true $HADOOP_CLIENT_OPTS"
 
 # Command specific options appended to HADOOP_OPTS when specified
-export HADOOP_NAMENODE_OPTS="-Dsecurity.audit.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_NAMENODE_OPTS}"
-HADOOP_JOBTRACKER_OPTS="-Dsecurity.audit.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dmapred.jobsummary.logger=INFO,JSA ${HADOOP_JOBTRACKER_OPTS}"
-HADOOP_TASKTRACKER_OPTS="-Dsecurity.audit.logger=ERROR,console -Dmapred.audit.logger=ERROR,console ${HADOOP_TASKTRACKER_OPTS}"
-HADOOP_DATANODE_OPTS="-Dsecurity.audit.logger=ERROR,DRFAS ${HADOOP_DATANODE_OPTS}"
+export HADOOP_NAMENODE_OPTS="-Dsecurity.audit.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT $HADOOP_NAMENODE_OPTS"
+HADOOP_JOBTRACKER_OPTS="-Dsecurity.audit.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT -Dmapred.jobsummary.logger=INFO,JSA $HADOOP_JOBTRACKER_OPTS"
+HADOOP_TASKTRACKER_OPTS="-Dsecurity.audit.logger=ERROR,console -Dmapred.audit.logger=ERROR,console $HADOOP_TASKTRACKER_OPTS"
+HADOOP_DATANODE_OPTS="-Dsecurity.audit.logger=ERROR,DRFAS $HADOOP_DATANODE_OPTS"
 
-export HADOOP_SECONDARYNAMENODE_OPTS="-Dsecurity.audit.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_SECONDARYNAMENODE_OPTS}"
+export HADOOP_SECONDARYNAMENODE_OPTS="-Dsecurity.audit.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT $HADOOP_SECONDARYNAMENODE_OPTS"
 
 # The following applies to multiple commands (fs, dfs, fsck, distcp etc)
-export HADOOP_CLIENT_OPTS="-Xmx128m ${HADOOP_CLIENT_OPTS}"
-#HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData ${HADOOP_JAVA_PLATFORM_OPTS}"
+export HADOOP_CLIENT_OPTS="-Xmx128m $HADOOP_CLIENT_OPTS"
+#HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData $HADOOP_JAVA_PLATFORM_OPTS"
 
 # On secure datanodes, user to run the datanode as after dropping privileges
-export HADOOP_SECURE_DN_USER=${HADOOP_HDFS_USER}
+export HADOOP_SECURE_DN_USER=${HADOOP_SECURE_DN_USER}
 
 # Where log files are stored.  $HADOOP_HOME/logs by default.
 export HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$USER

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/hdfs-site.xml Thu Oct  6 01:16:48 2011
@@ -263,4 +263,14 @@
       excluded.
     </description>
   </property>
+  <property>
+    <name>dfs.webhdfs.enabled</name>
+    <value>${DFS_WEBHDFS_ENABLED}</value>
+    <description>Enable or disable webhdfs. Defaults to false</description>
+  </property>
+  <property>
+    <name>dfs.support.append</name>
+    <value>${DFS_SUPPORT_APPEND}</value>
+    <description>Enable or disable append. Defaults to false</description>
+  </property>
 </configuration>

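Both new properties are placeholders that hadoop-setup-conf.sh fills with literal true/false values; at runtime they are read through Configuration with false as the fallback, matching the descriptions above. A minimal sketch (class name illustrative; assumes the generated *-site.xml files are on the classpath):

    import org.apache.hadoop.conf.Configuration;

    // Minimal sketch: both flags default to false when unset.
    public class WebhdfsFlags {
      public static void main(String[] args) {
        Configuration conf = new Configuration(); // loads core-site.xml/hdfs-site.xml if present
        boolean webhdfs = conf.getBoolean("dfs.webhdfs.enabled", false);
        boolean append  = conf.getBoolean("dfs.support.append", false);
        System.out.println("webhdfs=" + webhdfs + ", append=" + append);
      }
    }
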
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/log4j.properties?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/log4j.properties (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/main/packages/templates/conf/log4j.properties Thu Oct  6 01:16:48 2011
@@ -21,15 +21,6 @@ hadoop.root.logger=INFO,console
 hadoop.log.dir=.
 hadoop.log.file=hadoop.log
 
-#
-# Job Summary Appender 
-#
-# Use following logger to send summary to separate file defined by 
-# hadoop.mapreduce.jobsummary.log.file rolled daily:
-# hadoop.mapreduce.jobsummary.logger=INFO,JSA
-# 
-hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
-hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
 
 # Define the root logger to the system property "hadoop.root.logger".
 log4j.rootLogger=${hadoop.root.logger}, EventCounter
@@ -90,19 +81,21 @@ log4j.appender.TLA.layout.ConversionPatt
 #
 #Security appender
 #
+security.audit.logger=INFO,console
 hadoop.security.log.file=SecurityAuth.audit
 log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender 
 log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
-
 log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
 log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd
+
 #new logger
 # Define some default values that can be overridden by system properties
 hadoop.security.logger=INFO,console
-log4j.category.SecurityLogger=${hadoop.security.logger}
 
+#
 # hdfs audit logging
-
+#
 hdfs.audit.logger=INFO,console
 log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}
 log4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false
@@ -112,8 +105,9 @@ log4j.appender.DRFAAUDIT.layout=org.apac
 log4j.appender.DRFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
 log4j.appender.DRFAAUDIT.DatePattern=.yyyy-MM-dd
 
+#
 # mapred audit logging
-
+#
 mapred.audit.logger=INFO,console
 log4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}
 log4j.additivity.org.apache.hadoop.mapred.AuditLogger=false
@@ -138,11 +132,6 @@ log4j.appender.MRAUDIT.DatePattern=.yyyy
 #log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
 #log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
 
-#
-# FSNamesystem Audit logging
-# All audit events are logged at INFO level
-#
-log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=WARN
 
 # Custom Logging levels
 
@@ -160,8 +149,14 @@ log4j.logger.org.jets3t.service.impl.res
 log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
 
 #
-# Job Summary Appender
+# Job Summary Appender 
 #
+# Use following logger to send summary to separate file defined by 
+# hadoop.mapreduce.jobsummary.log.file rolled daily:
+# hadoop.mapreduce.jobsummary.logger=INFO,JSA
+# 
+hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
+hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
 log4j.appender.JSA=org.apache.log4j.DailyRollingFileAppender
 log4j.appender.JSA.File=${hadoop.log.dir}/${hadoop.mapreduce.jobsummary.log.file}
 log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
@@ -171,26 +166,6 @@ log4j.logger.org.apache.hadoop.mapred.Jo
 log4j.additivity.org.apache.hadoop.mapred.JobInProgress$JobSummary=false
 
 #
-# MapReduce Audit Log Appender
-#
-
-# Set the MapReduce audit log filename
-#hadoop.mapreduce.audit.log.file=hadoop-mapreduce.audit.log
-
-# Appender for AuditLogger.
-# Requires the following system properties to be set
-#    - hadoop.log.dir (Hadoop Log directory)
-#    - hadoop.mapreduce.audit.log.file (MapReduce audit log filename)
-
-#log4j.logger.org.apache.hadoop.mapred.AuditLogger=INFO,MRAUDIT
-#log4j.additivity.org.apache.hadoop.mapred.AuditLogger=false
-#log4j.appender.MRAUDIT=org.apache.log4j.DailyRollingFileAppender
-#log4j.appender.MRAUDIT.File=${hadoop.log.dir}/${hadoop.mapreduce.audit.log.file}
-#log4j.appender.MRAUDIT.DatePattern=.yyyy-MM-dd
-#log4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout
-#log4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-
-#
 # Yarn ResourceManager Application Summary Log 
 #
 # Set the ResourceManager summary log filename

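The reshuffled file keeps in-file defaults (security.audit.logger, hdfs.audit.logger and mapred.audit.logger all INFO,console) that the daemon -D flags from hadoop-env.sh override, since log4j 1.x resolves ${...} references from system properties before consulting the properties file. A minimal sketch of that override path (the file path and log message are illustrative):

    import org.apache.log4j.Logger;
    import org.apache.log4j.PropertyConfigurator;

    // Minimal sketch: a system property (standing in for a daemon's
    // -Dhdfs.audit.logger=... flag) takes precedence over the
    // hdfs.audit.logger default defined in log4j.properties.
    public class AuditLoggerDemo {
      public static void main(String[] args) {
        System.setProperty("hdfs.audit.logger", "INFO,console");
        PropertyConfigurator.configure("conf/log4j.properties"); // illustrative path
        Logger audit = Logger.getLogger(
            "org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit");
        audit.info("allowed=true ugi=alice cmd=open src=/tmp/x"); // illustrative entry
      }
    }
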
Propchange: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Thu Oct  6 01:16:48 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1152502-1177128
+/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:1152502-1179483
 /hadoop/core/branches/branch-0.19/core/src/test/core:713112
 /hadoop/core/trunk/src/test/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java Thu Oct  6 01:16:48 2011
@@ -58,7 +58,7 @@ public class TestConfiguration extends T
   }
   
   private void startConfig() throws IOException{
-    out.write("<?xml version=\"1.1\"?>\n");
+    out.write("<?xml version=\"1.0\"?>\n");
     out.write("<configuration>\n");
   }
 
@@ -221,18 +221,6 @@ public class TestConfiguration extends T
     assertEquals("this  contains a comment", conf.get("my.comment"));
   }
   
-  public void testControlAInValue() throws IOException {
-    out = new BufferedWriter(new FileWriter(CONFIG));
-    startConfig();
-    appendProperty("my.char", "&#1;");
-    appendProperty("my.string", "some&#1;string");
-    endConfig();
-    Path fileResource = new Path(CONFIG);
-    conf.addResource(fileResource);
-    assertEquals("\u0001", conf.get("my.char"));
-    assertEquals("some\u0001string", conf.get("my.string"));
-  }
-
   public void testTrim() throws IOException {
     out=new BufferedWriter(new FileWriter(CONFIG));
     startConfig();
@@ -310,7 +298,7 @@ public class TestConfiguration extends T
     conf.writeXml(baos);
     String result = baos.toString();
     assertTrue("Result has proper header", result.startsWith(
-        "<?xml version=\"1.1\" encoding=\"UTF-8\" standalone=\"no\"?><configuration>"));
+        "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?><configuration>"));
     assertTrue("Result has proper footer", result.endsWith("</configuration>"));
   }
   

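The reverted declaration and the removed testControlAInValue go together: XML 1.0 forbids control characters such as U+0001 even as character references, while XML 1.1 permits them, so once Configuration writes version="1.0" again the &#1; round-trip can no longer parse. A standalone sketch showing a stock parser reject it (not part of the test suite):

    import java.io.StringReader;
    import javax.xml.parsers.DocumentBuilderFactory;
    import org.xml.sax.InputSource;
    import org.xml.sax.SAXParseException;

    // Standalone sketch: &#1; is a well-formedness error under XML 1.0.
    public class ControlCharDemo {
      public static void main(String[] args) throws Exception {
        String xml = "<?xml version=\"1.0\"?><configuration>"
            + "<property><name>my.char</name><value>&#1;</value></property>"
            + "</configuration>";
        try {
          DocumentBuilderFactory.newInstance().newDocumentBuilder()
              .parse(new InputSource(new StringReader(xml)));
          System.out.println("parsed (only possible under XML 1.1)");
        } catch (SAXParseException e) {
          System.out.println("rejected as expected: " + e.getMessage());
        }
      }
    }
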
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLifecycle.java Thu Oct  6 01:16:48 2011
@@ -56,16 +56,14 @@ public class TestHttpServerLifecycle ext
    *
    * @throws Throwable on failure
    */
-  @Test public void testStartedServerIsAlive() throws Throwable {
+  @Test
+  public void testStartedServerIsAlive() throws Throwable {
     HttpServer server = null;
-    try {
-      server = createTestServer();
-      assertNotLive(server);
-      server.start();
-      assertAlive(server);
-    } finally {
-      stop(server);
-    }
+    server = createTestServer();
+    assertNotLive(server);
+    server.start();
+    assertAlive(server);
+    stop(server);
   }
 
   /**
@@ -105,4 +103,24 @@ public class TestHttpServerLifecycle ext
     assertNotLive(server);
   }
 
+  /**
+   * Test that web app context attributes are cleared after the server stops
+   * 
+   * @throws Throwable
+   *           on failure
+   */
+  @Test
+  public void testWepAppContextAfterServerStop() throws Throwable {
+    HttpServer server = null;
+    String key = "test.attribute.key";
+    String value = "test.attribute.value";
+    server = createTestServer();
+    assertNotLive(server);
+    server.start();
+    server.setAttribute(key, value);
+    assertAlive(server);
+    assertEquals(value, server.getAttribute(key));
+    stop(server);
+    assertNull("Server context should have cleared", server.getAttribute(key));
+  }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java Thu Oct  6 01:16:48 2011
@@ -20,6 +20,7 @@ package org.apache.hadoop.io.retry;
 import static org.junit.Assert.*;
 
 import java.io.IOException;
+import java.util.concurrent.CountDownLatch;
 
 import org.apache.hadoop.io.retry.UnreliableImplementation.TypeOfExceptionToFailWith;
 import org.apache.hadoop.io.retry.UnreliableInterface.UnreliableException;
@@ -35,22 +36,41 @@ public class TestFailoverProxy {
     private Object impl1;
     private Object impl2;
     
+    private boolean latchEnabled = false;
+    private CountDownLatch getProxyLatch;
+    private int failoversOccurred = 0;
+    
     public FlipFlopProxyProvider(Class<?> iface, Object activeImpl,
-        Object standbyImpl) {
+        Object standbyImpl, int getProxyCountDown) {
       this.iface = iface;
       this.impl1 = activeImpl;
       this.impl2 = standbyImpl;
       currentlyActive = impl1;
+      getProxyLatch = new CountDownLatch(getProxyCountDown);
+    }
+    
+    public FlipFlopProxyProvider(Class<?> iface, Object activeImpl,
+        Object standbyImpl) {
+      this(iface, activeImpl, standbyImpl, 0);
     }
     
     @Override
     public Object getProxy() {
+      if (latchEnabled) {
+        getProxyLatch.countDown();
+        try {
+          getProxyLatch.await();
+        } catch (InterruptedException e) {
+          throw new RuntimeException(e);
+        }
+      }
       return currentlyActive;
     }
 
     @Override
-    public void performFailover(Object currentProxy) {
+    public synchronized void performFailover(Object currentProxy) {
       currentlyActive = impl1 == currentProxy ? impl2 : impl1;
+      failoversOccurred++;
     }
 
     @Override
@@ -63,6 +83,13 @@ public class TestFailoverProxy {
       // Nothing to do.
     }
     
+    public void setLatchEnabled(boolean latchEnabled) {
+      this.latchEnabled = latchEnabled;
+    }
+    
+    public int getFailoversOccurred() {
+      return failoversOccurred;
+    }
   }
   
   public static class FailOverOnceOnAnyExceptionPolicy implements RetryPolicy {
@@ -186,4 +213,55 @@ public class TestFailoverProxy {
     // IOException and this method is idempotent.
     assertEquals("impl2", unreliable.succeedsOnceThenFailsReturningStringIdempotent());
   }
-}
+  
+  private static class ConcurrentMethodThread extends Thread {
+    
+    private UnreliableInterface unreliable;
+    public String result;
+    
+    public ConcurrentMethodThread(UnreliableInterface unreliable) {
+      this.unreliable = unreliable;
+    }
+    
+    public void run() {
+      try {
+        result = unreliable.failsIfIdentifierDoesntMatch("impl2");
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
+
+  /**
+   * Test that concurrent failed method invocations only result in a single
+   * failover.
+   */
+  @Test
+  public void testConcurrentMethodFailures() throws InterruptedException {
+    FlipFlopProxyProvider proxyProvider = new FlipFlopProxyProvider(
+        UnreliableInterface.class,
+        new UnreliableImplementation("impl1",
+            TypeOfExceptionToFailWith.STANDBY_EXCEPTION),
+        new UnreliableImplementation("impl2",
+            TypeOfExceptionToFailWith.STANDBY_EXCEPTION),
+        2);
+    
+    final UnreliableInterface unreliable = (UnreliableInterface)RetryProxy
+      .create(UnreliableInterface.class, proxyProvider,
+          RetryPolicies.failoverOnNetworkException(10));
+
+    ConcurrentMethodThread t1 = new ConcurrentMethodThread(unreliable);
+    ConcurrentMethodThread t2 = new ConcurrentMethodThread(unreliable);
+    
+    // Getting a proxy will now wait on a latch.
+    proxyProvider.setLatchEnabled(true);
+    
+    t1.start();
+    t2.start();
+    t1.join();
+    t2.join();
+    assertEquals("impl2", t1.result);
+    assertEquals("impl2", t2.result);
+    assertEquals(1, proxyProvider.getFailoversOccurred());
+  }
+}
\ No newline at end of file

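The CountDownLatch(2) in FlipFlopProxyProvider.getProxy() is what makes the new test deterministic: both threads must reach getProxy() before either proceeds, so their invocations fail concurrently and the assertion that exactly one failover occurred is meaningful. The rendezvous idiom in isolation (a self-contained sketch, independent of the test classes above):

    import java.util.concurrent.CountDownLatch;

    // Self-contained sketch of the two-party rendezvous: each thread
    // counts down and then waits, so neither passes until both arrive.
    public class RendezvousDemo {
      public static void main(String[] args) throws InterruptedException {
        final CountDownLatch latch = new CountDownLatch(2);
        Runnable worker = new Runnable() {
          public void run() {
            latch.countDown();
            try {
              latch.await(); // blocks until the other thread counts down too
            } catch (InterruptedException e) {
              throw new RuntimeException(e);
            }
            System.out.println(Thread.currentThread().getName() + " released");
          }
        };
        Thread t1 = new Thread(worker, "t1");
        Thread t2 = new Thread(worker, "t2");
        t1.start(); t2.start();
        t1.join(); t2.join();
      }
    }
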
Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableImplementation.java Thu Oct  6 01:16:48 2011
@@ -141,4 +141,23 @@ public class UnreliableImplementation im
     }
   }
 
+  @Override
+  public String failsIfIdentifierDoesntMatch(String identifier)
+      throws UnreliableException, StandbyException, IOException {
+    if (this.identifier.equals(identifier)) {
+      return identifier;
+    } else {
+      switch (exceptionToFailWith) {
+      case STANDBY_EXCEPTION:
+        throw new StandbyException(identifier);
+      case UNRELIABLE_EXCEPTION:
+        throw new UnreliableException(identifier);
+      case IO_EXCEPTION:
+        throw new IOException(identifier);
+      default:
+        throw new RuntimeException(identifier);
+      }
+    }
+  }
+
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/UnreliableInterface.java Thu Oct  6 01:16:48 2011
@@ -63,4 +63,8 @@ public interface UnreliableInterface {
       throws UnreliableException, StandbyException, IOException;
   public String succeedsTenTimesThenFailsReturningString()
       throws UnreliableException, StandbyException, IOException;
+  
+  @Idempotent
+  public String failsIfIdentifierDoesntMatch(String identifier)
+      throws UnreliableException, StandbyException, IOException;
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAvroRpc.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAvroRpc.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAvroRpc.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAvroRpc.java Thu Oct  6 01:16:48 2011
@@ -43,6 +43,7 @@ import org.apache.hadoop.security.Securi
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 
 /** Unit tests for AvroRpc. */
 public class TestAvroRpc extends TestCase {
@@ -56,6 +57,9 @@ public class TestAvroRpc extends TestCas
 
   public TestAvroRpc(String name) { super(name); }
 	
+  public static interface EmptyProtocol {}
+  public static class EmptyImpl implements EmptyProtocol {}
+
   public static class TestImpl implements AvroTestProtocol {
 
     public void ping() {}
@@ -93,10 +97,12 @@ public class TestAvroRpc extends TestCas
       sm = new TestTokenSecretManager();
     }
     UserGroupInformation.setConfiguration(conf);
+    RPC.setProtocolEngine(conf, EmptyProtocol.class, AvroRpcEngine.class);
     RPC.setProtocolEngine(conf, AvroTestProtocol.class, AvroRpcEngine.class);
-    Server server = RPC.getServer(AvroTestProtocol.class,
-                                  new TestImpl(), ADDRESS, 0, 5, true, 
-                                  conf, sm);
+    RPC.Server server = RPC.getServer(EmptyProtocol.class, new EmptyImpl(),
+                                      ADDRESS, 0, 5, true, conf, sm);
+    server.addProtocol(AvroTestProtocol.class, new TestImpl());
+
     try {
       server.start();
       InetSocketAddress addr = NetUtils.getConnectAddress(server);

Modified: hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java?rev=1179484&r1=1179483&r2=1179484&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java Thu Oct  6 01:16:48 2011
@@ -23,7 +23,6 @@ import org.apache.commons.logging.*;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.net.NetUtils;
 
@@ -270,7 +269,7 @@ public class TestIPC {
       fail("Expected an exception to have been thrown");
     } catch (IOException e) {
       String message = e.getMessage();
-      String addressText = address.toString();
+      String addressText = address.getHostName() + ":" + address.getPort();
       assertTrue("Did not find "+addressText+" in "+message,
               message.contains(addressText));
       Throwable cause=e.getCause();

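The TestIPC fix builds "host:port" by hand because InetSocketAddress.toString() renders a resolved address as hostname/literal-IP:port, which need not appear verbatim in the exception message. A tiny sketch (exact output depends on the JDK and name resolution):

    import java.net.InetSocketAddress;

    // Tiny sketch: resolved addresses print as "hostname/ip:port".
    public class AddressFormat {
      public static void main(String[] args) {
        InetSocketAddress addr = new InetSocketAddress("localhost", 8020);
        System.out.println(addr);                                      // e.g. localhost/127.0.0.1:8020
        System.out.println(addr.getHostName() + ":" + addr.getPort()); // localhost:8020
      }
    }
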

