hadoop-yarn-commits mailing list archives

From: t...@apache.org
Subject: svn commit: r1463203 [8/8] - in /hadoop/common/branches/HDFS-347/hadoop-yarn-project: ./ hadoop-yarn/ hadoop-yarn/bin/ hadoop-yarn/conf/ hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ hadoop-yarn/hadoop-yarn-api/src/main/java/org...
Date: Mon, 01 Apr 2013 16:47:34 GMT
Modified: hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java Mon Apr  1 16:47:16 2013
@@ -22,7 +22,6 @@ import static org.junit.Assert.fail;
 
 import java.io.File;
 import java.io.FileNotFoundException;
-import java.io.FileWriter;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.nio.ByteBuffer;
@@ -51,6 +50,7 @@ import org.apache.hadoop.security.Access
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.yarn.api.AMRMProtocol;
 import org.apache.hadoop.yarn.api.ContainerManager;
 import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
@@ -69,11 +69,8 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.api.records.ContainerToken;
 import org.apache.hadoop.yarn.api.records.LocalResource;
-import org.apache.hadoop.yarn.api.records.LocalResourceType;
-import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceRequest;
-import org.apache.hadoop.yarn.api.records.URL;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
@@ -88,7 +85,6 @@ import org.apache.hadoop.yarn.server.res
 import org.apache.hadoop.yarn.server.resourcemanager.security.ApplicationTokenSecretManager;
 import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
 import org.apache.hadoop.yarn.util.BuilderUtils;
-import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.util.Records;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -401,10 +397,15 @@ public class TestContainerManagerSecurit
       UnsupportedFileSystemException, YarnRemoteException,
       InterruptedException {
 
+    // Use ping to simulate sleep on Windows.
+    List<String> cmd = Shell.WINDOWS ?
+      Arrays.asList("ping", "-n", "100", "127.0.0.1", ">nul") :
+      Arrays.asList("sleep", "100");
+
     ContainerLaunchContext amContainer = BuilderUtils
         .newContainerLaunchContext(null, "testUser", BuilderUtils
             .newResource(1024, 1), Collections.<String, LocalResource>emptyMap(),
-            new HashMap<String, String>(), Arrays.asList("sleep", "100"),
+            new HashMap<String, String>(), cmd,
             new HashMap<String, ByteBuffer>(), null,
             new HashMap<ApplicationAccessType, String>());
 
@@ -480,14 +481,14 @@ public class TestContainerManagerSecurit
 
     // Request a container allocation.
     List<ResourceRequest> ask = new ArrayList<ResourceRequest>();
-    ask.add(BuilderUtils.newResourceRequest(BuilderUtils.newPriority(0), "*",
-        BuilderUtils.newResource(1024, 1), 1));
+    ask.add(BuilderUtils.newResourceRequest(BuilderUtils.newPriority(0),
+        ResourceRequest.ANY, BuilderUtils.newResource(1024, 1), 1));
 
     AllocateRequest allocateRequest = BuilderUtils.newAllocateRequest(
         BuilderUtils.newApplicationAttemptId(appID, 1), 0, 0F, ask,
         new ArrayList<ContainerId>());
     List<Container> allocatedContainers = scheduler.allocate(allocateRequest)
-        .getAMResponse().getAllocatedContainers();
+        .getAllocatedContainers();
 
     // Modify ask to request no more.
     allocateRequest.clearAsks();
@@ -499,7 +500,7 @@ public class TestContainerManagerSecurit
       Thread.sleep(1000);
       allocateRequest.setResponseId(allocateRequest.getResponseId() + 1);
       allocatedContainers = scheduler.allocate(allocateRequest)
-          .getAMResponse().getAllocatedContainers();
+          .getAllocatedContainers();
     }
 
     Assert.assertNotNull("Container is not allocated!", allocatedContainers);

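For context, here is a minimal standalone sketch (not part of the patch above) of the platform-dependent command selection the test now performs inline. It assumes hadoop-common's org.apache.hadoop.util.Shell is on the classpath; the class and method names below are illustrative only.

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.util.Shell;

    /** Illustrative helper, not part of the patch: builds a "wait" command. */
    public class SleepCommand {
      /** Roughly "sleep for the given number of seconds" on either platform. */
      public static List<String> forSeconds(int seconds) {
        return Shell.WINDOWS
            // Windows has no sleep built-in; pinging localhost once per second
            // for the requested count approximates a sleep, output discarded.
            ? Arrays.asList("ping", "-n", Integer.toString(seconds),
                "127.0.0.1", ">nul")
            : Arrays.asList("sleep", Integer.toString(seconds));
      }
    }

In the test itself the equivalent list is built inline and handed to BuilderUtils.newContainerLaunchContext as the container command.
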
Modified: hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestRMNMSecretKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestRMNMSecretKeys.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestRMNMSecretKeys.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestRMNMSecretKeys.java Mon Apr  1 16:47:16 2013
@@ -27,9 +27,9 @@ import org.apache.hadoop.security.UserGr
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.event.DrainDispatcher;
-import org.apache.hadoop.yarn.server.api.records.HeartbeatResponse;
+import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
+import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResponse;
 import org.apache.hadoop.yarn.server.api.records.MasterKey;
-import org.apache.hadoop.yarn.server.api.records.RegistrationResponse;
 import org.apache.hadoop.yarn.server.resourcemanager.MockNM;
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
 import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
@@ -62,12 +62,12 @@ public class TestRMNMSecretKeys {
     rm.start();
 
     MockNM nm = new MockNM("host:1234", 3072, rm.getResourceTrackerService());
-    RegistrationResponse registrationResponse = nm.registerNode();
+    RegisterNodeManagerResponse registrationResponse = nm.registerNode();
     MasterKey masterKey = registrationResponse.getMasterKey();
     Assert.assertNotNull("Registration should cause a key-update!", masterKey);
     dispatcher.await();
 
-    HeartbeatResponse response = nm.nodeHeartbeat(true);
+    NodeHeartbeatResponse response = nm.nodeHeartbeat(true);
     Assert.assertNull(
       "First heartbeat after registration shouldn't get any key updates!",
       response.getMasterKey());

Modified: hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java Mon Apr  1 16:47:16 2013
@@ -66,6 +66,7 @@ public class WebAppProxyServlet extends 
   public static final String PROXY_USER_COOKIE_NAME = "proxy-user";
 
   private final List<TrackingUriPlugin> trackingUriPlugins;
+  private final String rmAppPageUrlBase;
 
   private static class _ implements Hamlet._ {
     //Empty
@@ -91,6 +92,8 @@ public class WebAppProxyServlet extends 
     this.trackingUriPlugins =
         conf.getInstances(YarnConfiguration.YARN_TRACKING_URL_GENERATOR,
             TrackingUriPlugin.class);
+    this.rmAppPageUrlBase = StringHelper.pjoin(
+        YarnConfiguration.getRMWebAppURL(conf), "cluster", "app");
   }
 
   /**
@@ -291,25 +294,10 @@ public class WebAppProxyServlet extends 
       if (original != null) {
         trackingUri = ProxyUriUtils.getUriFromAMUrl(original);
       }
+      // fallback to ResourceManager's app page if no tracking URI provided
       if(original == null || original.equals("N/A")) {
-        String message;
-        switch(applicationReport.getFinalApplicationStatus()) {
-          case FAILED:
-          case KILLED:
-          case SUCCEEDED:
-            message =
-              "The requested application exited before setting a tracking URL.";
-            break;
-          case UNDEFINED:
-            message = "The requested application does not appear to be running "
-              +"yet, and has not set a tracking URL.";
-            break;
-          default:
-            //This should never happen, but just to be safe
-            message = "The requested application has not set a tracking URL.";
-            break;
-        }
-        notFound(resp, message);
+        resp.sendRedirect(resp.encodeRedirectURL(
+            StringHelper.pjoin(rmAppPageUrlBase, id.toString())));
         return;
       }
 

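A minimal sketch (not part of the patch) of the fallback behaviour introduced above: when an application reports no tracking URL (null or "N/A"), the proxy now redirects to the ResourceManager's own application page instead of returning a "not found" message. It assumes the YARN client and common jars on the classpath; the class and method names here are illustrative only.

    import java.io.IOException;

    import javax.servlet.http.HttpServletResponse;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.api.records.ApplicationId;
    import org.apache.hadoop.yarn.conf.YarnConfiguration;
    import org.apache.hadoop.yarn.util.StringHelper;

    // Illustrative only: mirrors the redirect added to WebAppProxyServlet.
    class RmAppPageFallback {
      private final String rmAppPageUrlBase;

      RmAppPageFallback(Configuration conf) {
        // <rm web app address>/cluster/app
        this.rmAppPageUrlBase = StringHelper.pjoin(
            YarnConfiguration.getRMWebAppURL(conf), "cluster", "app");
      }

      void redirectToRMAppPage(HttpServletResponse resp, ApplicationId id)
          throws IOException {
        // redirect target: <rm web app address>/cluster/app/<application id>
        resp.sendRedirect(resp.encodeRedirectURL(
            StringHelper.pjoin(rmAppPageUrlBase, id.toString())));
      }
    }
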
Modified: hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/amfilter/TestAmFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/amfilter/TestAmFilter.java?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/amfilter/TestAmFilter.java (original)
+++ hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/amfilter/TestAmFilter.java Mon Apr  1 16:47:16 2013
@@ -19,41 +19,39 @@
 package org.apache.hadoop.yarn.server.webproxy.amfilter;
 
 import java.io.IOException;
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
+import javax.servlet.*;
+import javax.servlet.http.Cookie;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import junit.framework.Assert;
+import static junit.framework.Assert.*;
 
+import org.apache.hadoop.yarn.server.webproxy.WebAppProxyServlet;
+import org.glassfish.grizzly.servlet.HttpServletResponseImpl;
 import org.junit.Test;
 import org.mockito.Mockito;
 
+/**
+ * Test AmIpFilter. Requests from hosts that are not declared as proxy
+ * addresses should be redirected through the proxy. Other requests can be
+ * filtered with or without a user name.
+ */
+public class TestAmFilter {
 
-public class TestAmFilter  {
-
-  private String proxyHost = "bogushost.com";
+  private String proxyHost = "localhost";
   private String proxyUri = "http://bogus";
+  private String doFilterRequest;
+  private AmIpServletRequestWrapper servletWrapper;
 
   private class TestAmIpFilter extends AmIpFilter {
 
     private Set<String> proxyAddresses = null;
 
     protected Set<String> getProxyAddresses() {
-      if(proxyAddresses == null) {
+      if (proxyAddresses == null) {
         proxyAddresses = new HashSet<String>();
       }
       proxyAddresses.add(proxyHost);
@@ -61,12 +59,10 @@ public class TestAmFilter  {
     }
   }
 
-
   private static class DummyFilterConfig implements FilterConfig {
     final Map<String, String> map;
 
-
-    DummyFilterConfig(Map<String,String> map) {
+    DummyFilterConfig(Map<String, String> map) {
       this.map = map;
     }
 
@@ -74,22 +70,24 @@ public class TestAmFilter  {
     public String getFilterName() {
       return "dummy";
     }
+
     @Override
     public String getInitParameter(String arg0) {
       return map.get(arg0);
     }
+
     @Override
     public Enumeration<String> getInitParameterNames() {
       return Collections.enumeration(map.keySet());
     }
+
     @Override
     public ServletContext getServletContext() {
       return null;
     }
   }
 
-
-  @Test
+  @Test(timeout = 5000)
   public void filterNullCookies() throws Exception {
     HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
 
@@ -97,13 +95,12 @@ public class TestAmFilter  {
     Mockito.when(request.getRemoteAddr()).thenReturn(proxyHost);
 
     HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
-
     final AtomicBoolean invoked = new AtomicBoolean();
 
     FilterChain chain = new FilterChain() {
       @Override
-      public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse)
-        throws IOException, ServletException {
+      public void doFilter(ServletRequest servletRequest,
+          ServletResponse servletResponse) throws IOException, ServletException {
         invoked.set(true);
       }
     };
@@ -115,7 +112,93 @@ public class TestAmFilter  {
     Filter filter = new TestAmIpFilter();
     filter.init(conf);
     filter.doFilter(request, response, chain);
-    Assert.assertTrue(invoked.get());
+    assertTrue(invoked.get());
     filter.destroy();
   }
+
+  /**
+   * Test AmIpFilter
+   */
+  @Test(timeout = 1000)
+  public void testFilter() throws Exception {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(AmIpFilter.PROXY_HOST, proxyHost);
+    params.put(AmIpFilter.PROXY_URI_BASE, proxyUri);
+    FilterConfig config = new DummyFilterConfig(params);
+
+    // dummy filter
+    FilterChain chain = new FilterChain() {
+      @Override
+      public void doFilter(ServletRequest servletRequest,
+          ServletResponse servletResponse) throws IOException, ServletException {
+        doFilterRequest = servletRequest.getClass().getName();
+        if (servletRequest instanceof AmIpServletRequestWrapper) {
+          servletWrapper = (AmIpServletRequestWrapper) servletRequest;
+
+        }
+      }
+    };
+    AmIpFilter testFilter = new AmIpFilter();
+    testFilter.init(config);
+
+    HttpServletResponseForTest response = new HttpServletResponseForTest();
+    // The test request should implement HttpServletRequest
+
+    ServletRequest failRequest = Mockito.mock(ServletRequest.class);
+    try {
+      testFilter.doFilter(failRequest, response, chain);
+      fail();
+    } catch (ServletException e) {
+      assertEquals("This filter only works for HTTP/HTTPS", e.getMessage());
+    }
+
+    // request with HttpServletRequest
+    HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
+    Mockito.when(request.getRemoteAddr()).thenReturn("redirect");
+    Mockito.when(request.getRequestURI()).thenReturn("/redirect");
+    testFilter.doFilter(request, response, chain);
+    // address "redirect" is not in host list
+    assertEquals("http://bogus/redirect", response.getRedirect());
+    // "127.0.0.1" is in the host list; no cookie set yet
+    Mockito.when(request.getRemoteAddr()).thenReturn("127.0.0.1");
+    testFilter.doFilter(request, response, chain);
+
+    assertTrue(doFilterRequest
+        .contains("javax.servlet.http.HttpServletRequest"));
+    // cookie added
+    Cookie[] cookies = new Cookie[1];
+    cookies[0] = new Cookie(WebAppProxyServlet.PROXY_USER_COOKIE_NAME, "user");
+
+    Mockito.when(request.getCookies()).thenReturn(cookies);
+    testFilter.doFilter(request, response, chain);
+
+    assertEquals(
+        "org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpServletRequestWrapper",
+        doFilterRequest);
+    // request contains principal from cookie
+    assertEquals("user", servletWrapper.getUserPrincipal().getName());
+    assertEquals("user", servletWrapper.getRemoteUser());
+    assertFalse(servletWrapper.isUserInRole(""));
+
+  }
+
+  private class HttpServletResponseForTest extends HttpServletResponseImpl {
+    String redirectLocation = "";
+
+    public String getRedirect() {
+      return redirectLocation;
+    }
+
+    @Override
+    public void sendRedirect(String location) throws IOException {
+      redirectLocation = location;
+    }
+
+    @Override
+    public String encodeRedirectURL(String url) {
+      return url;
+    }
+
+  }
+
 }

Modified: hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/pom.xml?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/pom.xml (original)
+++ hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/pom.xml Mon Apr  1 16:47:16 2013
@@ -28,6 +28,27 @@
   <name>hadoop-yarn-server</name>
   <packaging>pom</packaging>
 
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <environmentVariables>
+            <!-- HADOOP_HOME required for tests on Windows to find winutils -->
+            <HADOOP_HOME>${basedir}/../../../../hadoop-common-project/hadoop-common/target</HADOOP_HOME>
+          </environmentVariables>
+          <properties>
+            <property>
+              <name>listener</name>
+              <value>org.apache.hadoop.test.TimedOutTestsListener</value>
+            </property>
+          </properties>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
   <dependencies>
     <dependency>
       <groupId>org.apache.hadoop</groupId>

Modified: hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/WritingYarnApplications.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/WritingYarnApplications.apt.vm?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/WritingYarnApplications.apt.vm (original)
+++ hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/WritingYarnApplications.apt.vm Mon Apr  1 16:47:16 2013
@@ -493,7 +493,7 @@ Hadoop MapReduce Next Generation - Writi
 +---+
     
   * The AllocateResponse sent back from the ResourceManager provides the 
-    following information via the AMResponse object: 
+    following information:
   
     * Reboot flag: For scenarios when the ApplicationMaster may get out of sync 
       with the ResourceManager. 
@@ -511,7 +511,9 @@ Hadoop MapReduce Next Generation - Writi
       allocated container, it will receive an update from the ResourceManager 
       when the container completes. The ApplicationMaster can look into the 
       status of the completed container and take appropriate actions such as 
-      re-trying a particular sub-task in case of a failure. 
+      re-trying a particular sub-task in case of a failure.
+
+    * Number of cluster nodes: The number of hosts available on the cluster.
       
     [] 
       
@@ -525,13 +527,11 @@ Hadoop MapReduce Next Generation - Writi
     containers. 
 
 +---+
-    // Get AMResponse from AllocateResponse 
-    AMResponse amResp = allocateResponse.getAMResponse(); 			
 
     // Retrieve list of allocated containers from the response 
     // and on each allocated container, lets assume we are launching 
     // the same job.
-    List<Container> allocatedContainers = amResp.getAllocatedContainers();
+    List<Container> allocatedContainers = allocateResponse.getAllocatedContainers();
     for (Container allocatedContainer : allocatedContainers) {
       LOG.info("Launching shell command on a new container."
           + ", containerId=" + allocatedContainer.getId()
@@ -553,7 +553,7 @@ Hadoop MapReduce Next Generation - Writi
     }
 
     // Check what the current available resources in the cluster are
-    Resource availableResources = amResp.getAvailableResources();
+    Resource availableResources = allocateResponse.getAvailableResources();
     // Based on this information, an ApplicationMaster can make appropriate 
     // decisions
 
@@ -561,7 +561,7 @@ Hadoop MapReduce Next Generation - Writi
     // Let's assume we are keeping a count of total completed containers, 
     // containers that failed and ones that completed successfully.  			
     List<ContainerStatus> completedContainers = 
-        amResp.getCompletedContainersStatuses();
+        allocateResponse.getCompletedContainersStatuses();
     for (ContainerStatus containerStatus : completedContainers) {				
       LOG.info("Got container status for containerID= " 
           + containerStatus.getContainerId()
@@ -611,7 +611,7 @@ Hadoop MapReduce Next Generation - Writi
        
 +---+
        
-    //Assuming an allocated Container obtained from AMResponse 
+    //Assuming an allocated Container obtained from AllocateResponse
     Container container;   
     // Connect to ContainerManager on the allocated container 
     String cmIpPortStr = container.getNodeId().getHost() + ":" 

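For readers following the documentation change above, a consolidated sketch (not part of the patch, with illustrative class and method names) of reading the same three pieces of information directly from the AllocateResponse now that the intermediate AMResponse object is gone:

    import java.util.List;

    import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
    import org.apache.hadoop.yarn.api.records.Container;
    import org.apache.hadoop.yarn.api.records.ContainerStatus;
    import org.apache.hadoop.yarn.api.records.Resource;

    // Illustrative only: everything previously fetched via getAMResponse()
    // is now read directly from the AllocateResponse.
    class AllocateResponseReader {
      void read(AllocateResponse allocateResponse) {
        // Containers granted since the last allocate call.
        List<Container> allocated = allocateResponse.getAllocatedContainers();

        // Headroom currently available to this application.
        Resource available = allocateResponse.getAvailableResources();

        // Containers that completed since the last allocate call.
        List<ContainerStatus> completed =
            allocateResponse.getCompletedContainersStatuses();

        System.out.println("allocated=" + allocated.size()
            + ", completed=" + completed.size()
            + ", headroom=" + available);
      }
    }
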
Modified: hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/pom.xml?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/pom.xml (original)
+++ hadoop/common/branches/HDFS-347/hadoop-yarn-project/hadoop-yarn/pom.xml Mon Apr  1 16:47:16 2013
@@ -85,7 +85,7 @@
       <artifactId>guice-servlet</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.jboss.netty</groupId>
+      <groupId>io.netty</groupId>
       <artifactId>netty</artifactId>
     </dependency>
     <dependency>
@@ -159,6 +159,10 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
         <configuration>
+          <environmentVariables>
+            <!-- HADOOP_HOME required for tests on Windows to find winutils -->
+            <HADOOP_HOME>${basedir}/../../../hadoop-common-project/hadoop-common/target</HADOOP_HOME>
+          </environmentVariables>
           <properties>
             <property>
               <name>listener</name>

Modified: hadoop/common/branches/HDFS-347/hadoop-yarn-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-347/hadoop-yarn-project/pom.xml?rev=1463203&r1=1463202&r2=1463203&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-347/hadoop-yarn-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-347/hadoop-yarn-project/pom.xml Mon Apr  1 16:47:16 2013
@@ -59,7 +59,7 @@
           <artifactId>ant</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>org.jboss.netty</groupId>
+          <groupId>io.netty</groupId>
           <artifactId>netty</artifactId>
         </exclusion>
         <exclusion>
@@ -149,7 +149,7 @@
       <artifactId>junit</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.jboss.netty</groupId>
+      <groupId>io.netty</groupId>
       <artifactId>netty</artifactId>
     </dependency>
     <dependency>
@@ -180,15 +180,8 @@
               <target if="tar">
                 <!-- Using Unix script to preserve symlinks -->
                 <echo file="${project.build.directory}/dist-maketar.sh">
-
-                  which cygpath 2&gt; /dev/null
-                  if [ $? = 1 ]; then
-                    BUILD_DIR="${project.build.directory}"
-                  else
-                    BUILD_DIR=`cygpath --unix '${project.build.directory}'`
-                  fi
-                  cd $BUILD_DIR
-                  tar czf ${project.artifactId}-${project.version}.tar.gz ${project.artifactId}-${project.version}
+                  cd "${project.build.directory}"
+                  tar cf - ${project.artifactId}-${project.version} | gzip > ${project.artifactId}-${project.version}.tar.gz
                 </echo>
                 <exec executable="sh" dir="${project.build.directory}" failonerror="true">
                   <arg line="./dist-maketar.sh"/>


