hadoop-mapreduce-commits mailing list archives

From: szets...@apache.org
Subject: svn commit: r1437843 [3/3] - in /hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project: ./ conf/ dev-support/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ hadoop-mapreduce-client/hadoop-mapreduce-clien...
Date: Thu, 24 Jan 2013 02:45:59 GMT
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java?rev=1437843&r1=1437842&r2=1437843&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java Thu Jan 24 02:45:45 2013
@@ -25,6 +25,7 @@ import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
+import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -83,6 +84,11 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.log4j.Appender;
+import org.apache.log4j.Layout;
+import org.apache.log4j.Logger;
+import org.apache.log4j.SimpleLayout;
+import org.apache.log4j.WriterAppender;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.invocation.InvocationOnMock;
@@ -112,6 +118,7 @@ public class TestYARNRunner extends Test
   public void setUp() throws Exception {
     resourceMgrDelegate = mock(ResourceMgrDelegate.class);
     conf = new YarnConfiguration();
+    conf.set(YarnConfiguration.RM_PRINCIPAL, "mapred/host@REALM");
     clientCache = new ClientCache(conf, resourceMgrDelegate);
     clientCache = spy(clientCache);
     yarnRunner = new YARNRunner(conf, resourceMgrDelegate, clientCache);
@@ -188,7 +195,7 @@ public class TestYARNRunner extends Test
 
   @Test
   public void testResourceMgrDelegate() throws Exception {
-    /* we not want a mock of resourcemgr deleagte */
+    /* we do not want a mock of resource mgr delegate */
     final ClientRMProtocol clientRMProtocol = mock(ClientRMProtocol.class);
     ResourceMgrDelegate delegate = new ResourceMgrDelegate(conf) {
       @Override
@@ -255,6 +262,9 @@ public class TestYARNRunner extends Test
   
   @Test
   public void testHistoryServerToken() throws Exception {
+    // Set the master principal in the config
+    conf.set(YarnConfiguration.RM_PRINCIPAL, "foo@LOCAL");
+
     final String masterPrincipal = Master.getMasterPrincipal(conf);
 
     final MRClientProtocol hsProxy = mock(MRClientProtocol.class);
@@ -264,7 +274,7 @@ public class TestYARNRunner extends Test
             GetDelegationTokenRequest request =
                 (GetDelegationTokenRequest)invocation.getArguments()[0];
             // check that the renewer matches the cluster's RM principal
-            assertEquals(request.getRenewer(), masterPrincipal);
+            assertEquals(masterPrincipal, request.getRenewer());
 
             DelegationToken token =
                 recordFactory.newRecordInstance(DelegationToken.class);
@@ -356,4 +366,53 @@ public class TestYARNRunner extends Test
       assertTrue("AM admin command opts is after user command opts.", adminIndex < userIndex);
     }
   }
+  @Test
+  public void testWarnCommandOpts() throws Exception {
+    Logger logger = Logger.getLogger(YARNRunner.class);
+    
+    ByteArrayOutputStream bout = new ByteArrayOutputStream();
+    Layout layout = new SimpleLayout();
+    Appender appender = new WriterAppender(layout, bout);
+    logger.addAppender(appender);
+    
+    JobConf jobConf = new JobConf();
+    
+    jobConf.set(MRJobConfig.MR_AM_ADMIN_COMMAND_OPTS, "-Djava.net.preferIPv4Stack=true -Djava.library.path=foo");
+    jobConf.set(MRJobConfig.MR_AM_COMMAND_OPTS, "-Xmx1024m -Djava.library.path=bar");
+    
+    YARNRunner yarnRunner = new YARNRunner(jobConf);
+    
+    File jobxml = new File(testWorkDir, MRJobConfig.JOB_CONF_FILE);
+    OutputStream out = new FileOutputStream(jobxml);
+    conf.writeXml(out);
+    out.close();
+    
+    File jobsplit = new File(testWorkDir, MRJobConfig.JOB_SPLIT);
+    out = new FileOutputStream(jobsplit);
+    out.close();
+    
+    File jobsplitmetainfo = new File(testWorkDir, MRJobConfig.JOB_SPLIT_METAINFO);
+    out = new FileOutputStream(jobsplitmetainfo);
+    out.close();
+    
+    File appTokens = new File(testWorkDir, MRJobConfig.APPLICATION_TOKENS_FILE);
+    out = new FileOutputStream(appTokens);
+    out.close();
+    
+    @SuppressWarnings("unused")
+    ApplicationSubmissionContext submissionContext = 
+        yarnRunner.createApplicationSubmissionContext(jobConf, testWorkDir.toString(), new Credentials());
+   
+    String logMsg = bout.toString();
+    assertTrue(logMsg.contains("WARN - Usage of -Djava.library.path in " +
+        "yarn.app.mapreduce.am.admin-command-opts can cause programs to no " +
+        "longer function if hadoop native libraries are used. These values " +
+        "should be set as part of the LD_LIBRARY_PATH in the app master JVM " +
+        "env using yarn.app.mapreduce.am.admin.user.env config settings."));
+    assertTrue(logMsg.contains("WARN - Usage of -Djava.library.path in " + 
+        "yarn.app.mapreduce.am.command-opts can cause programs to no longer " +
+        "function if hadoop native libraries are used. These values should " +
+        "be set as part of the LD_LIBRARY_PATH in the app master JVM env " +
+        "using yarn.app.mapreduce.am.env config settings."));
+  }
 }

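For readers skimming the new testWarnCommandOpts test above: the pattern it relies on is attaching an in-memory log4j WriterAppender to the logger of the class under test, running the code, and then asserting on the captured text. The stand-alone sketch below shows only that capture pattern, stripped of the YARN-specific setup; the class name and warning message are placeholders, not code from this commit.

    import java.io.ByteArrayOutputStream;

    import org.apache.log4j.Appender;
    import org.apache.log4j.Logger;
    import org.apache.log4j.SimpleLayout;
    import org.apache.log4j.WriterAppender;

    public class LogCaptureSketch {
      public static void main(String[] args) {
        // Attach an in-memory appender to the logger whose output we want to inspect.
        Logger logger = Logger.getLogger(LogCaptureSketch.class);
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        Appender appender = new WriterAppender(new SimpleLayout(), bout);
        logger.addAppender(appender);
        try {
          // The code under test would log here; this line stands in for YARNRunner.
          logger.warn("Usage of -Djava.library.path in am command opts ...");
        } finally {
          logger.removeAppender(appender);
        }
        // SimpleLayout renders "LEVEL - message", which is the shape the test asserts on.
        String logMsg = bout.toString();
        System.out.println(logMsg.contains("WARN - Usage of -Djava.library.path"));
      }
    }
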
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml?rev=1437843&r1=1437842&r2=1437843&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/pom.xml Thu Jan 24 02:45:45 2013
@@ -134,7 +134,15 @@
           <effort>Max</effort>
         </configuration>
       </plugin>
-
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+        <configuration>
+          <excludes>
+            <exclude>src/main/java/org/apache/hadoop/examples/dancing/puzzle1.dta</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
    </plugins>
    </build>
 </project>

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/QuasiMonteCarlo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/QuasiMonteCarlo.java?rev=1437843&r1=1437842&r2=1437843&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/QuasiMonteCarlo.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/QuasiMonteCarlo.java Thu Jan 24 02:45:45 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.examples;
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.math.RoundingMode;
+import java.util.Random;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -77,8 +78,7 @@ public class QuasiMonteCarlo extends Con
   static final String DESCRIPTION
       = "A map/reduce program that estimates Pi using a quasi-Monte Carlo method.";
   /** tmp directory for input/output */
-  static private final Path TMP_DIR = new Path(
-      QuasiMonteCarlo.class.getSimpleName() + "_TMP_3_141592654");
+  static private final String TMP_DIR_PREFIX = QuasiMonteCarlo.class.getSimpleName();
   
   /** 2-dimensional Halton sequence {H(i)},
    * where H(i) is a 2-dimensional point and i >= 1 is the index.
@@ -228,9 +228,9 @@ public class QuasiMonteCarlo extends Con
     @Override
     public void cleanup(Context context) throws IOException {
       //write output to a file
-      Path outDir = new Path(TMP_DIR, "out");
-      Path outFile = new Path(outDir, "reduce-out");
       Configuration conf = context.getConfiguration();
+      Path outDir = new Path(conf.get(FileOutputFormat.OUTDIR));
+      Path outFile = new Path(outDir, "reduce-out");
       FileSystem fileSys = FileSystem.get(conf);
       SequenceFile.Writer writer = SequenceFile.createWriter(fileSys, conf,
           outFile, LongWritable.class, LongWritable.class, 
@@ -246,7 +246,7 @@ public class QuasiMonteCarlo extends Con
    * @return the estimated value of Pi
    */
   public static BigDecimal estimatePi(int numMaps, long numPoints,
-      Configuration conf
+      Path tmpDir, Configuration conf
       ) throws IOException, ClassNotFoundException, InterruptedException {
     Job job = new Job(conf);
     //setup job conf
@@ -269,14 +269,14 @@ public class QuasiMonteCarlo extends Con
     job.setSpeculativeExecution(false);
 
     //setup input/output directories
-    final Path inDir = new Path(TMP_DIR, "in");
-    final Path outDir = new Path(TMP_DIR, "out");
+    final Path inDir = new Path(tmpDir, "in");
+    final Path outDir = new Path(tmpDir, "out");
     FileInputFormat.setInputPaths(job, inDir);
     FileOutputFormat.setOutputPath(job, outDir);
 
     final FileSystem fs = FileSystem.get(conf);
-    if (fs.exists(TMP_DIR)) {
-      throw new IOException("Tmp directory " + fs.makeQualified(TMP_DIR)
+    if (fs.exists(tmpDir)) {
+      throw new IOException("Tmp directory " + fs.makeQualified(tmpDir)
           + " already exists.  Please remove it first.");
     }
     if (!fs.mkdirs(inDir)) {
@@ -325,7 +325,7 @@ public class QuasiMonteCarlo extends Con
           .multiply(BigDecimal.valueOf(numInside.get()))
           .divide(numTotal, RoundingMode.HALF_UP);
     } finally {
-      fs.delete(TMP_DIR, true);
+      fs.delete(tmpDir, true);
     }
   }
 
@@ -344,12 +344,15 @@ public class QuasiMonteCarlo extends Con
     
     final int nMaps = Integer.parseInt(args[0]);
     final long nSamples = Long.parseLong(args[1]);
+    long now = System.currentTimeMillis();
+    int rand = new Random().nextInt(Integer.MAX_VALUE);
+    final Path tmpDir = new Path(TMP_DIR_PREFIX + "_" + now + "_" + rand);
         
     System.out.println("Number of Maps  = " + nMaps);
     System.out.println("Samples per Map = " + nSamples);
         
     System.out.println("Estimated value of Pi is "
-        + estimatePi(nMaps, nSamples, getConf()));
+        + estimatePi(nMaps, nSamples, tmpDir, getConf()));
     return 0;
   }
 

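The QuasiMonteCarlo hunks above replace the fixed TMP_DIR constant with a per-run path built from a timestamp and a random int, which is then passed into estimatePi, so two Pi jobs run against the same filesystem no longer collide on one temp directory (the reducer now reads the output directory from the job configuration rather than from the old constant). Below is a minimal sketch of just the path construction, assuming only hadoop-common's Path on the classpath; the prefix string is a placeholder standing in for TMP_DIR_PREFIX.

    import java.util.Random;

    import org.apache.hadoop.fs.Path;

    public class TmpDirSketch {
      public static void main(String[] args) {
        // Suffix the temp directory with a timestamp and a random int so that
        // concurrent or repeated runs get distinct, collision-free paths.
        String prefix = "QuasiMonteCarlo";  // placeholder for TMP_DIR_PREFIX
        long now = System.currentTimeMillis();
        int rand = new Random().nextInt(Integer.MAX_VALUE);
        Path tmpDir = new Path(prefix + "_" + now + "_" + rand);
        System.out.println("temp dir for this run: " + tmpDir);
      }
    }
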
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java?rev=1437843&r1=1437842&r2=1437843&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java Thu Jan 24 02:45:45 2013
@@ -174,16 +174,16 @@ public class DistributedPentomino extend
       return 2;
     }
     // check for passed parameters, otherwise use defaults
-    int width = PENT_WIDTH;
-    int height = PENT_HEIGHT;
-    int depth = PENT_DEPTH;
+    int width = conf.getInt(Pentomino.WIDTH, PENT_WIDTH);
+    int height = conf.getInt(Pentomino.HEIGHT, PENT_HEIGHT);
+    int depth = conf.getInt(Pentomino.DEPTH, PENT_DEPTH);
     for (int i = 0; i < args.length; i++) {
       if (args[i].equalsIgnoreCase("-depth")) {
-          depth = Integer.parseInt(args[i++].trim());
+        depth = Integer.parseInt(args[++i].trim());
       } else if (args[i].equalsIgnoreCase("-height")) {
-	  height = Integer.parseInt(args[i++].trim());
+        height = Integer.parseInt(args[++i].trim());
       } else if (args[i].equalsIgnoreCase("-width") ) {
-	  width = Integer.parseInt(args[i++].trim()); 
+        width = Integer.parseInt(args[++i].trim());
       }
     }
     // now set the values within conf for M/R tasks to read, this

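The DistributedPentomino hunk above fixes the option parsing: the old args[i++] re-read the flag token itself (for example "-depth") and handed it to Integer.parseInt, whereas args[++i] consumes the value that follows the flag. A small sketch of the corrected loop, using placeholder defaults rather than the real PENT_* constants and Configuration lookups:

    public class PentominoArgsSketch {
      public static void main(String[] args) {
        // Placeholder defaults; the real code falls back to conf.getInt(..., PENT_*).
        int width = 9, height = 10, depth = 5;
        for (int i = 0; i < args.length; i++) {
          if (args[i].equalsIgnoreCase("-depth")) {
            depth = Integer.parseInt(args[++i].trim());   // ++i: read the value after the flag
          } else if (args[i].equalsIgnoreCase("-height")) {
            height = Integer.parseInt(args[++i].trim());
          } else if (args[i].equalsIgnoreCase("-width")) {
            width = Integer.parseInt(args[++i].trim());
          }
        }
        System.out.println("width=" + width + " height=" + height + " depth=" + depth);
      }
    }
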
Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/TestWordStats.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/TestWordStats.java?rev=1437843&r1=1437842&r2=1437843&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/TestWordStats.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/test/java/org/apache/hadoop/examples/TestWordStats.java Thu Jan 24 02:45:45 2013
@@ -1,3 +1,20 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
 package org.apache.hadoop.examples;
 
 import static org.junit.Assert.assertEquals;

Modified: hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/pom.xml?rev=1437843&r1=1437842&r2=1437843&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-mapreduce-project/pom.xml Thu Jan 24 02:45:45 2013
@@ -214,9 +214,11 @@
         <groupId>org.apache.rat</groupId>
         <artifactId>apache-rat-plugin</artifactId>
         <configuration>
-          <includes>
-            <include>pom.xml</include>
-          </includes>
+          <excludes>
+            <exclude>.eclipse.templates/</exclude>
+            <exclude>CHANGES.txt</exclude>
+            <exclude>lib/jdiff/**</exclude>
+          </excludes>
         </configuration>
       </plugin>
       <plugin>


