hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From szets...@apache.org
Subject svn commit: r1329947 - in /hadoop/common/branches/HDFS-3092: ./ hadoop-client/ hadoop-project/ hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/ hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/
Date Tue, 24 Apr 2012 19:05:16 GMT
Author: szetszwo
Date: Tue Apr 24 19:05:09 2012
New Revision: 1329947

URL: http://svn.apache.org/viewvc?rev=1329947&view=rev
Log:
Merge r1327258 through r1329943 from trunk.

Removed:
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestUlimit.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/UlimitApp.java
Modified:
    hadoop/common/branches/HDFS-3092/   (props changed)
    hadoop/common/branches/HDFS-3092/hadoop-client/pom.xml
    hadoop/common/branches/HDFS-3092/hadoop-project/pom.xml
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HistoryEventEmitter.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleCachefiles.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java

Propchange: hadoop/common/branches/HDFS-3092/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk:r1327258-1329943

Modified: hadoop/common/branches/HDFS-3092/hadoop-client/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-client/pom.xml?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-client/pom.xml (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-client/pom.xml Tue Apr 24 19:05:09 2012
@@ -172,10 +172,6 @@
           <artifactId>junit</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>com.cenqua.clover</groupId>
-          <artifactId>clover</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>org.apache.avro</groupId>
           <artifactId>avro</artifactId>
         </exclusion>
@@ -212,10 +208,6 @@
           <artifactId>jersey-guice</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>com.cenqua.clover</groupId>
-          <artifactId>clover</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>com.google.inject.extensions</groupId>
           <artifactId>guice-servlet</artifactId>
         </exclusion>
@@ -263,10 +255,6 @@
           <groupId>com.google.inject.extensions</groupId>
           <artifactId>guice-servlet</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>com.cenqua.clover</groupId>
-          <artifactId>clover</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
 
@@ -291,10 +279,6 @@
           <groupId>com.google.inject.extensions</groupId>
           <artifactId>guice-servlet</artifactId>
         </exclusion>
-        <exclusion>
-          <groupId>com.cenqua.clover</groupId>
-          <artifactId>clover</artifactId>
-        </exclusion>
       </exclusions>
     </dependency>
   </dependencies>

Modified: hadoop/common/branches/HDFS-3092/hadoop-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-project/pom.xml?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-project/pom.xml Tue Apr 24 19:05:09 2012
@@ -604,12 +604,6 @@
       </dependency>
 
       <dependency>
-        <groupId>com.cenqua.clover</groupId>
-        <artifactId>clover</artifactId>
-        <version>3.0.2</version>
-      </dependency>
-
-      <dependency>
         <groupId>org.apache.zookeeper</groupId>
         <artifactId>zookeeper</artifactId>
         <version>3.4.2</version>
@@ -902,6 +896,7 @@
             <groupId>com.atlassian.maven.plugins</groupId>
             <artifactId>maven-clover2-plugin</artifactId>
             <configuration>
+              <includesAllSourceRoots>true</includesAllSourceRoots>
               <includesTestSourceRoots>true</includesTestSourceRoots>
               <licenseLocation>${cloverLicenseLocation}</licenseLocation>
               <cloverDatabase>${cloverDatabase}</cloverDatabase>
@@ -912,8 +907,8 @@
             </configuration>
             <executions>
               <execution>
-                <id>setup</id>
-                <phase>generate-sources</phase>
+                <id>clover-setup</id>
+                <phase>process-sources</phase>
                 <goals>
                   <goal>setup</goal>
                 </goals>

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HistoryEventEmitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HistoryEventEmitter.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HistoryEventEmitter.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HistoryEventEmitter.java Tue Apr 24 19:05:09 2012
@@ -80,6 +80,8 @@ abstract class HistoryEventEmitter {
     }
 
     counters = counters.replace("\\.", "\\\\.");
+    counters = counters.replace("\\\\{", "\\{");
+    counters = counters.replace("\\\\}", "\\}");
     counters = counters.replace("\\\\(", "\\(");
     counters = counters.replace("\\\\)", "\\)");
     counters = counters.replace("\\\\[", "\\[");

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java Tue Apr 24 19:05:09 2012
@@ -20,12 +20,13 @@ package org.apache.hadoop.streaming;
 
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.util.Map;
+
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.junit.After;
 import org.junit.Before;
 
@@ -38,8 +39,6 @@ public class TestFileArgs extends TestSt
   private MiniDFSCluster dfs = null;
   private MiniMRCluster mr = null;
   private FileSystem fileSys = null;
-  private String strJobTracker = null;
-  private String strNamenode = null;
   private String namenode = null;
   private Configuration conf = null;
 
@@ -56,8 +55,6 @@ public class TestFileArgs extends TestSt
     fileSys = dfs.getFileSystem();
     namenode = fileSys.getUri().getAuthority();
     mr  = new MiniMRCluster(1, namenode, 1);
-    strJobTracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.createJobConf().get(JTConfig.JT_IPC_ADDRESS);
-    strNamenode = "fs.default.name=" + mr.createJobConf().get("fs.default.name");
 
     map = LS_PATH;
     FileSystem.setDefaultUri(conf, "hdfs://" + namenode);
@@ -100,18 +97,16 @@ public class TestFileArgs extends TestSt
 
   @Override
   protected String[] genArgs() {
+    for (Map.Entry<String, String> entry : mr.createJobConf()) {
+      args.add("-jobconf");
+      args.add(entry.getKey() + "=" + entry.getValue());
+    }
     args.add("-file");
     args.add(new java.io.File("target/sidefile").getAbsolutePath());
     args.add("-numReduceTasks");
     args.add("0");
     args.add("-jobconf");
-    args.add(strNamenode);
-    args.add("-jobconf");
-    args.add(strJobTracker);
-    args.add("-jobconf");
     args.add("mapred.jar=" + STREAMING_JAR);
-    args.add("-jobconf");
-    args.add("mapreduce.framework.name=yarn");
     args.add("-verbose");
     return super.genArgs();
   }

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java Tue Apr 24 19:05:09 2012
@@ -19,14 +19,10 @@
 package org.apache.hadoop.streaming;
 
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.DataOutputStream;
-import java.io.InputStreamReader;
-import java.io.BufferedReader;
-import java.util.Arrays;
+import java.util.Map;
 import java.util.zip.ZipEntry;
-import java.util.jar.JarOutputStream;
 import java.util.zip.ZipOutputStream;
 
 import org.apache.commons.logging.Log;
@@ -37,12 +33,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.*;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.util.StringUtils;
-
-import org.junit.Test;
-import static org.junit.Assert.*;
 
 /**
  * This class tests cacheArchive option of streaming
@@ -66,8 +57,6 @@ public class TestMultipleArchiveFiles ex
   private MiniDFSCluster dfs = null;
   private MiniMRCluster mr = null;
   private FileSystem fileSys = null;
-  private String strJobTracker = null;
-  private String strNamenode = null;
   private String namenode = null;
 
   public TestMultipleArchiveFiles() throws Exception {
@@ -80,8 +69,6 @@ public class TestMultipleArchiveFiles ex
     fileSys = dfs.getFileSystem();
     namenode = fileSys.getUri().getAuthority();
     mr  = new MiniMRCluster(1, namenode, 1);
-    strJobTracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.createJobConf().get(JTConfig.JT_IPC_ADDRESS);
-    strNamenode = "fs.default.name=" + mr.createJobConf().get("fs.default.name");
 
     map = "xargs cat";
     reduce = "cat";
@@ -123,6 +110,10 @@ public class TestMultipleArchiveFiles ex
     String cache1 = workDir + CACHE_ARCHIVE_1 + "#symlink1";
     String cache2 = workDir + CACHE_ARCHIVE_2 + "#symlink2";
 
+    for (Map.Entry<String, String> entry : mr.createJobConf()) {
+      args.add("-jobconf");
+      args.add(entry.getKey() + "=" + entry.getValue());
+    }
     args.add("-jobconf");
     args.add("mapreduce.job.reduces=1");
     args.add("-cacheArchive");
@@ -130,13 +121,7 @@ public class TestMultipleArchiveFiles ex
     args.add("-cacheArchive");
     args.add(cache2);
     args.add("-jobconf");
-    args.add(strNamenode);
-    args.add("-jobconf");
-    args.add(strJobTracker);
-    args.add("-jobconf");
     args.add("mapred.jar=" + STREAMING_JAR);
-    args.add("-jobconf");
-    args.add("mapreduce.framework.name=yarn");
     return super.genArgs();
   }
 

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleCachefiles.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleCachefiles.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleCachefiles.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleCachefiles.java Tue Apr 24 19:05:09 2012
@@ -22,8 +22,9 @@ import java.io.BufferedReader;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
-import java.io.PrintWriter;
-import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
 
 import org.junit.Test;
 import static org.junit.Assert.*;
@@ -36,7 +37,7 @@ import org.apache.hadoop.hdfs.MiniDFSClu
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.Utils;
-import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
+
 /**
  * This test case tests the symlink creation
  * utility provided by distributed caching 
@@ -73,15 +74,18 @@ public class TestMultipleCachefiles
       String namenode = fileSys.getUri().toString();
 
       mr  = new MiniMRCluster(1, namenode, 3);
-      String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.createJobConf().get(JTConfig.JT_IPC_ADDRESS);
-      String strNamenode = "fs.default.name=" + mr.createJobConf().get("fs.default.name");
+
+      List<String> args = new ArrayList<String>();
+      for (Map.Entry<String, String> entry : mr.createJobConf()) {
+        args.add("-jobconf");
+        args.add(entry.getKey() + "=" + entry.getValue());
+      }
+
       String argv[] = new String[] {
         "-input", INPUT_FILE,
         "-output", OUTPUT_DIR,
         "-mapper", map,
         "-reducer", reduce,
-        "-jobconf", strNamenode,
-        "-jobconf", strJobtracker,
         "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
         "-jobconf", 
           JobConf.MAPRED_MAP_TASK_JAVA_OPTS + "=" +
@@ -98,9 +102,13 @@ public class TestMultipleCachefiles
         "-cacheFile", fileSys.getUri() + CACHE_FILE + "#" + mapString,
         "-cacheFile", fileSys.getUri() + CACHE_FILE_2 + "#" + mapString2,
         "-jobconf", "mapred.jar=" + TestStreaming.STREAMING_JAR,
-        "-jobconf", "mapreduce.framework.name=yarn"
       };
 
+      for (String arg : argv) {
+        args.add(arg);
+      }
+      argv = args.toArray(new String[args.size()]);
+      
       fileSys.delete(new Path(OUTPUT_DIR), true);
       
       DataOutputStream file = fileSys.create(new Path(INPUT_FILE));

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java Tue Apr 24 19:05:09 2012
@@ -19,6 +19,9 @@
 package org.apache.hadoop.streaming;
 
 import java.io.*;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -47,20 +50,30 @@ public class TestStreamingTaskLog {
   final long USERLOG_LIMIT_KB = 5;//consider 5kb as logSize
 
   String[] genArgs() {
-    return new String[] {
+
+    List<String> args = new ArrayList<String>();
+    for (Map.Entry<String, String> entry : mr.createJobConf()) {
+      args.add("-jobconf");
+      args.add(entry.getKey() + "=" + entry.getValue());
+    }
+
+    String[] argv = new String[] {
       "-input", inputPath.toString(),
       "-output", outputPath.toString(),
       "-mapper", map,
       "-reducer", StreamJob.REDUCE_NONE,
-      "-jobconf", "mapred.job.tracker=" + mr.createJobConf().get(JTConfig.JT_IPC_ADDRESS),
-      "-jobconf", "fs.default.name=" + fs.getUri().toString(),
       "-jobconf", "mapred.map.tasks=1",
       "-jobconf", "keep.failed.task.files=true",
       "-jobconf", "mapreduce.task.userlog.limit.kb=" + USERLOG_LIMIT_KB,
       "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
       "-jobconf", "mapred.jar=" + TestStreaming.STREAMING_JAR,
-      "-jobconf", "mapreduce.framework.name=yarn"
     };
+
+    for (String arg : argv) {
+      args.add(arg);
+    }
+    argv = args.toArray(new String[args.size()]);
+    return argv;
   }
 
   /**

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java?rev=1329947&r1=1329946&r2=1329947&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java Tue Apr 24 19:05:09 2012
@@ -21,6 +21,9 @@ package org.apache.hadoop.streaming;
 import java.io.BufferedReader;
 import java.io.DataOutputStream;
 import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
 
 import org.junit.Test;
 import static org.junit.Assert.*;
@@ -62,17 +65,20 @@ public class TestSymLink
       FileSystem fileSys = dfs.getFileSystem();
       String namenode = fileSys.getUri().toString();
       mr  = new MiniMRCluster(1, namenode, 3);
+
+      List<String> args = new ArrayList<String>();
+      for (Map.Entry<String, String> entry : mr.createJobConf()) {
+        args.add("-jobconf");
+        args.add(entry.getKey() + "=" + entry.getValue());
+      }
+
       // During tests, the default Configuration will use a local mapred
       // So don't specify -config or -cluster
-      String strJobtracker = JTConfig.JT_IPC_ADDRESS + "=localhost:" + mr.createJobConf().get(JTConfig.JT_IPC_ADDRESS);
-      String strNamenode = "fs.default.name=" + mr.createJobConf().get("fs.default.name");
       String argv[] = new String[] {
         "-input", INPUT_FILE,
         "-output", OUTPUT_DIR,
         "-mapper", map,
         "-reducer", reduce,
-        "-jobconf", strNamenode,
-        "-jobconf", strJobtracker,
         "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
         "-jobconf", 
           JobConf.MAPRED_MAP_TASK_JAVA_OPTS+ "=" +
@@ -88,9 +94,13 @@ public class TestSymLink
                      conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
         "-cacheFile", fileSys.getUri() + CACHE_FILE + "#testlink",
         "-jobconf", "mapred.jar=" + TestStreaming.STREAMING_JAR,
-        "-jobconf", "mapreduce.framework.name=yarn"
       };
 
+      for (String arg : argv) {
+        args.add(arg);
+      }
+      argv = args.toArray(new String[args.size()]);
+  
       fileSys.delete(new Path(OUTPUT_DIR), true);
       
       DataOutputStream file = fileSys.create(new Path(INPUT_FILE));



Mime
View raw message