hadoop-common-commits mailing list archives

From: sur...@apache.org
Subject: svn commit: r1451695 - in /hadoop/common/branches/branch-trunk-win: ./ dev-support/ hadoop-project/ hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/ hadoop-tools/hadoop-pipes/src/ hadoop-tools/hadoop-rumen/src/main/java/org/apac...
Date: Fri, 01 Mar 2013 19:37:05 GMT
Author: suresh
Date: Fri Mar  1 19:37:03 2013
New Revision: 1451695

URL: http://svn.apache.org/r1451695
Log:
Merge trunk to branch-trunk-win

Modified:
    hadoop/common/branches/branch-trunk-win/   (props changed)
    hadoop/common/branches/branch-trunk-win/BUILDING.txt
    hadoop/common/branches/branch-trunk-win/dev-support/test-patch.sh
    hadoop/common/branches/branch-trunk-win/hadoop-project/pom.xml
    hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java
    hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-pipes/src/CMakeLists.txt
    hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java

Propchange: hadoop/common/branches/branch-trunk-win/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk:r1448457-1451693

Modified: hadoop/common/branches/branch-trunk-win/BUILDING.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/BUILDING.txt?rev=1451695&r1=1451694&r2=1451695&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/BUILDING.txt (original)
+++ hadoop/common/branches/branch-trunk-win/BUILDING.txt Fri Mar  1 19:37:03 2013
@@ -88,6 +88,33 @@ Maven build goals:
   * -Dtest.exclude.pattern=**/<TESTCLASSNAME1>.java,**/<TESTCLASSNAME2>.java
 
 ----------------------------------------------------------------------------------
+Building components separately
+
+If you are building a submodule directory, all of that submodule's Hadoop
+dependencies are resolved like any other third-party dependency: from the
+Maven cache, or from a Maven repository if they are not available in the
+cache or the SNAPSHOT has 'timed out'.
+An alternative is to run 'mvn install -DskipTests' once from the top level of
+the Hadoop source tree and then work from the submodule. Keep in mind that
+SNAPSHOTs time out after a while; the Maven '-nsu' option stops Maven from
+trying to update SNAPSHOTs from external repositories.
+
+----------------------------------------------------------------------------------
+Importing projects into Eclipse
+
+Before importing the project into Eclipse, first install hadoop-maven-plugins:
+
+  $ cd hadoop-maven-plugins
+  $ mvn install
+
+Then, generate the Eclipse project files:
+
+  $ mvn eclipse:eclipse -DskipTests
+
+Finally, import into Eclipse by specifying the root directory of the project
+via [File] > [Import] > [Existing Projects into Workspace].
+
+----------------------------------------------------------------------------------
 Building distributions:
 
 Create binary distribution without native code and without documentation:
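
For reference, the workflow the new "Building components separately" section
describes looks like this end to end (a sketch; hadoop-tools/hadoop-distcp
stands in for any submodule):

  $ cd <hadoop-source-root>
  $ mvn install -DskipTests        # populate the local Maven cache once
  $ cd hadoop-tools/hadoop-distcp  # then iterate inside a single submodule
  $ mvn test -nsu                  # --no-snapshot-updates: don't re-resolve SNAPSHOTs remotely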

Modified: hadoop/common/branches/branch-trunk-win/dev-support/test-patch.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/dev-support/test-patch.sh?rev=1451695&r1=1451694&r2=1451695&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/dev-support/test-patch.sh (original)
+++ hadoop/common/branches/branch-trunk-win/dev-support/test-patch.sh Fri Mar  1 19:37:03 2013
@@ -370,12 +370,12 @@ checkTests () {
     JIRA_COMMENT="$JIRA_COMMENT
 
     {color:green}+1 tests included appear to have a timeout.{color}"
-	return 1
+	return 0
   fi
   JIRA_COMMENT="$JIRA_COMMENT
 
   {color:red}-1 one of tests included doesn't have a timeout.{color}"
-  return 0
+  return 1
 }
 
 cleanUpXml () {
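
The swap above fixes inverted return codes: by shell convention 0 signals
success, so checkTests previously reported the "+1 tests included appear to
have a timeout" case as a failure and the "-1" case as a pass. With the fix, a
caller accumulating check results behaves as intended (a sketch of the usual
pattern, not necessarily the script's exact code):

  checkTests
  (( RESULT = RESULT + $? ))   # return 1 (missing timeout) now counts as a failure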

Modified: hadoop/common/branches/branch-trunk-win/hadoop-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-project/pom.xml?rev=1451695&r1=1451694&r2=1451695&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-project/pom.xml Fri Mar  1 19:37:03 2013
@@ -391,9 +391,9 @@
       </dependency>
 
       <dependency>
-        <groupId>org.jboss.netty</groupId>
+        <groupId>io.netty</groupId>
         <artifactId>netty</artifactId>
-        <version>3.2.4.Final</version>
+        <version>3.5.11.Final</version>
       </dependency>
 
       <dependency>
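
Note that this is more than a version bump: Netty moved from the
org.jboss.netty groupId to io.netty starting with its 3.3.x line, so the two
coordinates change together. To confirm which artifact a build actually
resolves, a routine Maven check (nothing Hadoop-specific) is:

  $ mvn dependency:tree -Dincludes=io.netty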

Modified: hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java?rev=1451695&r1=1451694&r2=1451695&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java Fri Mar  1 19:37:03 2013
@@ -33,8 +33,6 @@ import org.apache.hadoop.tools.CopyListi
 import org.apache.hadoop.tools.DistCpOptions;
 import org.apache.hadoop.tools.StubContext;
 import org.apache.hadoop.security.Credentials;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -48,9 +46,6 @@ import java.util.Random;
 
 
 public class TestUniformSizeInputFormat {
-  private static final Log LOG
-                = LogFactory.getLog(TestUniformSizeInputFormat.class);
-
   private static MiniDFSCluster cluster;
   private static final int N_FILES = 20;
   private static final int SIZEOF_EACH_FILE=1024;
@@ -118,12 +113,9 @@ public class TestUniformSizeInputFormat 
     List<InputSplit> splits
             = uniformSizeInputFormat.getSplits(jobContext);
 
-    List<InputSplit> legacySplits = legacyGetSplits(listFile, nMaps);
-
     int sizePerMap = totalFileSize/nMaps;
 
     checkSplits(listFile, splits);
-    checkAgainstLegacy(splits, legacySplits);
 
     int doubleCheckedTotalSize = 0;
     int previousSplitSize = -1;
@@ -155,57 +147,6 @@ public class TestUniformSizeInputFormat 
     Assert.assertEquals(totalFileSize, doubleCheckedTotalSize);
   }
 
-  // From
-  // http://svn.apache.org/repos/asf/hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/DistCp.java
-  private List<InputSplit> legacyGetSplits(Path listFile, int numSplits)
-      throws IOException {
-
-    FileSystem fs = cluster.getFileSystem();
-    FileStatus srcst = fs.getFileStatus(listFile);
-    Configuration conf = fs.getConf();
-
-    ArrayList<InputSplit> splits = new ArrayList<InputSplit>(numSplits);
-    FileStatus value = new FileStatus();
-    Text key = new Text();
-    final long targetsize = totalFileSize / numSplits;
-    long pos = 0L;
-    long last = 0L;
-    long acc = 0L;
-    long cbrem = srcst.getLen();
-    SequenceFile.Reader sl = null;
-
-    LOG.info("Average bytes per map: " + targetsize +
-        ", Number of maps: " + numSplits + ", total size: " + totalFileSize);
-
-    try {
-      sl = new SequenceFile.Reader(conf, SequenceFile.Reader.file(listFile));
-      for (; sl.next(key, value); last = sl.getPosition()) {
-        // if adding this split would put this split past the target size,
-        // cut the last split and put this next file in the next split.
-        if (acc + value.getLen() > targetsize && acc != 0) {
-          long splitsize = last - pos;
-          FileSplit fileSplit = new FileSplit(listFile, pos, splitsize, null);
-          LOG.info ("Creating split : " + fileSplit + ", bytes in split: " + splitsize);
-          splits.add(fileSplit);
-          cbrem -= splitsize;
-          pos = last;
-          acc = 0L;
-        }
-        acc += value.getLen();
-      }
-    }
-    finally {
-      IOUtils.closeStream(sl);
-    }
-    if (cbrem != 0) {
-      FileSplit fileSplit = new FileSplit(listFile, pos, cbrem, null);
-      LOG.info ("Creating split : " + fileSplit + ", bytes in split: " + cbrem);
-      splits.add(fileSplit);
-    }
-
-    return splits;
-  }
-
   private void checkSplits(Path listFile, List<InputSplit> splits) throws IOException {
     long lastEnd = 0;
 
@@ -233,18 +174,6 @@ public class TestUniformSizeInputFormat 
     }
   }
 
-  private void checkAgainstLegacy(List<InputSplit> splits,
-                                  List<InputSplit> legacySplits)
-      throws IOException, InterruptedException {
-
-    Assert.assertEquals(legacySplits.size(), splits.size());
-    for (int index = 0; index < splits.size(); index++) {
-      FileSplit fileSplit = (FileSplit) splits.get(index);
-      FileSplit legacyFileSplit = (FileSplit) legacySplits.get(index);
-      Assert.assertEquals(fileSplit.getStart(), legacyFileSplit.getStart());
-    }
-  }
-
   @Test
   public void testGetSplits() throws Exception {
     testGetSplits(9);

Modified: hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-pipes/src/CMakeLists.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-pipes/src/CMakeLists.txt?rev=1451695&r1=1451694&r2=1451695&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-pipes/src/CMakeLists.txt (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-pipes/src/CMakeLists.txt Fri Mar  1 19:37:03 2013
@@ -21,10 +21,10 @@ find_package(OpenSSL REQUIRED)
 
 set(CMAKE_BUILD_TYPE, Release)
 
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -Wall -O2")
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -Wall -O2")
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_REENTRANT -D_FILE_OFFSET_BITS=64")
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_REENTRANT -D_FILE_OFFSET_BITS=64")
+set(PIPES_FLAGS "-g -Wall -O2 -D_REENTRANT -D_GNU_SOURCE")
+set(PIPES_FLAGS "${PIPES_FLAGS} -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64")
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${PIPES_FLAGS}")
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${PIPES_FLAGS}")
 
 include(../../../hadoop-common-project/hadoop-common/src/JNIFlags.cmake NO_POLICY_SCOPE)
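
The change folds the flags shared by C and C++ into a single PIPES_FLAGS
variable and adds -D_GNU_SOURCE and -D_LARGEFILE_SOURCE. To check that the
consolidated flags reach both compilers, a verbose build prints the full
command lines (standard CMake/make behavior, nothing specific to this module):

  $ make VERBOSE=1 2>&1 | grep FILE_OFFSET_BITS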
 

Modified: hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java?rev=1451695&r1=1451694&r2=1451695&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java Fri Mar  1 19:37:03 2013
@@ -83,6 +83,9 @@ public class JobBuilder {
   private Map<ParsedHost, ParsedHost> allHosts =
       new HashMap<ParsedHost, ParsedHost>();
 
+  private org.apache.hadoop.mapreduce.jobhistory.JhCounters EMPTY_COUNTERS =
+      new org.apache.hadoop.mapreduce.jobhistory.JhCounters();
+
   /**
    * The number of splits a task can have, before we ignore them all.
    */
@@ -459,7 +462,10 @@ public class JobBuilder {
     TaskFailed t = (TaskFailed)(event.getDatum());
     task.putDiagnosticInfo(t.error.toString());
     task.putFailedDueToAttemptId(t.failedDueToAttempt.toString());
-    // No counters in TaskFailedEvent
+    org.apache.hadoop.mapreduce.jobhistory.JhCounters counters =
+        ((TaskFailed) event.getDatum()).counters;
+    task.incorporateCounters(
+        counters == null ? EMPTY_COUNTERS : counters);
   }
 
   private void processTaskAttemptUnsuccessfulCompletionEvent(
@@ -481,7 +487,10 @@ public class JobBuilder {
     }
 
     attempt.setFinishTime(event.getFinishTime());
-
+    org.apache.hadoop.mapreduce.jobhistory.JhCounters counters =
+        ((TaskAttemptUnsuccessfulCompletion) event.getDatum()).counters;
+    attempt.incorporateCounters(
+        counters == null ? EMPTY_COUNTERS : counters);
     attempt.arraySetClockSplits(event.getClockSplits());
     attempt.arraySetCpuUsages(event.getCpuUsages());
     attempt.arraySetVMemKbytes(event.getVMemKbytes());
@@ -489,7 +498,6 @@ public class JobBuilder {
     TaskAttemptUnsuccessfulCompletion t =
         (TaskAttemptUnsuccessfulCompletion) (event.getDatum());
     attempt.putDiagnosticInfo(t.error.toString());
-    // No counters in TaskAttemptUnsuccessfulCompletionEvent
   }
 
   private void processTaskAttemptStartedEvent(TaskAttemptStartedEvent event) {


