hadoop-common-commits mailing list archives

From: sur...@apache.org
Subject: svn commit: r1362639 [3/3] - in /hadoop/common/branches/branch-1-win: ./ bin/ ivy/ lib/jdiff/ src/c++/libhdfs/ src/c++/libhdfs/m4/ src/c++/pipes/ src/c++/pipes/impl/ src/c++/task-controller/impl/ src/c++/utils/ src/c++/utils/impl/ src/contrib/streaming...
Date: Tue, 17 Jul 2012 20:36:12 GMT
Modified: hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskLog.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskLog.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskLog.java (original)
+++ hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskLog.java Tue Jul 17 20:36:07 2012
@@ -23,9 +23,11 @@ import java.io.BufferedReader;
 import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.io.StringReader;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Enumeration;
@@ -193,8 +195,22 @@ public class TaskLog {
         new HashMap<LogName, LogFileDetail>();
 
     File indexFile = getIndexFile(taskid, isCleanup);
-    BufferedReader fis = new BufferedReader(new InputStreamReader(
-      SecureIOUtils.openForRead(indexFile, obtainLogDirOwner(taskid))));
+    BufferedReader fis;
+    try {
+      fis = new BufferedReader(new InputStreamReader(
+        SecureIOUtils.openForRead(indexFile, obtainLogDirOwner(taskid))));
+    } catch (FileNotFoundException ex) {
+      LOG.warn("Index file for the log of " + taskid + " does not exist.");
+
+      // Assume task reuse is not enabled and the log files exist in the attempt dir
+      StringBuffer input = new StringBuffer();
+      input.append(LogFileDetail.LOCATION
+                     + getAttemptDir(taskid, isCleanup) + "\n");
+      for (LogName logName : LOGS_TRACKED_BY_INDEX_FILES) {
+        input.append(logName + ":0 -1\n");
+      }
+      fis = new BufferedReader(new StringReader(input.toString()));
+    }
     //the format of the index file is
     //LOG_DIR: <the dir where the task logs are really stored>
     //stdout:<start-offset in the stdout file> <length>
@@ -202,7 +218,7 @@ public class TaskLog {
     //syslog:<start-offset in the syslog file> <length>
     String str = fis.readLine();
     if (str == null) { //the file doesn't have anything
-      throw new IOException ("Index file for the log of " + taskid+" doesn't exist.");
+      throw new IOException("Index file for the log of " + taskid + " is empty.");
     }
     String loc = str.substring(str.indexOf(LogFileDetail.LOCATION)+
         LogFileDetail.LOCATION.length());
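
A note on the fallback above: when the index file is missing, the new catch
block synthesizes an equivalent in-memory index that points every tracked log
at the attempt directory, with offset 0 and length -1 meaning "read to end of
file", so the parsing code that follows runs unchanged. Below is a minimal,
self-contained sketch of a parser for that index format; the names are
illustrative, not the actual TaskLog/LogFileDetail internals.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;

public class LogIndexParser {
  static final String LOCATION = "LOG_DIR:"; // mirrors LogFileDetail.LOCATION

  public final String logDir;                // where the log files live
  public final Map<String, long[]> offsets = // log name -> {offset, length}
      new HashMap<String, long[]>();

  /** Parses "LOG_DIR: <dir>" followed by "<name>:<offset> <length>" lines;
   *  a length of -1 means "read to end of file", as in the fallback above. */
  public LogIndexParser(String index) throws IOException {
    BufferedReader in = new BufferedReader(new StringReader(index));
    String first = in.readLine();
    if (first == null || !first.startsWith(LOCATION)) {
      throw new IOException("Malformed index: missing " + LOCATION);
    }
    logDir = first.substring(LOCATION.length()).trim();
    String line;
    while ((line = in.readLine()) != null) {
      int colon = line.indexOf(':');
      String[] offLen = line.substring(colon + 1).split(" ");
      offsets.put(line.substring(0, colon), new long[] {
          Long.parseLong(offLen[0]), Long.parseLong(offLen[1]) });
    }
  }
}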

Modified: hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskTracker.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskTracker.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskTracker.java (original)
+++ hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapred/TaskTracker.java Tue Jul 17 20:36:07 2012
@@ -417,6 +417,8 @@ public class TaskTracker implements MRCo
 
   private ShuffleServerInstrumentation shuffleServerMetrics;
 
+  private ShuffleExceptionTracker shuffleExceptionTracking;
+
   private TaskTrackerInstrumentation myInstrumentation = null;
 
   public TaskTrackerInstrumentation getTaskTrackerInstrumentation() {
@@ -1478,9 +1480,33 @@ public class TaskTracker implements MRCo
       conf.get("mapreduce.reduce.shuffle.catch.exception.stack.regex");
     String exceptionMsgRegex =
       conf.get("mapreduce.reduce.shuffle.catch.exception.message.regex");
+    // Percent of shuffle exceptions (out of the sample size) seen before
+    // it is fatal - acceptable values are 0 to 1.0, and 0 disables the
+    // check. I.e. 0.3 means 30% of the last N tracked requests matched
+    // the exception, so abort.
+    float shuffleExceptionLimit =
+      conf.getFloat(
+          "mapreduce.reduce.shuffle.catch.exception.percent.limit.fatal", 0);
+    if ((shuffleExceptionLimit > 1) || (shuffleExceptionLimit < 0)) {
+      throw new IllegalArgumentException(
+          "mapreduce.reduce.shuffle.catch.exception.percent.limit.fatal"
+              + " must be between 0 and 1.0");
+    }
+
+    // The number of trailing requests we track, used for the fatal
+    // limit calculation
+    int shuffleExceptionSampleSize =
+      conf.getInt("mapreduce.reduce.shuffle.catch.exception.sample.size", 1000);
+    if (shuffleExceptionSampleSize <= 0) {
+      throw new IllegalArgumentException(
+          "mapreduce.reduce.shuffle.catch.exception.sample.size"
+              + " must be greater than 0");
+    }
+    shuffleExceptionTracking =
+      new ShuffleExceptionTracker(shuffleExceptionSampleSize, exceptionStackRegex,
+          exceptionMsgRegex, shuffleExceptionLimit);
 
-    server.setAttribute("exceptionStackRegex", exceptionStackRegex);
-    server.setAttribute("exceptionMsgRegex", exceptionMsgRegex);
+    server.setAttribute("shuffleExceptionTracking", shuffleExceptionTracking);
 
     server.addInternalServlet("mapOutput", "/mapOutput", MapOutputServlet.class);
     server.addServlet("taskLog", "/tasklog", TaskLogServlet.class);
@@ -3814,10 +3840,8 @@ public class TaskTracker implements MRCo
         (ShuffleServerInstrumentation) context.getAttribute("shuffleServerMetrics");
       TaskTracker tracker = 
         (TaskTracker) context.getAttribute("task.tracker");
-      String exceptionStackRegex =
-        (String) context.getAttribute("exceptionStackRegex");
-      String exceptionMsgRegex =
-        (String) context.getAttribute("exceptionMsgRegex");
+      ShuffleExceptionTracker shuffleExceptionTracking =
+        (ShuffleExceptionTracker) context.getAttribute("shuffleExceptionTracking");
 
       verifyRequest(request, response, tracker, jobId);
 
@@ -3930,7 +3954,9 @@ public class TaskTracker implements MRCo
                            ") failed :\n"+
                            StringUtils.stringifyException(ie));
         log.warn(errorMsg);
-        checkException(ie, exceptionMsgRegex, exceptionStackRegex, shuffleMetrics);
+        if (shuffleExceptionTracking.checkException(ie)) {
+          shuffleMetrics.exceptionsCaught();
+        }
         if (isInputException) {
           tracker.mapOutputLost(TaskAttemptID.forName(mapId), errorMsg);
         }
@@ -3951,40 +3977,10 @@ public class TaskTracker implements MRCo
         }
       }
       outStream.close();
+      shuffleExceptionTracking.success();
       shuffleMetrics.successOutput();
     }
     
-    protected void checkException(IOException ie, String exceptionMsgRegex,
-        String exceptionStackRegex, ShuffleServerInstrumentation shuffleMetrics) {
-      // parse exception to see if it looks like a regular expression you
-      // configure. If both msgRegex and StackRegex set then make sure both
-      // match, otherwise only the one set has to match.
-      if (exceptionMsgRegex != null) {
-        String msg = ie.getMessage();
-        if (msg == null || !msg.matches(exceptionMsgRegex)) {
-          return;
-        }
-      }
-      if (exceptionStackRegex != null
-          && !checkStackException(ie, exceptionStackRegex)) {
-        return;
-      }
-      shuffleMetrics.exceptionsCaught();
-    }
-
-    private boolean checkStackException(IOException ie,
-        String exceptionStackRegex) {
-      StackTraceElement[] stack = ie.getStackTrace();
-
-      for (StackTraceElement elem : stack) {
-        String stacktrace = elem.toString();
-        if (stacktrace.matches(exceptionStackRegex)) {
-          return true;
-        }
-      }
-      return false;
-    }
-
 
     /**
      * verify that request has correct HASH for the url
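
The ShuffleExceptionTracker class itself is added elsewhere in this commit
(this message is part 3 of 3), so only its call sites appear in the hunks
above. Judging from those call sites and the TestShuffleExceptionCount
changes further down, it records each request's outcome in a fixed-size
window over the most recent shuffle requests and aborts once the fraction of
exception-matching requests reaches the configured fatal limit. A rough
sketch of such a tracker follows, with hypothetical names, the regex matching
of checkException() left out, and an exception standing in for the real
abort:

import java.util.BitSet;

public class WindowedExceptionTracker {
  private final int size;         // number of trailing requests tracked
  private final float fatalLimit; // 0 disables; e.g. 0.3 aborts at 30%
  private final BitSet window;    // bit set = that request saw an exception
  private int index;              // next slot to overwrite (ring buffer)
  private int count;              // exceptions currently in the window

  public WindowedExceptionTracker(int size, float fatalLimit) {
    this.size = size;
    this.fatalLimit = fatalLimit;
    this.window = new BitSet(size);
  }

  public synchronized void success() { record(false); }

  public synchronized void exception() {
    record(true);
    if (fatalLimit > 0 && getPercentExceptions() >= fatalLimit) {
      doAbort();
    }
  }

  /** Fraction of the window, not of the requests seen so far; this fixed
   *  denominator matches testShuffleExceptionTrailingSize below. */
  public synchronized float getPercentExceptions() {
    return (float) count / size;
  }

  private void record(boolean failed) {
    if (window.get(index)) {
      count--;                    // the oldest entry falls out of the window
    }
    window.set(index, failed);
    if (failed) {
      count++;
    }
    index = (index + 1) % size;
  }

  protected void doAbort() {      // the real tracker halts the TaskTracker
    throw new IllegalStateException("fatal shuffle exception rate reached");
  }
}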

Modified: hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java (original)
+++ hadoop/common/branches/branch-1-win/src/mapred/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java Tue Jul 17 20:36:07 2012
@@ -422,7 +422,6 @@ public abstract class FileInputFormat<K,
    */
   public static Path[] getInputPaths(JobContext context) {
     String dirs = context.getConfiguration().get("mapred.input.dir", "");
-    System.out.println("****" + dirs);
     String [] list = StringUtils.split(dirs);
     Path[] result = new Path[list.length];
     for (int i = 0; i < list.length; i++) {

Modified: hadoop/common/branches/branch-1-win/src/native/Makefile.am
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/native/Makefile.am?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/native/Makefile.am (original)
+++ hadoop/common/branches/branch-1-win/src/native/Makefile.am Tue Jul 17 20:36:07 2012
@@ -33,6 +33,7 @@ export PLATFORM = $(shell echo $$OS_NAME
 
 AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \
               -Isrc/org/apache/hadoop/io/compress/zlib \
+              -Isrc/org/apache/hadoop/io/compress/snappy \
               -Isrc/org/apache/hadoop/io/nativeio \
               -Isrc/org/apache/hadoop/security
 AM_LDFLAGS = @JNI_LDFLAGS@ -m$(JVM_DATA_MODEL)
@@ -41,13 +42,15 @@ AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_D
 lib_LTLIBRARIES = libhadoop.la
 libhadoop_la_SOURCES = src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c \
                        src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \
+                       src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c \
+                       src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c \
                        src/org/apache/hadoop/security/getGroup.c \
                        src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c \
                        src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c \
                        src/org/apache/hadoop/io/nativeio/file_descriptor.c \
                        src/org/apache/hadoop/io/nativeio/errno_enum.c \
                        src/org/apache/hadoop/io/nativeio/NativeIO.c
-libhadoop_la_LDFLAGS = -version-info 1:0:0
+libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS)
 libhadoop_la_LIBADD = -ldl -ljvm
 
 #

Modified: hadoop/common/branches/branch-1-win/src/native/configure.ac
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/native/configure.ac?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/native/configure.ac (original)
+++ hadoop/common/branches/branch-1-win/src/native/configure.ac Tue Jul 17 20:36:07 2012
@@ -55,6 +55,8 @@ JNI_LDFLAGS=""
 if test $JAVA_HOME != ""
 then
   JNI_LDFLAGS="-L$JAVA_HOME/jre/lib/$OS_ARCH/server"
+  JVMSOPATH=`find $JAVA_HOME/jre/ -name libjvm.so | head -n 1`
+  JNI_LDFLAGS="$JNI_LDFLAGS -L`dirname $JVMSOPATH`"
 fi
 ldflags_bak=$LDFLAGS
 LDFLAGS="$LDFLAGS $JNI_LDFLAGS"
@@ -87,6 +89,9 @@ AC_SUBST([JNI_CPPFLAGS])
 dnl Check for zlib headers
 AC_CHECK_HEADERS([zlib.h zconf.h], AC_COMPUTE_NEEDED_DSO(z,HADOOP_ZLIB_LIBRARY), AC_MSG_ERROR(Zlib headers were not found... native-hadoop library needs zlib to build. Please install the requisite zlib development package.))
 
+dnl Check for snappy headers
+AC_CHECK_HEADERS([snappy-c.h], AC_COMPUTE_NEEDED_DSO(snappy,HADOOP_SNAPPY_LIBRARY), AC_MSG_WARN(Snappy headers were not found... building without snappy.))
+
 dnl Check for headers needed by the native Group resolution implementation
 AC_CHECK_HEADERS([fcntl.h stdlib.h string.h unistd.h], [], AC_MSG_ERROR(Some system headers not found... please ensure their presence on your platform.))
 

Modified: hadoop/common/branches/branch-1-win/src/native/packageNativeHadoop.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/native/packageNativeHadoop.sh?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/native/packageNativeHadoop.sh (original)
+++ hadoop/common/branches/branch-1-win/src/native/packageNativeHadoop.sh Tue Jul 17 20:36:07 2012
@@ -62,4 +62,17 @@ then 
   done  
 fi
 
+if [ "${BUNDLE_SNAPPY_LIB}" = "true" ]
+then
+ if [ -d ${SNAPPY_LIB_DIR} ]
+ then
+   echo "Copying Snappy library from ${SNAPPY_LIB_DIR} to $DIST_LIB_DIR/"
+   cd ${SNAPPY_LIB_DIR}
+   $TAR . | (cd $DIST_LIB_DIR/; $UNTAR)
+ else
+   echo "Snappy lib directory ${SNAPPY_LIB_DIR} does not exist"
+   exit 1
+ fi
+fi
+
 #vim: ts=2: sw=2: et

Modified: hadoop/common/branches/branch-1-win/src/test/commit-tests
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/commit-tests?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/commit-tests (original)
+++ hadoop/common/branches/branch-1-win/src/test/commit-tests Tue Jul 17 20:36:07 2012
@@ -26,6 +26,7 @@
 **/TestGetFileBlockLocations.java
 **/TestGlobalFilter.java
 **/TestGlobExpander.java
+**/TestGroupsCaching.java
 **/TestHarFileSystem.java
 **/TestHtmlQuoting.java
 **/TestHttpServer.java

Modified: hadoop/common/branches/branch-1-win/src/test/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/findbugsExcludeFile.xml?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/findbugsExcludeFile.xml (original)
+++ hadoop/common/branches/branch-1-win/src/test/findbugsExcludeFile.xml Tue Jul 17 20:36:07 2012
@@ -132,4 +132,9 @@
        <Method name="run" />
        <Bug pattern="DM_EXIT" />
     </Match>
+    <Match>
+       <Class name="org.apache.hadoop.mapred.ShuffleExceptionTracker" />
+       <Method name="doAbort" />
+       <Bug pattern="DM_EXIT" />
+    </Match>
 </FindBugsFilter>

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/TestFileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/TestFileUtil.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/TestFileUtil.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/fs/TestFileUtil.java Tue Jul 17 20:36:07 2012
@@ -79,6 +79,9 @@ public class TestFileUtil {
     File linkDir = new File(del, "tmpDir");
     FileUtil.symLink(tmp.toString(), linkDir.toString());
     Assert.assertEquals(5, del.listFiles().length);
+
+    // create a cycle using symlinks. Cycles should be handled
+    FileUtil.symLink(del.toString(), del.toString() + "/" + DIR + "1/cycle");
   }
 
   @After
@@ -312,4 +315,17 @@ public class TestFileUtil {
       //Expected an IOException
     }
   }
+
+  /**
+   * Test that getDU is able to handle cycles caused by symbolic links
+   * and that directory sizes are not added to the final calculated size
+   * @throws IOException
+   */
+  @Test
+  public void testGetDU() throws IOException {
+    setupDirs();
+
+    long du = FileUtil.getDU(TEST_DIR);
+    Assert.assertEquals(0, du);
+  }
 }
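
The new assertions depend on FileUtil.getDU doing two things: terminating
despite the symlink cycle created in setupDirs, and summing only regular-file
lengths rather than directory entry sizes. The actual FileUtil change is
elsewhere in this commit; one common way to get both properties is to recurse
on canonical paths and skip anything already visited, roughly:

import java.io.File;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

public class CycleSafeDU {
  /** Total length of regular files under dir; each canonical directory is
   *  walked at most once, so symlink cycles terminate, and directory
   *  entries themselves contribute nothing to the total. */
  public static long getDU(File dir) throws IOException {
    return getDU(dir, new HashSet<String>());
  }

  private static long getDU(File dir, Set<String> visited) throws IOException {
    if (!visited.add(dir.getCanonicalPath())) {
      return 0;                   // already walked: a symlink cycle
    }
    long size = 0;
    File[] children = dir.listFiles();
    if (children != null) {
      for (File child : children) {
        size += child.isDirectory() ? getDU(child, visited) : child.length();
      }
    }
    return size;
  }
}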

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestAbandonBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestAbandonBlock.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestAbandonBlock.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestAbandonBlock.java Tue Jul 17 20:36:07 2012
@@ -23,11 +23,15 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
+import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.util.StringUtils;
 
+import static org.junit.Assert.*;
+
 public class TestAbandonBlock extends junit.framework.TestCase {
   public static final Log LOG = LogFactory.getLog(TestAbandonBlock.class);
   
@@ -68,4 +72,37 @@ public class TestAbandonBlock extends ju
       try{cluster.shutdown();} catch(Exception e) {}
     }
   }
+
+  /** Make sure that the quota is decremented correctly when a block is abandoned */
+  public void testQuotaUpdatedWhenBlockAbandoned() throws IOException {
+    MiniDFSCluster cluster = new MiniDFSCluster(CONF, 2, true, null);
+    FileSystem fs = cluster.getFileSystem();
+    DistributedFileSystem dfs = (DistributedFileSystem)fs;
+
+    try {
+      // Setting diskspace quota to 3MB
+      dfs.setQuota(new Path("/"), FSConstants.QUOTA_DONT_SET, 3 * 1024 * 1024);
+
+      // Start writing a file with 2 replicas to ensure each datanode has one.
+      // Block Size is 1MB.
+      String src = FILE_NAME_PREFIX + "test_quota1";
+      FSDataOutputStream fout = fs.create(new Path(src), true, 4096, (short)2, 1024 * 1024);
+      for (int i = 0; i < 1024; i++) {
+        fout.writeByte(123);
+      }
+
+      // Shutdown one datanode, causing the block abandonment.
+      cluster.getDataNodes().get(0).shutdown();
+
+      // Close the file, new block will be allocated with 2MB pending size.
+      try {
+        fout.close();
+      } catch (QuotaExceededException e) {
+        fail("Unexpected quota exception when closing fout");
+      }
+    } finally {
+      try{fs.close();} catch(Exception e) {}
+      try{cluster.shutdown();} catch(Exception e) {}
+    }
+  }
 }
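
The arithmetic behind the test's constants: with a 1MB block size and
replication 2, each block under construction reserves 2MB against the 3MB
diskspace quota. If the block abandoned when the datanode shuts down did not
give its 2MB back, the replacement block allocated at close() would push the
pending total to 4MB, exceeding the quota, and close() would throw the
QuotaExceededException the test guards against.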

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestDFSClientRetries.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestDFSClientRetries.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestDFSClientRetries.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestDFSClientRetries.java Tue Jul 17 20:36:07 2012
@@ -478,4 +478,35 @@ public class TestDFSClientRetries extend
       server.stop();
     }
   }
+
+  public void testGetFileChecksum() throws Exception {
+    final String f = "/testGetFileChecksum";
+    final Path p = new Path(f);
+
+    final Configuration conf = new Configuration();
+    final MiniDFSCluster cluster = new MiniDFSCluster(conf, 3, true, null);
+    try {
+      cluster.waitActive();
+
+      //create a file
+      final FileSystem fs = cluster.getFileSystem();
+      DFSTestUtil.createFile(fs, p, 1L << 20, (short)3, 20100402L);
+
+      //get checksum
+      final FileChecksum cs1 = fs.getFileChecksum(p);
+      assertTrue(cs1 != null);
+
+      //stop the first datanode
+      final List<LocatedBlock> locatedblocks = DFSClient.callGetBlockLocations(
+          cluster.getNameNode(), f, 0, Long.MAX_VALUE).getLocatedBlocks();
+      final DatanodeInfo first = locatedblocks.get(0).getLocations()[0];
+      cluster.stopDataNode(first.getName());
+
+      //get checksum again
+      final FileChecksum cs2 = fs.getFileChecksum(p);
+      assertEquals(cs1, cs2);
+    } finally {
+      cluster.shutdown();
+    }
+  }
 }

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/security/TestDelegationToken.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/security/TestDelegationToken.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/security/TestDelegationToken.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/security/TestDelegationToken.java Tue Jul 17 20:36:07 2012
@@ -69,13 +69,18 @@ public class TestDelegationToken {
     cluster.waitActive();
     cluster.getNameNode().getNamesystem().getDelegationTokenSecretManager()
 				.startThreads();
+    LOG.info("cluster up and running");
   }
 
   @After
   public void tearDown() throws Exception {
+    LOG.info("shutting down the cluster");
     if(cluster!=null) {
+      cluster.getNameNode().getNamesystem().getDelegationTokenSecretManager()
+             .stopThreads();
       cluster.shutdown();
     }
+    LOG.info("finished shutting down the cluster");
   }
 
   private Token<DelegationTokenIdentifier> generateDelegationToken(

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestNameNodeMXBean.java Tue Jul 17 20:36:07 2012
@@ -17,23 +17,32 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import static org.junit.Assert.*;
+
+import java.io.File;
 import java.lang.management.ManagementFactory;
+import java.util.Collection;
+import java.util.Map;
 
 import javax.management.MBeanServer;
 import javax.management.ObjectName;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.util.VersionInfo;
 
 import org.junit.Test;
+import org.mortbay.util.ajax.JSON;
+
 import junit.framework.Assert;
 
 /**
  * Class for testing {@link NameNodeMXBean} implementation
  */
 public class TestNameNodeMXBean {
+  @SuppressWarnings({ "unchecked", "deprecation" })
   @Test
   public void testNameNodeMXBeanInfo() throws Exception {
     Configuration conf = new Configuration();
@@ -84,8 +93,44 @@ public class TestNameNodeMXBean {
       String deadnodeinfo = (String) (mbs.getAttribute(mxbeanName,
           "DeadNodes"));
       Assert.assertEquals(fsn.getDeadNodes(), deadnodeinfo);
+      // get attribute NameDirStatuses
+      String nameDirStatuses = (String) (mbs.getAttribute(mxbeanName,
+          "NameDirStatuses"));
+      Assert.assertEquals(fsn.getNameDirStatuses(), nameDirStatuses);
+      Map<String, Map<String, String>> statusMap =
+        (Map<String, Map<String, String>>) JSON.parse(nameDirStatuses);
+      Collection<File> nameDirs = cluster.getNameDirs();
+      for (File nameDir : nameDirs) {
+        System.out.println("Checking for the presence of " + nameDir +
+            " in active name dirs.");
+        assertTrue(statusMap.get("active").containsKey(nameDir.getAbsolutePath()));
+      }
+      assertEquals(2, statusMap.get("active").size());
+      assertEquals(0, statusMap.get("failed").size());
+      
+      // This will cause the first dir to fail.
+      File failedNameDir = nameDirs.toArray(new File[0])[0];
+      assertEquals(0, FileUtil.chmod(failedNameDir.getAbsolutePath(), "000"));
+      cluster.getNameNode().rollEditLog();
+      
+      nameDirStatuses = (String) (mbs.getAttribute(mxbeanName,
+          "NameDirStatuses"));
+      statusMap = (Map<String, Map<String, String>>) JSON.parse(nameDirStatuses);
+      for (File nameDir : nameDirs) {
+        String expectedStatus =
+            nameDir.equals(failedNameDir) ? "failed" : "active";
+        System.out.println("Checking for the presence of " + nameDir +
+            " in " + expectedStatus + " name dirs.");
+        assertTrue(statusMap.get(expectedStatus).containsKey(
+            nameDir.getAbsolutePath()));
+      }
+      assertEquals(1, statusMap.get("active").size());
+      assertEquals(1, statusMap.get("failed").size());
     } finally {
       if (cluster != null) {
+        for (File dir : cluster.getNameDirs()) {
+          FileUtil.chmod(dir.toString(), "700");
+        }
         cluster.shutdown();
       }
     }

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestStorageDirectoryFailure.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestStorageDirectoryFailure.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestStorageDirectoryFailure.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/server/namenode/TestStorageDirectoryFailure.java Tue Jul 17 20:36:07 2012
@@ -210,4 +210,23 @@ public class TestStorageDirectoryFailure
     checkFileContents("file0");
     checkFileContents("file1");
   }
+
+  /** Test that we abort when there are no valid edit log directories
+   * remaining. */
+  @Test
+  public void testAbortOnNoValidEditDirs() throws IOException {
+    cluster.restartNameNode();
+    assertEquals(0, numRemovedDirs());
+    checkFileCreation("file9");
+    cluster.getNameNode().getFSImage().
+      removeStorageDir(new File(nameDirs.get(0)));
+    cluster.getNameNode().getFSImage().
+      removeStorageDir(new File(nameDirs.get(1)));
+    FSEditLog spyLog = spy(cluster.getNameNode().getFSImage().getEditLog());
+    doNothing().when(spyLog).fatalExit(anyString());
+    cluster.getNameNode().getFSImage().setEditLog(spyLog);
+    cluster.getNameNode().getFSImage().
+      removeStorageDir(new File(nameDirs.get(2)));
+    verify(spyLog, atLeastOnce()).fatalExit(anyString());
+  }
 }

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java Tue Jul 17 20:36:07 2012
@@ -198,7 +198,21 @@ public class TestWebHdfsFileSystemContra
   }
 
   public void testSeek() throws IOException {
-    final Path p = new Path("/test/testSeek");
+    final Path dir = new Path("/test/testSeek");
+    assertTrue(fs.mkdirs(dir));
+
+    { //test zero file size
+      final Path zero = new Path(dir, "zero");
+      fs.create(zero).close();
+      
+      int count = 0;
+      final FSDataInputStream in = fs.open(zero);
+      for(; in.read() != -1; count++);
+      in.close();
+      assertEquals(0, count);
+    }
+
+    final Path p = new Path(dir, "file");
     createFile(p);
 
     final int one_third = data.length/3;
@@ -269,7 +283,6 @@ public class TestWebHdfsFileSystemContra
       final FSDataInputStream in = fs.open(root);
       in.read();
       fail();
-      fail();
     } catch(IOException e) {
       WebHdfsFileSystem.LOG.info("This is expected.", e);
     }

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/compress/TestCodec.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/compress/TestCodec.java Tue Jul 17 20:36:07 2012
@@ -42,7 +42,6 @@ import junit.framework.TestCase;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.DataInputBuffer;
@@ -50,18 +49,17 @@ import org.apache.hadoop.io.DataOutputBu
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.RandomDatum;
 import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.io.SequenceFile.CompressionType;
-import org.apache.hadoop.io.compress.CompressorStream;
-import org.apache.hadoop.io.compress.CompressionOutputStream;
+import org.apache.hadoop.io.compress.snappy.LoadSnappy;
 import org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor;
 import org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater;
 import org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater;
 import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
 import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
 import org.apache.hadoop.io.compress.zlib.ZlibFactory;
+import org.apache.hadoop.util.ReflectionUtils;
 
 public class TestCodec extends TestCase {
 
@@ -86,6 +84,19 @@ public class TestCodec extends TestCase 
     codecTest(conf, seed, count, "org.apache.hadoop.io.compress.BZip2Codec");
   }
 
+  
+  public void testSnappyCodec() throws IOException {
+    if (LoadSnappy.isAvailable()) {
+      if (LoadSnappy.isLoaded()) {
+        codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.SnappyCodec");
+        codecTest(conf, seed, count, "org.apache.hadoop.io.compress.SnappyCodec");
+      }
+      else {
+        fail("Snappy native available but Hadoop native not");
+      }
+    }
+  }
+
   public void testGzipCodecWithParam() throws IOException {
     Configuration conf = new Configuration(this.conf);
     ZlibFactory.setCompressionLevel(conf, CompressionLevel.BEST_COMPRESSION);
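
codecTest is defined earlier in this file and not shown in this hunk. For
context, round-tripping data through a codec selected by class name goes
through the generic CompressionCodec API, roughly as in the sketch below;
this illustrates the kind of work codecTest does, not its actual source.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.util.ReflectionUtils;

public class SnappyRoundTrip {
  public static byte[] roundTrip(Configuration conf, byte[] data)
      throws Exception {
    CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(
        conf.getClassByName("org.apache.hadoop.io.compress.SnappyCodec"),
        conf);

    // compress into an in-memory buffer
    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    CompressionOutputStream out = codec.createOutputStream(compressed);
    out.write(data);
    out.close();

    // decompress and return the original bytes
    CompressionInputStream in = codec.createInputStream(
        new ByteArrayInputStream(compressed.toByteArray()));
    ByteArrayOutputStream decompressed = new ByteArrayOutputStream();
    IOUtils.copyBytes(in, decompressed, 4096, true);
    return decompressed.toByteArray();
  }
}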

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/ipc/TestIPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/ipc/TestIPC.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/ipc/TestIPC.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/ipc/TestIPC.java Tue Jul 17 20:36:07 2012
@@ -29,12 +29,20 @@ import java.util.Random;
 import java.io.DataInput;
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.net.Socket;
 import java.net.SocketTimeoutException;
 
+import javax.net.SocketFactory;
+
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 
+import static org.mockito.Mockito.*;
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
 /** Unit tests for IPC. */
 public class TestIPC extends TestCase {
   public static final Log LOG =
@@ -235,6 +243,52 @@ public class TestIPC extends TestCase {
     }
   }
 
+  /**
+   * Test that, if a RuntimeException is thrown after creating a socket
+   * but before successfully connecting to the IPC server, the failure
+   * is handled properly. This is a regression test for
+   * HADOOP-7428 (HADOOP-8294).
+   */
+  public void testRTEDuringConnectionSetup() throws Exception {
+    // Set up a socket factory which returns sockets which
+    // throw an RTE when setSoTimeout is called.
+    SocketFactory spyFactory = spy(NetUtils.getDefaultSocketFactory(conf));
+    Mockito.doAnswer(new Answer<Socket>() {
+      @Override
+      public Socket answer(InvocationOnMock invocation) throws Throwable {
+        Socket s = spy((Socket)invocation.callRealMethod());
+        doThrow(new RuntimeException("Injected fault")).when(s)
+          .setSoTimeout(anyInt());
+        return s;
+      }
+    }).when(spyFactory).createSocket();
+ 
+    Server server = new TestServer(1, true);
+    server.start();
+    try {
+      // Call should fail due to injected exception.
+      InetSocketAddress address = NetUtils.getConnectAddress(server);
+      Client client = new Client(LongWritable.class, conf, spyFactory);
+      try {
+        client.call(new LongWritable(RANDOM.nextLong()),
+                address, null, null, 0, conf);
+        fail("Expected an exception to have been thrown");
+      } catch (Exception e) {
+        LOG.info("caught expected exception", e);
+        assertTrue(StringUtils.stringifyException(e).contains(
+            "Injected fault"));
+      }
+      // Resetting to the normal socket behavior should succeed
+      // (i.e. it should not have cached a half-constructed connection)
+  
+      Mockito.reset(spyFactory);
+      client.call(new LongWritable(RANDOM.nextLong()),
+          address, null, null, 0, conf);
+    } finally {
+      server.stop();
+    }
+  }
+
   public void testIpcTimeout() throws Exception {
     // start server
     Server server = new TestServer(1, true);

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestShuffleExceptionCount.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestShuffleExceptionCount.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestShuffleExceptionCount.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/mapred/TestShuffleExceptionCount.java Tue Jul 17 20:36:07 2012
@@ -17,104 +17,197 @@
  */
 package org.apache.hadoop.mapred;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 import java.io.IOException;
-import org.apache.hadoop.metrics2.MetricsRecordBuilder;
-import static org.apache.hadoop.test.MetricsAsserts.*;
 
 import org.junit.Test;
 
 public class TestShuffleExceptionCount {
 
-  public static class TestMapOutputServlet extends TaskTracker.MapOutputServlet {
+  static boolean abortCalled = false;
+  private final float epsilon = 1e-5f;
+
+  public static class TestShuffleExceptionTracker extends ShuffleExceptionTracker {
+    private static final long serialVersionUID = 1L;
 
-    public void checkException(IOException ie, String exceptionMsgRegex,
-        String exceptionStackRegex, ShuffleServerInstrumentation shuffleMetrics) {
-      super.checkException(ie, exceptionMsgRegex, exceptionStackRegex,
-          shuffleMetrics);
+    TestShuffleExceptionTracker(int size, String exceptionStackRegex,
+        String exceptionMsgRegex, float shuffleExceptionLimit) {
+      super(size, exceptionStackRegex,
+          exceptionMsgRegex, shuffleExceptionLimit);
     }
 
+    protected void doAbort() {
+      abortCalled = true;
+    }
   }
 
   @Test
   public void testCheckException() throws IOException, InterruptedException {
-    TestMapOutputServlet testServlet = new TestMapOutputServlet();
-    JobConf conf = new JobConf();
-    conf.setUser("testuser");
-    conf.setJobName("testJob");
-    conf.setSessionId("testSession");
-
-    TaskTracker tt = new TaskTracker();
-    tt.setConf(conf);
-    ShuffleServerInstrumentation shuffleMetrics =
-      ShuffleServerInstrumentation.create(tt);
 
     // first test with only MsgRegex set but doesn't match
     String exceptionMsgRegex = "Broken pipe";
     String exceptionStackRegex = null;
+    TestShuffleExceptionTracker shuffleExceptionTracker = new TestShuffleExceptionTracker(
+        10, exceptionStackRegex, exceptionMsgRegex, 0f);
     IOException ie = new IOException("EOFException");
-    testServlet.checkException(ie, exceptionMsgRegex, exceptionStackRegex,
-        shuffleMetrics);
-    MetricsRecordBuilder rb = getMetrics(shuffleMetrics);
-    assertCounter("shuffle_exceptions_caught", 0, rb);
+    shuffleExceptionTracker.checkException(ie);
+    assertFalse("abort called when set to off", abortCalled);
 
     // test with only MsgRegex set that does match
     ie = new IOException("Broken pipe");
     exceptionStackRegex = null;
-    testServlet.checkException(ie, exceptionMsgRegex, exceptionStackRegex,
-        shuffleMetrics);
-    rb = getMetrics(shuffleMetrics);
-    assertCounter("shuffle_exceptions_caught", 1, rb);
+    shuffleExceptionTracker = new TestShuffleExceptionTracker(
+        10, exceptionStackRegex, exceptionMsgRegex, 0f);
+    shuffleExceptionTracker.checkException(ie);
+    assertFalse("abort called when set to off", abortCalled);
 
     // test with neither set, make sure incremented
     exceptionMsgRegex = null;
     exceptionStackRegex = null;
-    testServlet.checkException(ie, exceptionMsgRegex, exceptionStackRegex,
-        shuffleMetrics);
-    rb = getMetrics(shuffleMetrics);
-    assertCounter("shuffle_exceptions_caught", 2, rb);
+    shuffleExceptionTracker = new TestShuffleExceptionTracker(
+        10, exceptionStackRegex, exceptionMsgRegex, 0f);
+    shuffleExceptionTracker.checkException(ie);
+    assertFalse("abort called when set to off", abortCalled);
 
     // test with only StackRegex set doesn't match
     exceptionMsgRegex = null;
     exceptionStackRegex = ".*\\.doesnt\\$SelectSet\\.wakeup.*";
     ie.setStackTrace(constructStackTrace());
-    testServlet.checkException(ie, exceptionMsgRegex, exceptionStackRegex,
-        shuffleMetrics);
-    rb = getMetrics(shuffleMetrics);
-    assertCounter("shuffle_exceptions_caught", 2, rb);
+    shuffleExceptionTracker = new TestShuffleExceptionTracker(
+        10, exceptionStackRegex, exceptionMsgRegex, 0f);
+    shuffleExceptionTracker.checkException(ie);
+    assertFalse("abort called when set to off", abortCalled);
 
     // test with only StackRegex set does match
     exceptionMsgRegex = null;
     exceptionStackRegex = ".*\\.SelectorManager\\$SelectSet\\.wakeup.*";
-    testServlet.checkException(ie, exceptionMsgRegex, exceptionStackRegex,
-        shuffleMetrics);
-    rb = getMetrics(shuffleMetrics);
-    assertCounter("shuffle_exceptions_caught", 3, rb);
+    shuffleExceptionTracker = new TestShuffleExceptionTracker(
+        10, exceptionStackRegex, exceptionMsgRegex, 0.3f);
+    shuffleExceptionTracker.checkException(ie);
+    assertFalse("abort called when set to off", abortCalled);
 
     // test with both regex set and matches
     exceptionMsgRegex = "Broken pipe";
     ie.setStackTrace(constructStackTraceTwo());
-    testServlet.checkException(ie, exceptionMsgRegex, exceptionStackRegex,
-        shuffleMetrics);
-    rb = getMetrics(shuffleMetrics);
-    assertCounter("shuffle_exceptions_caught", 4, rb);
+    shuffleExceptionTracker = new TestShuffleExceptionTracker(
+        10, exceptionStackRegex, exceptionMsgRegex, 0.3f);
+    shuffleExceptionTracker.checkException(ie);
+    assertFalse("abort called when set to off", abortCalled);
 
     // test with both regex set and only msg matches
     exceptionStackRegex = ".*[1-9]+BOGUSREGEX";
-    testServlet.checkException(ie, exceptionMsgRegex, exceptionStackRegex,
-        shuffleMetrics);
-    rb = getMetrics(shuffleMetrics);
-    assertCounter("shuffle_exceptions_caught", 4, rb);
+    shuffleExceptionTracker = new TestShuffleExceptionTracker(
+        10, exceptionStackRegex, exceptionMsgRegex, 0.3f);
+    shuffleExceptionTracker.checkException(ie);
+    assertFalse("abort called when set to off", abortCalled);
 
     // test with both regex set and only stack matches
     exceptionStackRegex = ".*\\.SelectorManager\\$SelectSet\\.wakeup.*";
     exceptionMsgRegex = "EOFException";
-    testServlet.checkException(ie, exceptionMsgRegex, exceptionStackRegex,
-        shuffleMetrics);
-    rb = getMetrics(shuffleMetrics);
-    assertCounter("shuffle_exceptions_caught", 4, rb);
+    shuffleExceptionTracker = new TestShuffleExceptionTracker(
+        10, exceptionStackRegex, exceptionMsgRegex, 0.3f);
+    shuffleExceptionTracker.checkException(ie);
+    assertFalse("abort called when set to off", abortCalled);
+
+    exceptionMsgRegex = "Broken pipe";
+    ie.setStackTrace(constructStackTraceTwo());
+    shuffleExceptionTracker = new TestShuffleExceptionTracker(
+        10, exceptionStackRegex, exceptionMsgRegex, 0.3f);
+    shuffleExceptionTracker.checkException(ie);
+    assertFalse("abort called when set to off", abortCalled);
+  }
+
+  @Test
+  public void testExceptionCount() {
+    String exceptionMsgRegex = "Broken pipe";
+    String exceptionStackRegex = ".*\\.SelectorManager\\$SelectSet\\.wakeup.*";
+    IOException ie = new IOException("Broken pipe");
+    ie.setStackTrace(constructStackTraceTwo());
+
+    TestShuffleExceptionTracker shuffleExceptionTracker = new TestShuffleExceptionTracker(
+        10, exceptionStackRegex, exceptionMsgRegex, 0.3f);
+    shuffleExceptionTracker.checkException(ie);
+    assertFalse("abort called when set to off", abortCalled);
+    assertEquals("shuffleExceptionCount wrong", (float) 1 / (float) 10,
+        shuffleExceptionTracker.getPercentExceptions(), epsilon);
+
+    ie.setStackTrace(constructStackTraceThree());
+    shuffleExceptionTracker.checkException(ie);
+    assertFalse("abort called when set to off", abortCalled);
+    assertEquals("shuffleExceptionCount wrong", (float) 1 / (float) 10,
+        shuffleExceptionTracker.getPercentExceptions(), epsilon);
+
+    shuffleExceptionTracker.checkException(ie);
+    assertFalse("abort called when set to off", abortCalled);
+    assertEquals("shuffleExceptionCount wrong", (float) 1 / (float) 10,
+        shuffleExceptionTracker.getPercentExceptions(), epsilon);
+
+    ie.setStackTrace(constructStackTrace());
+    shuffleExceptionTracker.checkException(ie);
+    assertFalse("abort called when set to off", abortCalled);
+    assertEquals("shuffleExceptionCount wrong", (float) 2 / (float) 10,
+        shuffleExceptionTracker.getPercentExceptions(), epsilon);
+
+    shuffleExceptionTracker.checkException(ie);
+    assertTrue("abort not called", abortCalled);
+    assertEquals("shuffleExceptionCount wrong", (float) 3 / (float) 10,
+        shuffleExceptionTracker.getPercentExceptions(), epsilon);
 
   }
 
+  @Test
+  public void testShuffleExceptionTrailing() {
+    String exceptionStackRegex = ".*\\.SelectorManager\\$SelectSet\\.wakeup.*";
+    String exceptionMsgRegex = "Broken pipe";
+    int size = 5;
+    ShuffleExceptionTracker tracker = new ShuffleExceptionTracker(
+        size, exceptionStackRegex, exceptionMsgRegex, 0.3f);
+    assertEquals(size, tracker.getNumRequests());
+    assertEquals(0, tracker.getPercentExceptions(), 0);
+    tracker.success();
+    assertEquals(0, tracker.getPercentExceptions(), 0);
+    tracker.exception();
+    assertEquals((float) 1 / (float) size, tracker.getPercentExceptions(), epsilon);
+    tracker.exception();
+    tracker.exception();
+    assertEquals((float) 3 / (float) size, tracker.getPercentExceptions(), epsilon);
+    tracker.exception();
+    tracker.exception();
+    tracker.exception();
+    tracker.exception();
+    assertEquals((float) 5 / (float) size, tracker.getPercentExceptions(), epsilon);
+    // make sure we push out old ones
+    tracker.success();
+    tracker.success();
+    assertEquals((float) 3 / (float) size, tracker.getPercentExceptions(), epsilon);
+    tracker.exception();
+    tracker.exception();
+    tracker.exception();
+    tracker.exception();
+    tracker.exception();
+    assertEquals((float) 5 / (float) size, tracker.getPercentExceptions(), epsilon);
+  }
+
+  @Test
+  public void testShuffleExceptionTrailingSize() {
+    String exceptionStackRegex = ".*\\.SelectorManager\\$SelectSet\\.wakeup.*";
+    String exceptionMsgRegex = "Broken pipe";
+    int size = 1000;
+    ShuffleExceptionTracker tracker = new ShuffleExceptionTracker(
+        size, exceptionStackRegex, exceptionMsgRegex, 0.3f);
+    assertEquals(size, tracker.getNumRequests());
+    tracker.success();
+    tracker.success();
+    tracker.exception();
+    tracker.exception();
+    assertEquals((float) 2 / (float) size, tracker.getPercentExceptions(),
+        epsilon);
+  }
+
+
   /*
    * Construction exception like:
    * java.io.IOException: Broken pipe at
@@ -174,4 +267,18 @@ public class TestShuffleExceptionCount {
     return stack;
   }
 
+  /*
+   * java.io.IOException: Broken pipe at
+   * sun.nio.ch.EPollArrayWrapper.interrupt(Native Method) at
+   * sun.nio.ch.EPollArrayWrapper.interrupt(EPollArrayWrapper.java:256) at
+   * sun.nio.ch.EPollSelectorImpl.wakeup(EPollSelectorImpl.java:175) at
+   */
+  private StackTraceElement[] constructStackTraceThree() {
+    StackTraceElement[] stack = new StackTraceElement[3];
+    stack[0] = new StackTraceElement("sun.nio.ch.EPollArrayWrapper", "interrupt", "", -2);
+    stack[1] = new StackTraceElement("sun.nio.ch.EPollArrayWrapper", "interrupt", "EPollArrayWrapper.java", 256);
+    stack[2] = new StackTraceElement("sun.nio.ch.EPollSelectorImpl", "wakeup", "EPollSelectorImpl.java", 175);
+
+    return stack;
+  }
 }

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/KerberosTestUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/KerberosTestUtils.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/KerberosTestUtils.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/KerberosTestUtils.java Tue Jul 17 20:36:07 2012
@@ -13,13 +13,15 @@
  */
 package org.apache.hadoop.security.authentication;
 
-import com.sun.security.auth.module.Krb5LoginModule;
 
 import javax.security.auth.Subject;
 import javax.security.auth.kerberos.KerberosPrincipal;
 import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.login.Configuration;
 import javax.security.auth.login.LoginContext;
+
+import org.apache.hadoop.security.authentication.util.KerberosUtil;
+
 import java.io.File;
 import java.security.Principal;
 import java.security.PrivilegedActionException;
@@ -88,7 +90,7 @@ public class KerberosTestUtils {
       options.put("debug", "true");
 
       return new AppConfigurationEntry[]{
-        new AppConfigurationEntry(Krb5LoginModule.class.getName(),
+        new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(),
                                   AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                                   options),};
     }
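
Replacing the direct com.sun.security.auth.module.Krb5LoginModule import with
KerberosUtil.getKrb5LoginModuleName() lets this test compile and run on JVMs
that ship a different Kerberos login module. The utility's job is to pick the
vendor's module class name at runtime; a sketch of that idea (the real
KerberosUtil may differ):

public final class Krb5Modules {
  /** Returns the fully-qualified login module name for the running JVM. */
  public static String krb5LoginModuleName() {
    return System.getProperty("java.vendor").contains("IBM")
        ? "com.ibm.security.auth.module.Krb5LoginModule"
        : "com.sun.security.auth.module.Krb5LoginModule";
  }
}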

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java?rev=1362639&r1=1362638&r2=1362639&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/server/TestKerberosAuthenticationHandler.java Tue Jul 17 20:36:07 2012
@@ -18,15 +18,17 @@ import org.apache.hadoop.security.authen
 import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
 import junit.framework.TestCase;
 import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.ietf.jgss.GSSContext;
 import org.ietf.jgss.GSSManager;
 import org.ietf.jgss.GSSName;
 import org.junit.Ignore;
 import org.mockito.Mockito;
-import sun.security.jgss.GSSUtil;
+import org.ietf.jgss.Oid;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
+import java.lang.reflect.Field;
 import java.util.Properties;
 import java.util.concurrent.Callable;
 
@@ -116,9 +118,12 @@ public class TestKerberosAuthenticationH
         GSSContext gssContext = null;
         try {
           String servicePrincipal = KerberosTestUtils.getServerPrincipal();
-          GSSName serviceName = gssManager.createName(servicePrincipal, GSSUtil.NT_GSS_KRB5_PRINCIPAL);
-          gssContext = gssManager.createContext(serviceName, GSSUtil.GSS_KRB5_MECH_OID, null,
-                                                GSSContext.DEFAULT_LIFETIME);
+          Oid oid = KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL");
+          GSSName serviceName = gssManager.createName(servicePrincipal,
+              oid);
+          oid = KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID");
+          gssContext = gssManager.createContext(serviceName, oid, null,
+                                                  GSSContext.DEFAULT_LIFETIME);
           gssContext.requestCredDeleg(true);
           gssContext.requestMutualAuth(true);
 


