chukwa-dev mailing list archives

From ey...@apache.org
Subject [3/4] chukwa git commit: CHUKWA-743. Revised pid locking using standard Hadoop approach. (Eric Yang)
Date Sat, 18 Apr 2015 05:13:43 GMT
CHUKWA-743. Revised pid locking using standard Hadoop approach.  (Eric Yang)


Project: http://git-wip-us.apache.org/repos/asf/chukwa/repo
Commit: http://git-wip-us.apache.org/repos/asf/chukwa/commit/ecf9b2b6
Tree: http://git-wip-us.apache.org/repos/asf/chukwa/tree/ecf9b2b6
Diff: http://git-wip-us.apache.org/repos/asf/chukwa/diff/ecf9b2b6

Branch: refs/heads/master
Commit: ecf9b2b699a58ad3d5c2efca071fa28b54a92189
Parents: 7ae6839
Author: Eric Yang <eyang@apache.org>
Authored: Sun Apr 12 16:13:56 2015 -0700
Committer: Eric Yang <eyang@apache.org>
Committed: Sun Apr 12 16:13:56 2015 -0700

----------------------------------------------------------------------
 CHANGES.txt                                     |   2 +
 bin/chukwa                                      |  57 +++-----
 bin/chukwa-daemon.sh                            |   2 +-
 pom.xml                                         |  17 ++-
 .../datacollection/agent/ChukwaAgent.java       |   4 -
 .../datacollection/collector/CollectorStub.java |   3 -
 .../collector/servlet/ServletCollector.java     |   2 -
 .../connector/PipelineConnector.java            |   5 +-
 .../connector/http/HttpConnector.java           |   3 -
 .../datacollection/writer/SeqFileWriter.java    |   4 -
 .../writer/localfs/LocalToRemoteHdfsMover.java  |   3 +-
 .../writer/localfs/LocalWriter.java             |   9 +-
 .../archive/ChukwaArchiveManager.java           |   4 +-
 .../demux/DailyChukwaRecordRolling.java         |   2 -
 .../chukwa/extraction/demux/DemuxManager.java   |   4 +-
 .../demux/HourlyChukwaRecordRolling.java        |   2 -
 .../extraction/demux/PostProcessorManager.java  |   8 +-
 .../hadoop/chukwa/hicc/HiccWebServer.java       |   7 +-
 .../chukwa/inputtools/jplugin/JPluginAgent.java |   2 -
 .../chukwa/inputtools/mdl/TorqueDataLoader.java | 112 ---------------
 .../chukwa/inputtools/plugin/metrics/Exec.java  |   2 -
 .../backfilling/QueueToWriterConnector.java     |   3 +-
 .../hadoop/chukwa/util/DaemonWatcher.java       |  42 ------
 .../org/apache/hadoop/chukwa/util/PidFile.java  | 141 -------------------
 .../datacollection/agent/TestChukwaSsl.java     |   2 +-
 .../datacollection/collector/TestCollector.java |  72 ----------
 .../chukwa/rest/resource/SetupTestEnv.java      |   8 ++
 27 files changed, 62 insertions(+), 460 deletions(-)
----------------------------------------------------------------------
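
The "standard Hadoop approach" named in the summary moves pid management out of the JVM and into the shell wrappers: the DaemonWatcher and PidFile classes are deleted below, bin/chukwa-daemon.sh records the daemon's pid in $CHUKWA_PID_DIR/chukwa-$CHUKWA_IDENT_STRING-$COMMAND.pid, and bin/chukwa probes that file with kill -0 before starting or stopping anything. A minimal sketch of the "already running" guard, with an illustrative function name that is not part of the patch:

    # Refuse to start when a live process already owns the pid file (sketch only).
    pid="$CHUKWA_PID_DIR/chukwa-$CHUKWA_IDENT_STRING-$COMMAND.pid"
    check_not_running() {
      if [ -f "$pid" ]; then
        TARGET_PID=`cat "$pid"`
        if kill -0 "$TARGET_PID" > /dev/null 2>&1; then
          echo "$COMMAND running as process $TARGET_PID.  Stop it first."
          exit 1
        fi
      fi
    }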


http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index 7acc0f8..6113fcb 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -24,6 +24,8 @@ Trunk (unreleased changes)
 
   BUGS
 
+    CHUKWA-743. Revised pid locking using standard Hadoop approach.  (Eric Yang)
+
     CHUKWA-742. Updated license header for StatusCheckerException.java and exclude 
                 README.md from release audit tool scan.  (Eric Yang)
 

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/bin/chukwa
----------------------------------------------------------------------
diff --git a/bin/chukwa b/bin/chukwa
index db0aade..3b60f14 100755
--- a/bin/chukwa
+++ b/bin/chukwa
@@ -75,55 +75,45 @@ if [ "$HADOOP_CONF_DIR" != "" ]; then
   CLASSPATH=${HADOOP_CONF_DIR}:${CLASSPATH}
 fi
 
-if [ "$CHUKWA_HICC_PORT" != "" ]; then
-  CHUKWA_HICC_PORT=$CHUKWA_HICC_PORT
-fi
-
 if [ "$CHUKWA_HICC_PORT" = "" ]; then
   CHUKWA_HICC_PORT=4080
 fi
 
-BACKGROUND="true"
+if [ "$CHUKWA_STOP_TIMEOUT" = "" ]; then
+  CHUKWA_STOP_TIMEOUT=3
+fi
 
 # configure command parameters
 if [ "$COMMAND" = "agent" ]; then
   APP='agent'
   CLASS='org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent'
-  PID="Agent"
 elif [ "$COMMAND" = "archive" ]; then
   APP='archive'
   CLASS='org.apache.hadoop.chukwa.extraction.archive.ChukwaArchiveManager'
-  PID='ArchiveManager'
 elif [ "$COMMAND" = "collector" ]; then
   APP='collector'
   CLASS='org.apache.hadoop.chukwa.datacollection.collector.CollectorStub'
-  PID="Collector"
 elif [ "$COMMAND" = "dp" ]; then
   APP='postprocess'
   CLASS='org.apache.hadoop.chukwa.extraction.demux.PostProcessorManager'
-  PID='PostProcessorManager'
 elif [ "$COMMAND" = "demux" ]; then
   APP='Demux'
   CLASS='org.apache.hadoop.chukwa.extraction.demux.DemuxManager'
-  PID='DemuxManager'
 elif [ "$COMMAND" = "hicc" ]; then
   WEB_SERVICE_COMMON=`ls ${CHUKWA_HOME}/share/chukwa/webapps/hicc.war`
   APP='hicc'
   CLASS='org.apache.hadoop.chukwa.hicc.HiccWebServer'
   CLASSPATH=${CLASSPATH}:${WEB_SERVICE_COMMON}:${HICC_JAR}
-  PID="hicc"
   HOST=`hostname`
   JAVA_OPT="${JAVA_OPT} -Djetty.host=${HOST} -Djetty.port=${CHUKWA_HICC_PORT} -Djava.net.preferIPv4Stack=true"
 elif [ "$COMMAND" = "hroll" ]; then
   APP='hroll'
   CLASS='org.apache.hadoop.chukwa.extraction.demux.HourlyChukwaRecordRolling'
   OPTS='rollInSequence true deleteRawdata true'
-  PID='HourlyChukwaRecordRolling'
 elif [ "$COMMAND" = "droll" ]; then
   APP='droll'
   CLASS='org.apache.hadoop.chukwa.extraction.demux.DailyChukwaRecordRolling'
   OPTS='rollInSequence true deleteRawdata true'
-  PID='DailyChukwaRecordRolling'
 elif [ "$COMMAND" = "version" ]; then
   echo `cat ${CHUKWA_HOME}/bin/VERSION`
   exit 0
@@ -160,14 +150,30 @@ elif [ "$COMMAND" = "tail" ]; then
   exit 0
 fi
 
-pid="${CHUKWA_PID_DIR}/$PID.pid"
+pid="$CHUKWA_PID_DIR/chukwa-$CHUKWA_IDENT_STRING-$COMMAND.pid"
 
 if [ "$1" = "start" ]; then
-  shift
+  if [ -f $pid ]; then
+    TARGET_PID=`cat $pid`
+    if [ $TARGET_PID == $$ ]; then
+      # run command
+      exec ${JAVA_HOME}/bin/java ${JAVA_OPT} \
+        -Djava.library.path=${JAVA_LIBRARY_PATH} \
+        -DCHUKWA_HOME=${CHUKWA_HOME} \
+        -DCHUKWA_CONF_DIR=${CHUKWA_CONF_DIR} \
+        -DCHUKWA_LOG_DIR=${CHUKWA_LOG_DIR} \
+        -DCHUKWA_DATA_DIR=${CHUKWA_DATA_DIR} \
+        -DAPP=${APP} -Dlog4j.configuration=chukwa-log4j.properties \
+        -classpath ${CHUKWA_CONF_DIR}:${CLASSPATH}:${CHUKWA_CLASSPATH}:${tools} ${CLASS} $OPTS $@
+    elif kill -0 `cat $pid` > /dev/null 2>&1; then
+      echo $COMMAND running as process `cat $pid`.  Stop it first.
+      exit 1
+    fi
+  fi
 fi
 
 if [ "$1" = "stop" ]; then
-  if [ -e $pid ]; then
+  if [ -f $pid ]; then
     TARGET_PID=`cat $pid`
     if kill -0 $TARGET_PID > /dev/null 2>&1; then
       kill -TERM $TARGET_PID
@@ -178,25 +184,8 @@ if [ "$1" = "stop" ]; then
       fi
     fi
   else 
-    echo "Cannot find PID file - $PID.pid; NO $PID to stop";
+    echo "Cannot find PID file - $pid.pid; NO $COMMAND to stop";
   fi
   exit 0
-elif [ -f $pid ]; then
- if kill -0 `cat $pid` > /dev/null 2>&1; then
-    echo $command running as process `cat $pid`.  Stop it first.
-    exit 1
- else
-    # pid file exists, but process is dead.
-    echo $command is not runnning, but pid file existed.
-    rm -f $pid
- fi
-fi
-
-# run command
-if [ "$BACKGROUND" = "false" ]; then
-  ${JAVA_HOME}/bin/java ${JAVA_OPT} -Djava.library.path=${JAVA_LIBRARY_PATH} -DCHUKWA_HOME=${CHUKWA_HOME} -DCHUKWA_CONF_DIR=${CHUKWA_CONF_DIR} -DCHUKWA_LOG_DIR=${CHUKWA_LOG_DIR} -DCHUKWA_DATA_DIR=${CHUKWA_DATA_DIR} -DAPP=${APP} -Dlog4j.configuration=chukwa-log4j.properties -classpath ${CHUKWA_CONF_DIR}:${CLASSPATH}:${CHUKWA_CLASSPATH}:${tools} ${CLASS} $OPTS $@
-else
-  exec ${JAVA_HOME}/bin/java ${JAVA_OPT} -Djava.library.path=${JAVA_LIBRARY_PATH} -DCHUKWA_HOME=${CHUKWA_HOME} -DCHUKWA_CONF_DIR=${CHUKWA_CONF_DIR} -DCHUKWA_LOG_DIR=${CHUKWA_LOG_DIR} -DCHUKWA_DATA_DIR=${CHUKWA_DATA_DIR} -DAPP=${APP} -Dlog4j.configuration=chukwa-log4j.properties -classpath ${CHUKWA_CONF_DIR}:${CLASSPATH}:${CHUKWA_CLASSPATH}:${tools} ${CLASS} $OPTS $@ &
-  sleep 1
 fi
 

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/bin/chukwa-daemon.sh
----------------------------------------------------------------------
diff --git a/bin/chukwa-daemon.sh b/bin/chukwa-daemon.sh
index 6c27440..c679f74 100755
--- a/bin/chukwa-daemon.sh
+++ b/bin/chukwa-daemon.sh
@@ -114,7 +114,7 @@ case $startStop in
     chukwa_rotate_log $log
     echo starting $command, logging to $log
     cd "$CHUKWA_HOME"
-    nohup nice -n $CHUKWA_NICENESS "$CHUKWA_HOME"/bin/chukwa --config $CHUKWA_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
+    nohup nice -n $CHUKWA_NICENESS "$CHUKWA_HOME"/bin/chukwa --config $CHUKWA_CONF_DIR $command start "$@" > "$log" 2>&1 < /dev/null &
     echo $! > $pid
     sleep 1; head "$log"
     ;;
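
The one-line change to chukwa-daemon.sh is what makes the new start branch in bin/chukwa work: the wrapper launches "bin/chukwa --config $CHUKWA_CONF_DIR $command start" in the background and immediately records $! in the pid file, and because nohup and nice both exec, that pid is the same shell that later runs the start branch, so the [ $TARGET_PID == $$ ] test normally passes and the script execs the JVM in place under the recorded pid. A simplified sketch of the two sides, with the classpath shortened for readability:

    # Wrapper side (chukwa-daemon.sh): launch the command and record its pid.
    nohup nice -n $CHUKWA_NICENESS "$CHUKWA_HOME"/bin/chukwa --config $CHUKWA_CONF_DIR $command start > "$log" 2>&1 < /dev/null &
    echo $! > $pid

    # Child side (bin/chukwa "start"): exec the daemon class only if the pid file names this shell.
    if [ -f "$pid" ] && [ "`cat $pid`" = "$$" ]; then
      exec ${JAVA_HOME}/bin/java ${JAVA_OPT} -classpath ${CHUKWA_CONF_DIR}:${CLASSPATH} ${CLASS} $OPTS "$@"
    fi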

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 3b9ae8d..6beaef8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -32,7 +32,7 @@
         <TODO_ALERT_EMAIL>user@example.com</TODO_ALERT_EMAIL>
         <TODO_CLUSTER_NAME>chukwa</TODO_CLUSTER_NAME>
         <TODO_COLLECTORS_LOCAL_OUTPUT_DIR>/tmp/chukwa/dataSink/</TODO_COLLECTORS_LOCAL_OUTPUT_DIR>
-        <TODO_COLLECTORS_NAMENODE>hdfs://localhost:9000</TODO_COLLECTORS_NAMENODE>
+        <TODO_COLLECTORS_NAMENODE>hdfs:///</TODO_COLLECTORS_NAMENODE>
         <TODO_COLLECTORS_ROTATEINTERVAL>300000</TODO_COLLECTORS_ROTATEINTERVAL>
         <TODO_COLLECTORS_PORT>8080</TODO_COLLECTORS_PORT>
         <TODO_COLLECTORS_ISFIXEDTIMEROTATORSCHEME>false</TODO_COLLECTORS_ISFIXEDTIMEROTATORSCHEME>
@@ -372,6 +372,17 @@
                 <filtering>true</filtering>
             </resource>
         </resources>
+        <testResources>
+            <testResource>
+                <directory>src/main/web/hicc</directory>
+                <includes>
+                    <include>/WEB-INF/jetty.xml</include>
+                </includes>
+            </testResource>
+            <testResource>
+                <directory>src/test/resources</directory>
+            </testResource>
+        </testResources>
         <plugins>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
@@ -524,7 +535,7 @@
                                 <test.cache.data>${project.build.directory}/test/var/cache</test.cache.data>
                                 <test.debug.data>${project.build.directory}/test/var/debug</test.debug.data>
                                 <test.log.dir>${project.build.directory}/test/var/log</test.log.dir>
-                                <test.build.classes>${project.build.directory}/test/classes</test.build.classes>
+                                <test.build.classes>${project.build.directory}/test-classes</test.build.classes>
                                 <CHUKWA_LOG_DIR>${project.build.directory}/test/var/log</CHUKWA_LOG_DIR>
                                 <CHUKWA_CONF_DIR>${project.build.directory}/test/conf</CHUKWA_CONF_DIR>
                                 <CHUKWA_DATA_DIR>${project.build.directory}/test/var</CHUKWA_DATA_DIR>
@@ -540,7 +551,7 @@
                                 <CHUKWA_HDFS>/chukwa</CHUKWA_HDFS>
                                 <CHUKWA_HOME>${project.build.directory}/test</CHUKWA_HOME>
                             </environmentVariables>
-                            <workingDirectory>${project.build.directory}/test/classes</workingDirectory>
+                            <workingDirectory>${project.build.directory}/test-classes</workingDirectory>
                         </configuration>
                     </execution>
                     <execution>

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java
index df4dfe1..7dad2d7 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/agent/ChukwaAgent.java
@@ -49,7 +49,6 @@ import org.apache.hadoop.chukwa.datacollection.connector.http.HttpConnector;
 import org.apache.hadoop.chukwa.datacollection.test.ConsoleOutConnector;
 import org.apache.hadoop.chukwa.util.AdaptorNamingUtils;
 import org.apache.hadoop.chukwa.util.ChukwaUtil;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
@@ -255,8 +254,6 @@ public class ChukwaAgent implements AdaptorManager {
    */
   public static void main(String[] args) throws AdaptorException {
 
-    DaemonWatcher.createInstance("Agent");
-
     try {
       if (args.length > 0 && args[0].equals("-help")) {
         System.out.println("usage:  LocalAgent [-noCheckPoint]"
@@ -300,7 +297,6 @@ public class ChukwaAgent implements AdaptorManager {
       System.out
           .println("agent started already on this machine with same portno;"
               + " bailing out");
-      DaemonWatcher.bailout(-1);
       System.exit(0); // better safe than sorry
     } catch (Exception e) {
       e.printStackTrace();

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/CollectorStub.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/CollectorStub.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/CollectorStub.java
index 73c378e..1312e2f 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/CollectorStub.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/CollectorStub.java
@@ -25,7 +25,6 @@ import org.mortbay.jetty.servlet.*;
 import org.apache.hadoop.chukwa.datacollection.collector.servlet.*;
 import org.apache.hadoop.chukwa.datacollection.connector.http.HttpConnector;
 import org.apache.hadoop.chukwa.datacollection.writer.*;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -44,7 +43,6 @@ public class CollectorStub {
 
   public static void main(String[] args) {
 
-    DaemonWatcher.createInstance("Collector");
     try {
       if (args.length > 0 && (args[0].equalsIgnoreCase("help")|| args[0].equalsIgnoreCase("-help"))) {
         System.out.println("usage: Normally you should just invoke CollectorStub without arguments.");
@@ -142,7 +140,6 @@ public class CollectorStub {
       System.err.close();
     } catch (Exception e) {
       e.printStackTrace();
-      DaemonWatcher.bailout(-1);
     }
 
   }

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/servlet/ServletCollector.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/servlet/ServletCollector.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/servlet/ServletCollector.java
index 61f55ec..5c3ea71 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/servlet/ServletCollector.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/collector/servlet/ServletCollector.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.chukwa.ChunkImpl;
 import org.apache.hadoop.chukwa.datacollection.writer.ChukwaWriter;
 import org.apache.hadoop.chukwa.datacollection.writer.SeqFileWriter;
 import org.apache.hadoop.chukwa.datacollection.writer.WriterException;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -141,7 +140,6 @@ public class ServletCollector extends HttpServlet {
       writer.init(conf);
     } catch (Throwable e) {
       log.warn("Exception trying to initialize SeqFileWriter",e);
-      DaemonWatcher.bailout(-1);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/PipelineConnector.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/PipelineConnector.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/PipelineConnector.java
index 73280fe..b998139 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/PipelineConnector.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/PipelineConnector.java
@@ -41,7 +41,6 @@ import org.apache.hadoop.chukwa.datacollection.writer.ChukwaWriter;
 import org.apache.hadoop.chukwa.datacollection.writer.ChukwaWriter.CommitStatus;
 import org.apache.hadoop.chukwa.datacollection.writer.PipelineStageWriter;
 import org.apache.hadoop.chukwa.datacollection.writer.WriterException;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
 
@@ -134,11 +133,11 @@ public class PipelineConnector implements Connector, Runnable {
       log.warn("PipelineStageWriter Exception: ", e);
     } catch (OutOfMemoryError e) {
       log.warn("Bailing out", e);
-      DaemonWatcher.bailout(-1);
+      throw new RuntimeException("Shutdown pipeline connector.");
     } catch (InterruptedException e) {
       // do nothing, let thread die.
       log.warn("Bailing out", e);
-      DaemonWatcher.bailout(-1);
+      throw new RuntimeException("Shutdown pipeline connector.");
     } catch (Throwable e) {
       log.error("connector failed; shutting down agent: ", e);
       throw new RuntimeException("Shutdown pipeline connector.");

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/http/HttpConnector.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/http/HttpConnector.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/http/HttpConnector.java
index f412d72..3bb0dd7 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/http/HttpConnector.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/connector/http/HttpConnector.java
@@ -47,7 +47,6 @@ import org.apache.hadoop.chukwa.datacollection.DataFactory;
 import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
 import org.apache.hadoop.chukwa.datacollection.connector.Connector;
 import org.apache.hadoop.chukwa.datacollection.sender.*;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
 
@@ -184,11 +183,9 @@ public class HttpConnector implements Connector, Runnable {
       log.info("received stop() command so exiting run() loop to shutdown connector");
     } catch (OutOfMemoryError e) {
       log.warn("Bailing out", e);
-      DaemonWatcher.bailout(-1);
     } catch (InterruptedException e) {
       // do nothing, let thread die.
       log.warn("Bailing out", e);
-      DaemonWatcher.bailout(-1);
     } catch (java.io.IOException e) {
       log.error("connector failed; shutting down agent");
       agent.shutdown(true);

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SeqFileWriter.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SeqFileWriter.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SeqFileWriter.java
index 9a9e8d2..3c0d268 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SeqFileWriter.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/SeqFileWriter.java
@@ -33,7 +33,6 @@ import java.io.IOException;
 import org.apache.hadoop.chukwa.ChukwaArchiveKey;
 import org.apache.hadoop.chukwa.Chunk;
 import org.apache.hadoop.chukwa.ChunkImpl;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -137,13 +136,11 @@ public class SeqFileWriter extends PipelineableWriter implements ChukwaWriter {
       fs = FileSystem.get(new URI(fsname), conf);
       if (fs == null) {
         log.error("can't connect to HDFS at " + fs.getUri() + " bail out!");
-        DaemonWatcher.bailout(-1);
       }
     } catch (Throwable e) {
       log.error(
           "can't connect to HDFS, trying default file system instead (likely to be local)",
           e);
-      DaemonWatcher.bailout(-1);
     }
 
     // Setup everything by rotating
@@ -365,7 +362,6 @@ public class SeqFileWriter extends PipelineableWriter implements ChukwaWriter {
       catch (Throwable e) {
         // We don't want to loose anything
         log.fatal("IOException when trying to write a chunk, Collector is going to exit!", e);
-        DaemonWatcher.bailout(-1);
         isRunning = false;
       } finally {
         lock.release();

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalToRemoteHdfsMover.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalToRemoteHdfsMover.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalToRemoteHdfsMover.java
index 77769a2..02e7907 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalToRemoteHdfsMover.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalToRemoteHdfsMover.java
@@ -23,7 +23,6 @@ import java.net.URI;
 import java.util.concurrent.BlockingQueue;
 
 import org.apache.hadoop.chukwa.util.CopySequenceFile;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -87,7 +86,7 @@ public class LocalToRemoteHdfsMover extends Thread {
     remoteFs = FileSystem.get(new URI(fsname), conf);
     if (remoteFs == null && exitIfHDFSNotavailable) {
       log.error("can't connect to HDFS at " + remoteFs.getUri() + " bail out!");
-      DaemonWatcher.bailout(-1);
+      System.exit(-1);
     } 
     
     localFs = FileSystem.getLocal(conf);

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalWriter.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalWriter.java b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalWriter.java
index efd1234..bb0fdf6 100644
--- a/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalWriter.java
+++ b/src/main/java/org/apache/hadoop/chukwa/datacollection/writer/localfs/LocalWriter.java
@@ -34,7 +34,6 @@ import org.apache.hadoop.chukwa.Chunk;
 import org.apache.hadoop.chukwa.ChunkImpl;
 import org.apache.hadoop.chukwa.datacollection.writer.ChukwaWriter;
 import org.apache.hadoop.chukwa.datacollection.writer.WriterException;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -149,7 +148,7 @@ public class LocalWriter implements ChukwaWriter {
       }
     } catch (Throwable e) {
       log.fatal("Cannot initialize LocalWriter", e);
-      DaemonWatcher.bailout(-1);
+      System.exit(-1);
     }
 
     
@@ -265,7 +264,7 @@ public class LocalWriter implements ChukwaWriter {
         if (writeChunkRetries < 0) {
           log
               .fatal("Too many IOException when trying to write a chunk, Collector is going to exit!");
-          DaemonWatcher.bailout(-1);
+          System.exit(-1);
         }
         throw new WriterException(e);
       }
@@ -320,7 +319,7 @@ public class LocalWriter implements ChukwaWriter {
         log.fatal("IO Exception in rotate. Exiting!", e);
         // Shutting down the collector
         // Watchdog will re-start it automatically
-        DaemonWatcher.bailout(-1);
+        System.exit(-1);
       }
     }
  
@@ -338,7 +337,7 @@ public class LocalWriter implements ChukwaWriter {
   
     if (freeSpace < minFreeAvailable) {
       log.fatal("No space left on device, Bail out!");
-      DaemonWatcher.bailout(-1);
+      System.exit(-1);
     } 
     
     log.debug("finished rotate()");

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveManager.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveManager.java b/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveManager.java
index 8b0085d..1ef6c00 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveManager.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/archive/ChukwaArchiveManager.java
@@ -25,7 +25,6 @@ import java.text.SimpleDateFormat;
 
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.chukwa.extraction.CHUKWA_CONSTANT;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -57,7 +56,6 @@ public class ChukwaArchiveManager implements CHUKWA_CONSTANT {
   }
 
   public static void main(String[] args) throws Exception {
-    DaemonWatcher.createInstance("ArchiveManager");
     
     ChukwaArchiveManager manager = new ChukwaArchiveManager();
     manager.start();
@@ -115,7 +113,7 @@ public class ChukwaArchiveManager implements CHUKWA_CONSTANT {
         if (maxPermittedErrorCount != -1 && errorCount >= maxPermittedErrorCount) {
           log.warn("==================\nToo many errors (" + errorCount +
                    "), Bail out!\n==================");
-          DaemonWatcher.bailout(-1);
+          System.exit(-1);
         }
         // /chukwa/archives/<YYYYMMDD>/dataSinkDirXXX
         //  to

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java
index 8d2926f..d1e2b24 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java
@@ -28,7 +28,6 @@ import java.util.List;
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.hadoop.chukwa.util.HierarchyDataType;
 import org.apache.hadoop.conf.Configured;
@@ -209,7 +208,6 @@ public class DailyChukwaRecordRolling extends Configured implements Tool {
    */
   public static void main(String[] args) throws Exception {
     
-    DaemonWatcher.createInstance("DailyChukwaRecordRolling");
     
     conf = new ChukwaConfiguration();
     String fsName = conf.get("writer.hdfs.filesystem");

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java
index 5d59c48..8fd155e 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/DemuxManager.java
@@ -27,7 +27,6 @@ import java.util.Date;
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.chukwa.extraction.CHUKWA_CONSTANT;
 import org.apache.hadoop.chukwa.util.NagiosHelper;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -67,7 +66,6 @@ public class DemuxManager implements CHUKWA_CONSTANT {
 
 
   public static void main(String[] args) throws Exception {
-    DaemonWatcher.createInstance("DemuxManager");
     
     DemuxManager manager = new DemuxManager();
     manager.start();
@@ -161,7 +159,7 @@ public class DemuxManager implements CHUKWA_CONSTANT {
          if (maxPermittedErrorCount != -1 && globalErrorcounter >= maxPermittedErrorCount) {
            log.warn("==================\nToo many errors (" + globalErrorcounter +
                     "), Bail out!\n==================");
-           DaemonWatcher.bailout(-1);
+           System.exit(-1);
          }
          
          // Check for anomalies

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java
index 9455f8d..c8f2799 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
 import org.apache.hadoop.chukwa.util.HierarchyDataType;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -158,7 +157,6 @@ public class HourlyChukwaRecordRolling extends Configured implements Tool {
    * @throws Exception
    */
   public static void main(String[] args) throws Exception {
-    DaemonWatcher.createInstance("HourlyChukwaRecordRolling");
     
     conf = new ChukwaConfiguration();
     String fsName = conf.get("writer.hdfs.filesystem");

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/extraction/demux/PostProcessorManager.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/PostProcessorManager.java b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/PostProcessorManager.java
index 2310e03..9685471 100644
--- a/src/main/java/org/apache/hadoop/chukwa/extraction/demux/PostProcessorManager.java
+++ b/src/main/java/org/apache/hadoop/chukwa/extraction/demux/PostProcessorManager.java
@@ -29,7 +29,6 @@ import java.util.List;
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.chukwa.dataloader.DataLoaderFactory;
 import org.apache.hadoop.chukwa.extraction.CHUKWA_CONSTANT;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.hadoop.chukwa.util.HierarchyDataType;
 import org.apache.hadoop.chukwa.datatrigger.TriggerAction;
@@ -76,11 +75,6 @@ public class PostProcessorManager implements CHUKWA_CONSTANT{
   }
   
   public static void main(String[] args) throws Exception {
- 
-    DaemonWatcher.createInstance("PostProcessorManager");
-    
-
-    
     PostProcessorManager postProcessorManager = new PostProcessorManager();
     postProcessorManager.start();
   }
@@ -123,7 +117,7 @@ public class PostProcessorManager implements CHUKWA_CONSTANT{
       if (maxPermittedErrorCount != -1 && errorCount >= maxPermittedErrorCount) {
         log.warn("==================\nToo many errors (" + errorCount +
                  "), Bail out!\n==================");
-        DaemonWatcher.bailout(-1);
+        throw new RuntimeException("Bail out!");
       }
 
       try {

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/hicc/HiccWebServer.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/hicc/HiccWebServer.java b/src/main/java/org/apache/hadoop/chukwa/hicc/HiccWebServer.java
index 40053e8..09e829d 100644
--- a/src/main/java/org/apache/hadoop/chukwa/hicc/HiccWebServer.java
+++ b/src/main/java/org/apache/hadoop/chukwa/hicc/HiccWebServer.java
@@ -31,7 +31,6 @@ import java.util.jar.JarEntry;
 import java.util.jar.JarFile;
 
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -61,7 +60,7 @@ public class HiccWebServer {
     }
     if(serverConf==null) {
       log.error("Unable to locate jetty-web.xml.");
-      DaemonWatcher.bailout(-1);
+      System.exit(-1);
     }
     return instance;
   }
@@ -70,12 +69,11 @@ public class HiccWebServer {
     try {
       chukwaHdfs = config.get("fs.defaultFS")+File.separator+chukwaConf.get("chukwa.data.dir");
       hiccData = chukwaHdfs+File.separator+"hicc";
-      DaemonWatcher.createInstance("hicc");
       setupDefaultData();
       run();
     } catch(Exception e) {
       log.error("HDFS unavailable, check configuration in chukwa-env.sh.");
-      DaemonWatcher.bailout(-1);
+      throw new RuntimeException("Bail out!");
     }
   }
 
@@ -213,7 +211,6 @@ public class HiccWebServer {
   public void shutdown() {
     try {
       server.stop();
-      DaemonWatcher.bailout(0);
     } catch (Exception e) {
       log.error(ExceptionUtil.getStackTrace(e));
     }

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/JPluginAgent.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/JPluginAgent.java b/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/JPluginAgent.java
index ab62a4c..0bed639 100644
--- a/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/JPluginAgent.java
+++ b/src/main/java/org/apache/hadoop/chukwa/inputtools/jplugin/JPluginAgent.java
@@ -24,7 +24,6 @@ import java.util.TimerTask;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 public class JPluginAgent {
@@ -103,7 +102,6 @@ public class JPluginAgent {
     }
 
     try {
-      DaemonWatcher.createInstance(plugin.getRecordType() + "-data-loader");
     } catch (Exception e) {
       e.printStackTrace();
     }

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/TorqueDataLoader.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/TorqueDataLoader.java b/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/TorqueDataLoader.java
deleted file mode 100644
index ca9aa69..0000000
--- a/src/main/java/org/apache/hadoop/chukwa/inputtools/mdl/TorqueDataLoader.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.chukwa.inputtools.mdl;
-
-
-import java.sql.SQLException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.chukwa.util.PidFile;
-import org.apache.hadoop.chukwa.util.ExceptionUtil;
-
-public class TorqueDataLoader {
-  private static Log log = LogFactory.getLog("TorqueDataLoader");
-
-  private TorqueInfoProcessor tp = null;
-  private PidFile loader = null;
-
-  public TorqueDataLoader(DataConfig mdlConfig, int interval) {
-    log.info("in torqueDataLoader");
-    tp = new TorqueInfoProcessor(mdlConfig, interval);
-    loader = new PidFile("TorqueDataLoader");
-  }
-
-  public void run() {
-    boolean first = true;
-    while (true) {
-      try {
-        tp.setup(first);
-        first = false;
-      } catch (Exception ex) {
-        tp.shutdown();
-
-        if (first) {
-          log.error("setup error");
-          ex.printStackTrace();
-          loader.clean(); // only call before system.exit()
-          System.exit(1);
-        }
-        log.error("setup fail, retry after 10 minutes");
-        try {
-          Thread.sleep(600 * 1000);
-        } catch (InterruptedException e) {
-          // TODO Auto-generated catch block
-          log.error(e.getMessage());
-          // e.printStackTrace();
-        }
-        continue;
-
-      }
-
-      try {
-        tp.run_forever();
-      } catch (SQLException ex) {
-        tp.shutdown();
-        log.error("processor died, reconnect again after 10 minutes");
-        ex.printStackTrace();
-        try {
-          Thread.sleep(600 * 1000);
-        } catch (InterruptedException e) {
-          // TODO Auto-generated catch block
-          log.error(e.getMessage());
-          // e.printStackTrace();
-        }
-      } catch (Exception ex) {
-        try {
-          Thread.sleep(16 * 1000);
-        } catch (InterruptedException e) {
-          log.debug(ExceptionUtil.getStackTrace(e));
-        }
-        tp.shutdown();
-        log.error("process died...." + ex.getMessage());
-        loader.clean();
-        System.exit(1);
-      }
-
-    }// while
-
-  }
-
-  public static void main(String[] args) {
-    /*
-     * if (args.length < 2 || args[0].startsWith("-h") ||
-     * args[0].startsWith("--h")) {
-     * System.out.println("Usage: UtilDataLoader interval(sec)");
-     * System.exit(1);puvw-./chij } String interval = args[0]; int
-     * intervalValue=Integer.parseInt(interval);
-     */
-    int intervalValue = 60;
-
-    DataConfig mdlConfig = new DataConfig();
-
-    TorqueDataLoader tdl = new TorqueDataLoader(mdlConfig, intervalValue);
-    tdl.run();
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/metrics/Exec.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/metrics/Exec.java b/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/metrics/Exec.java
index 7853e5c..f3b11cf 100644
--- a/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/metrics/Exec.java
+++ b/src/main/java/org/apache/hadoop/chukwa/inputtools/plugin/metrics/Exec.java
@@ -23,7 +23,6 @@ import java.util.TimerTask;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.chukwa.inputtools.plugin.IPlugin;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.json.simple.JSONObject;
 
 public class Exec extends TimerTask {
@@ -62,7 +61,6 @@ public class Exec extends TimerTask {
   }
 
   public static void main(String[] args) {
-    DaemonWatcher.createInstance(System.getProperty("RECORD_TYPE") + "-data-loader");
     int period = 60;
     try {
       if (System.getProperty("PERIOD") != null) {

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/tools/backfilling/QueueToWriterConnector.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/tools/backfilling/QueueToWriterConnector.java b/src/main/java/org/apache/hadoop/chukwa/tools/backfilling/QueueToWriterConnector.java
index df230d5..7b0ca58 100644
--- a/src/main/java/org/apache/hadoop/chukwa/tools/backfilling/QueueToWriterConnector.java
+++ b/src/main/java/org/apache/hadoop/chukwa/tools/backfilling/QueueToWriterConnector.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
 import org.apache.hadoop.chukwa.datacollection.connector.Connector;
 import org.apache.hadoop.chukwa.datacollection.writer.ChukwaWriter;
 import org.apache.hadoop.chukwa.datacollection.writer.SeqFileWriter;
-import org.apache.hadoop.chukwa.util.DaemonWatcher;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
 
@@ -102,7 +101,7 @@ public class QueueToWriterConnector implements Connector, Runnable {
 
     } catch (Throwable e) {
       log.warn("failed to use user-chosen writer class, Bail out!", e);
-      DaemonWatcher.bailout(-1);
+      throw new RuntimeException("Bail out!");
     }
 
     

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/util/DaemonWatcher.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/DaemonWatcher.java b/src/main/java/org/apache/hadoop/chukwa/util/DaemonWatcher.java
deleted file mode 100644
index 8dbd007..0000000
--- a/src/main/java/org/apache/hadoop/chukwa/util/DaemonWatcher.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.chukwa.util;
-
-public class DaemonWatcher extends PidFile {
-  private static DaemonWatcher instance = null;
-  
-  public synchronized static DaemonWatcher createInstance(String name) {
-    if(instance == null) {
-      instance = new DaemonWatcher(name);
-      Runtime.getRuntime().addShutdownHook(instance);
-    }
-    return instance;
-  }
-  
-  public static DaemonWatcher getInstance() {
-    return instance;
-  }
-  
-  private DaemonWatcher(String name) {
-    super(name);
-  }
-  
-  public static void bailout(int status) {
-    System.exit(status);
-  }
-}

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/main/java/org/apache/hadoop/chukwa/util/PidFile.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/hadoop/chukwa/util/PidFile.java b/src/main/java/org/apache/hadoop/chukwa/util/PidFile.java
deleted file mode 100644
index 9154fa5..0000000
--- a/src/main/java/org/apache/hadoop/chukwa/util/PidFile.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.chukwa.util;
-
-
-import java.io.*;
-import java.lang.management.ManagementFactory;
-import java.nio.channels.*;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-public class PidFile extends Thread {
-
-  String name;
-  private static Log log = LogFactory.getLog(PidFile.class);
-  private static FileLock lock = null;
-  private static FileOutputStream pidFileOutput = null;
-  private static final String DEFAULT_CHUKWA_HOME;
-  
-  static {
-      //use /tmp as a default, only if we can't create tmp files via Java.
-    File chukwaHome = new File(System.getProperty("java.io.tmpdir"), "chukwa");
-    try {
-      File tmpFile = File.createTempFile("chukwa", "discovertmp");
-      File tmpDir = tmpFile.getParentFile();
-      tmpFile.delete();
-      chukwaHome = new File(tmpDir, "chukwa");
-      chukwaHome.mkdir();
-    } catch(IOException e) {
-      log.debug(ExceptionUtil.getStackTrace(e));
-    } finally {    
-      DEFAULT_CHUKWA_HOME = chukwaHome.getAbsolutePath();
-    }
-  };
-
-  public PidFile(String name) {
-    this.name = name;
-    try {
-      init();
-    } catch (IOException ex) {
-      clean();
-      System.exit(-1);
-    }
-  }
-
-  public void init() throws IOException {
-    String pidLong = ManagementFactory.getRuntimeMXBean().getName();
-    String[] items = pidLong.split("@");
-    String pid = items[0];
-    String chukwaPath = System.getProperty("CHUKWA_HOME");
-    if(chukwaPath == null) {
-      chukwaPath = DEFAULT_CHUKWA_HOME;
-    }
-    StringBuffer pidFilesb = new StringBuffer();
-    String pidDir = System.getenv("CHUKWA_PID_DIR");
-    if (pidDir == null) {
-      pidDir = chukwaPath + File.separator + "var" + File.separator + "run";
-    }
-    pidFilesb.append(pidDir).append(File.separator).append(name).append(".pid");
-    try {
-      File existsFile = new File(pidDir);
-      if (!existsFile.exists()) {
-        boolean success = (new File(pidDir)).mkdirs();
-        if (!success) {
-          throw (new IOException());
-        }
-      }
-      File pidFile = new File(pidFilesb.toString());
-
-      pidFileOutput = new FileOutputStream(pidFile);
-      pidFileOutput.write(pid.getBytes());
-      pidFileOutput.flush();
-      FileChannel channel = pidFileOutput.getChannel();
-      PidFile.lock = channel.tryLock();
-      if (PidFile.lock != null) {
-        log.debug("Initlization succeeded...");
-      } else {
-        throw (new IOException("Can not get lock on pid file: " + pidFilesb));
-      }
-    } catch (IOException ex) {
-      System.out.println("Initialization failed: can not write pid file to " + pidFilesb);
-      log.error("Initialization failed...");
-      log.error(ex.getMessage());
-      System.exit(-1);
-      throw ex;
-
-    }
-
-  }
-
-  public void clean() {
-    String chukwaPath = System.getenv("CHUKWA_HOME");
-    if(chukwaPath == null) {
-      chukwaPath = DEFAULT_CHUKWA_HOME;
-    }
-    StringBuffer pidFilesb = new StringBuffer();
-    String pidDir = System.getenv("CHUKWA_PID_DIR");
-    if (pidDir == null) {
-      pidDir = chukwaPath + File.separator + "var" + File.separator + "run";
-    }
-    pidFilesb.append(pidDir).append(File.separator).append(name).append(".pid");
-    String pidFileName = pidFilesb.toString();
-
-    File pidFile = new File(pidFileName);
-    if (!pidFile.exists()) {
-      log.error("Delete pid file, No such file or directory: " + pidFileName);
-    } else {
-      try {
-        lock.release();
-        pidFileOutput.close();
-      } catch (IOException e) {
-        log.error("Unable to release file lock: " + pidFileName);
-      }
-    }
-
-    boolean result = pidFile.delete();
-    if (!result) {
-      log.error("Delete pid file failed, " + pidFileName);
-    }
-  }
-
-  public void run() {
-    clean();
-  }
-}

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/TestChukwaSsl.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/TestChukwaSsl.java b/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/TestChukwaSsl.java
index 1ab904f..898e03c 100644
--- a/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/TestChukwaSsl.java
+++ b/src/test/java/org/apache/hadoop/chukwa/datacollection/agent/TestChukwaSsl.java
@@ -34,7 +34,7 @@ import static org.apache.hadoop.chukwa.datacollection.agent.ChukwaConstants.*;
 import junit.framework.TestCase;
 
 public class TestChukwaSsl extends TestCase{
-  String keyStoreFile = "../../test-classes/chukwa.store";
+  String keyStoreFile = "chukwa.store";
   @Override
   protected void setUp() throws IOException, InterruptedException{
     String[] cmd = new String[]{System.getenv("JAVA_HOME")+"/bin/keytool", "-genkeypair", "-keyalg", "RSA",

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestCollector.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestCollector.java b/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestCollector.java
deleted file mode 100644
index 93a9f33..0000000
--- a/src/test/java/org/apache/hadoop/chukwa/datacollection/collector/TestCollector.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.chukwa.datacollection.collector;
-
-
-import junit.framework.TestCase;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
-import org.apache.hadoop.chukwa.*;
-import org.apache.hadoop.chukwa.datacollection.collector.servlet.ServletCollector;
-import org.apache.hadoop.chukwa.datacollection.sender.*;
-import org.apache.hadoop.chukwa.datacollection.writer.*;
-import java.util.*;
-import org.mortbay.jetty.Server;
-import org.mortbay.jetty.servlet.Context;
-import org.mortbay.jetty.servlet.ServletHolder;
-
-public class TestCollector extends TestCase {
-
-  public void testCollector() {
-    try {
-      Configuration conf = new Configuration();
-      conf.set("chukwaCollector.chunkSuppressBufferSize", "10");
-      conf.set("chukwaCollector.pipeline",
-              "org.apache.hadoop.chukwa.datacollection.writer.Dedup,"// note
-                                                                     // comma
-                  + "org.apache.hadoop.chukwa.datacollection.collector.CaptureWriter");
-      conf.set("chukwaCollector.writerClass", PipelineStageWriter.class
-          .getCanonicalName());
-      ChukwaHttpSender sender = new ChukwaHttpSender(conf);
-      ArrayList<String> collectorList = new ArrayList<String>();
-      collectorList.add("http://localhost:9990/chukwa");
-      sender.setCollectors(new RetryListOfCollectors(collectorList, conf));
-      Server server = new Server(9990);
-      Context root = new Context(server, "/", Context.SESSIONS);
-
-      root.addServlet(new ServletHolder(new ServletCollector(conf)), "/*");
-      server.start();
-      server.setStopAtShutdown(false);
-      Thread.sleep(1000);
-
-      Chunk c = new ChunkImpl("data", "stream", 0,
-          "testing -- this should appear once".getBytes(), null);
-      ArrayList<Chunk> toSend = new ArrayList<Chunk>();
-      toSend.add(c);
-      toSend.add(c);
-      sender.send(toSend);
-      Thread.sleep(1000);
-      assertEquals(1, CaptureWriter.outputs.size());
-    } catch (Exception e) {
-      fail(e.toString());
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/chukwa/blob/ecf9b2b6/src/test/java/org/apache/hadoop/chukwa/rest/resource/SetupTestEnv.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/hadoop/chukwa/rest/resource/SetupTestEnv.java b/src/test/java/org/apache/hadoop/chukwa/rest/resource/SetupTestEnv.java
index f76f15c..04bd224 100644
--- a/src/test/java/org/apache/hadoop/chukwa/rest/resource/SetupTestEnv.java
+++ b/src/test/java/org/apache/hadoop/chukwa/rest/resource/SetupTestEnv.java
@@ -52,6 +52,13 @@ public class SetupTestEnv extends TestCase {
   public static MiniDFSCluster dfs;
   
   public SetupTestEnv() {
+    try {
+      Configuration conf=new Configuration();
+      conf.setBoolean("dfs.permissions",true);
+      dfs=new MiniDFSCluster(conf,1,true,null);
+    } catch(Exception e) {
+      fail("Fail to start MiniDFSCluster");
+    }
     if(hicc==null) {
       hicc = HiccWebServer.getInstance();
       conf = HiccWebServer.getConfig();
@@ -63,5 +70,6 @@ public class SetupTestEnv extends TestCase {
   }
   
   public void tearDown() {
+    dfs.shutdown();
   }
 }

