hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From dhr...@apache.org
Subject svn commit: r686181 [2/2] - in /hadoop/core/trunk: ./ src/contrib/failmon/ src/contrib/failmon/bin/ src/contrib/failmon/conf/ src/contrib/failmon/src/ src/contrib/failmon/src/java/ src/contrib/failmon/src/java/org/ src/contrib/failmon/src/java/org/apac...
Date Fri, 15 Aug 2008 09:04:10 GMT
Added: hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/LogParser.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/LogParser.java?rev=686181&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/LogParser.java (added)
+++ hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/LogParser.java Fri Aug 15 02:04:07 2008
@@ -0,0 +1,214 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.contrib.failmon;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Calendar;
+
+/**********************************************************
+ * This class represents objects that provide log parsing 
+ * functionality. Typically, such objects read log files line
+ * by line and for each log entry they identify, they create a 
+ * corresponding EventRecord. In this way, disparate log files
+ * can be merged using the uniform format of EventRecords and can,
+ * thus, be processed in a uniform way.
+ * 
+ **********************************************************/
+
+public abstract class LogParser implements Monitored {
+
+  File file;
+
+  BufferedReader reader;
+
+  String hostname;
+
+  Object [] ips;
+
+  String dateformat;
+
+  String timeformat;
+
+  private String firstLine;
+  private long offset;
+
+  /**
+   * Create a parser that will read from the specified log file.
+   * 
+   * @param fname the filename of the log file to be read
+   */
+  public LogParser(String fname) {
+    file = new File(fname);
+
+    ParseState ps = PersistentState.getState(file.getAbsolutePath());
+    firstLine = ps.firstLine;
+    offset = ps.offset;
+    
+    try {
+      reader = new BufferedReader(new FileReader(file));
+      checkForRotation();
+      Environment.logInfo("Checked for rotation...");
+      reader.skip(offset);
+    } catch (FileNotFoundException e) {
+      System.err.println(e.getMessage());
+      e.printStackTrace();
+    } catch (IOException e) {
+      System.err.println(e.getMessage());
+      e.printStackTrace();
+    }
+
+    setNetworkProperties();
+  }
+
+  protected void setNetworkProperties() {
+    // determine hostname and ip addresses for the node
+    try {
+      // Get hostname
+      hostname = InetAddress.getLocalHost().getCanonicalHostName();
+      // Get all associated ip addresses
+      ips = InetAddress.getAllByName(hostname);
+
+    } catch (UnknownHostException e) {
+      e.printStackTrace();
+    }
+  }
+
+  /**
+   * Insert all EventRecords that can be extracted for
+   * the represented hardware component into a LocalStore.
+   * 
+   * @param ls the LocalStore into which the EventRecords 
+   * are to be stored.
+   */
+  public void monitor(LocalStore ls) {
+    int in = 0;
+    EventRecord er = null;
+    Environment.logInfo("Started processing log...");
+
+    while ((er = getNext()) != null) {
+      // Environment.logInfo("Processing log line:\t" + in++);
+      if (er.isValid()) {
+        ls.insert(er);
+      }
+    }
+
+    PersistentState.updateState(file.getAbsolutePath(), firstLine, offset);
+    PersistentState.writeState("conf/parsing.state");
+  }
+
+  /**
+   * Get an array of all EventRecords that can be extracted for
+   * the represented hardware component.
+   * 
+   * @return The array of EventRecords
+   */
+  public EventRecord[] monitor() {
+
+    ArrayList<EventRecord> recs = new ArrayList<EventRecord>();
+    EventRecord er;
+
+    while ((er = getNext()) != null)
+      recs.add(er);
+
+    EventRecord[] T = new EventRecord[recs.size()];
+
+    return recs.toArray(T);
+  }
+
+  /**
+   * Continue parsing the log file until a valid log entry is identified.
+   * When one such entry is found, parse it and return a corresponding EventRecord.
+   * 
+   *  
+   * @return The EventRecord corresponding to the next log entry
+   */
+  public EventRecord getNext() {
+    try {
+	String line = reader.readLine();
+	if (line != null) {
+	    if (firstLine == null)
+		firstLine = new String(line);
+	    offset += line.length() + 1;
+	    return parseLine(line);
+	}
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    return null;
+  }
+
+  /**
+   * Return the BufferedReader, that reads the log file
+   *  
+   * @return The BufferedReader that reads the log file
+   */
+  public BufferedReader getReader() {
+    return reader;
+  }
+
+  /**
+   * Check whether the log file has been rotated. If so,
+   * start reading the file from the beginning.
+   *  
+   */
+  public void checkForRotation() {
+    try {
+      BufferedReader probe = new BufferedReader(new FileReader(file.getAbsoluteFile()));
+      if (firstLine == null || (!firstLine.equals(probe.readLine()))) {
+	probe.close();
+	// start reading the file from the beginning
+        reader.close();
+        reader = new BufferedReader(new FileReader(file.getAbsoluteFile()));
+	firstLine = null;
+	offset = 0;
+      }
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+  }
+
+  /**
+   * Parses one line of the log. If the line contains a valid 
+   * log entry, then an appropriate EventRecord is returned, after all
+   * relevant fields have been parsed.
+   *
+   *  @param line the log line to be parsed
+   *
+   *  @return the EventRecord representing the log entry of the line. If 
+   *  the line does not contain a valid log entry, then the EventRecord 
+   *  returned has isValid() = false. When the end-of-file has been reached,
+   *  null is returned to the caller.
+   */
+  abstract public EventRecord parseLine(String line) throws IOException;
+
+  /**
+   * Parse a date found in Hadoop log file.
+   * 
+   * @return a Calendar representing the date
+   */
+  abstract protected Calendar parseDate(String strDate, String strTime);
+
+}

Added: hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/MonitorJob.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/MonitorJob.java?rev=686181&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/MonitorJob.java (added)
+++ hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/MonitorJob.java Fri Aug 15 02:04:07 2008
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.contrib.failmon;
+
+/**********************************************************
+ * This class is a wrapper for a monitoring job. 
+ * 
+ **********************************************************/
+
+public class MonitorJob {
+  Monitored job;
+
+  String type;
+  int interval;
+  int counter;
+
+  public MonitorJob(Monitored _job, String _type, int _interval) {
+    job = _job;
+    type = _type;
+    interval = _interval;
+    counter = _interval;
+  }
+
+  public void reset() {
+    counter = interval;
+  }
+}

Added: hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Monitored.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Monitored.java?rev=686181&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Monitored.java (added)
+++ hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Monitored.java Fri Aug 15 02:04:07 2008
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.contrib.failmon;
+
+/**********************************************************
+ * Represents objects that monitor specific hardware resources and
+ * can query them to get EventRecords describing the state of these
+ * resources.
+ *
+ **********************************************************/
+
+public interface Monitored {
+  /**
+   * Get an array of all EventRecords that can be extracted for
+   * the represented hardware component.
+   * 
+   * @return The array of EventRecords
+   */
+  public EventRecord[] monitor();
+  
+  /**
+   * Inserts all EventRecords that can be extracted for
+   * the represented hardware component into a LocalStore.
+   * 
+   * @param ls the LocalStore into which the EventRecords 
+   * are to be stored.
+   */
+  public void monitor(LocalStore ls);
+  
+  /**
+   * Return a String with information about the implementing
+   * class 
+   * 
+   * @return A String describing the implementing class
+   */
+  public String getInfo();
+}

Added: hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/NICParser.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/NICParser.java?rev=686181&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/NICParser.java (added)
+++ hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/NICParser.java Fri Aug 15 02:04:07 2008
@@ -0,0 +1,140 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.contrib.failmon;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Calendar;
+
+/**********************************************************
+ * Objects of this class parse the output of ifconfig to 
+ * gather information about present Network Interface Cards
+ * in the system. The list of NICs to poll is specified in the 
+ * configuration file.
+ * 
+ **********************************************************/
+
+
+public class NICParser extends ShellParser {
+
+  String[] nics;
+
+  /**
+   * Constructs a NICParser and reads the list of NICs to query
+   */
+  public NICParser() {
+    super();
+    nics = Environment.getProperty("nic.list").split(",\\s*");
+  }
+
+  /**
+   * Reads and parses the output of ifconfig for a specified NIC and 
+   * creates an appropriate EventRecord that holds the desirable 
+   * information for it.
+   * 
+   * @param device the NIC device name to query
+   * 
+   * @return the EventRecord created
+   */
+  public EventRecord query(String device) throws UnknownHostException {
+    StringBuffer sb = Environment.runCommand("/sbin/ifconfig " + device);
+    EventRecord retval = new EventRecord(InetAddress.getLocalHost()
+        .getCanonicalHostName(), InetAddress.getAllByName(InetAddress.getLocalHost()
+        .getHostName()), Calendar.getInstance(), "NIC", "Unknown", device, "-");
+
+    retval.set("hwAddress", findPattern("HWaddr\\s*([\\S{2}:]{17})", sb
+        .toString(), 1));
+
+    retval.set("ipAddress", findPattern("inet\\s+addr:\\s*([\\w.?]*)", sb
+        .toString(), 1));
+
+    String tmp = findPattern("inet\\s+addr:\\s*([\\w.?]*)", sb.toString(), 1);
+    retval.set("status", (tmp == null) ? "DOWN" : "UP");
+    if (tmp != null)
+      retval.set("ipAddress", tmp);
+
+    retval.set("rxPackets", findPattern("RX\\s*packets\\s*:\\s*(\\d+)", sb
+        .toString(), 1));
+    retval.set("rxErrors", findPattern("RX.+errors\\s*:\\s*(\\d+)", sb
+        .toString(), 1));
+    retval.set("rxDropped", findPattern("RX.+dropped\\s*:\\s*(\\d+)", sb
+        .toString(), 1));
+    retval.set("rxOverruns", findPattern("RX.+overruns\\s*:\\s*(\\d+)", sb
+        .toString(), 1));
+    retval.set("rxFrame", findPattern("RX.+frame\\s*:\\s*(\\d+)",
+        sb.toString(), 1));
+
+    retval.set("txPackets", findPattern("TX\\s*packets\\s*:\\s*(\\d+)", sb
+        .toString(), 1));
+    retval.set("txErrors", findPattern("TX.+errors\\s*:\\s*(\\d+)", sb
+        .toString(), 1));
+    retval.set("txDropped", findPattern("TX.+dropped\\s*:\\s*(\\d+)", sb
+        .toString(), 1));
+    retval.set("txOverruns", findPattern("TX.+overruns\\s*:\\s*(\\d+)", sb
+        .toString(), 1));
+    retval.set("txCarrier", findPattern("TX.+carrier\\s*:\\s*(\\d+)", sb
+        .toString(), 1));
+
+    retval.set("collisions", findPattern("\\s+collisions\\s*:\\s*(\\d+)", sb
+        .toString(), 1));
+
+    retval.set("rxBytes", findPattern("RX\\s*bytes\\s*:\\s*(\\d+)", sb
+        .toString(), 1));
+    retval.set("txBytes", findPattern("TX\\s*bytes\\s*:\\s*(\\d+)", sb
+        .toString(), 1));
+
+    return retval;
+  }
+
+  /**
+   * Invokes query() to do the parsing and handles parsing errors for 
+   * each one of the NICs specified in the configuration. 
+   * 
+   * @return an array of EventRecords that holds one element that represents
+   * the current state of network interfaces.
+   */
+  public EventRecord[] monitor() {
+    ArrayList<EventRecord> recs = new ArrayList<EventRecord>();
+
+    for (String nic : nics) {
+      try {
+        recs.add(query(nic));
+      } catch (UnknownHostException e) {
+        e.printStackTrace();
+      }
+    }
+
+    EventRecord[] T = new EventRecord[recs.size()];
+
+    return recs.toArray(T);
+  }
+  
+  /**
+   * Return a String with information about this class
+   * 
+   * @return A String describing this class
+   */
+  public String getInfo() {
+    String retval = "ifconfig parser for interfaces: ";
+    for (String nic : nics)
+      retval += nic + " ";
+    return retval;
+  }
+}

Added: hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/OfflineAnonymizer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/OfflineAnonymizer.java?rev=686181&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/OfflineAnonymizer.java (added)
+++ hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/OfflineAnonymizer.java Fri Aug 15 02:04:07 2008
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.contrib.failmon;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+
+/**********************************************************
+ * This class can be used to anonymize logs independently of
+ * Hadoop and the Executor. It parses the specified log file to
+ * create log records for it and then passes them to the Anonymizer.
+ * After they are anonymized, they are written to a local file,
+ * which is then compressed and stored locally.
+ * 
+ **********************************************************/
+
+public class OfflineAnonymizer {
+
+  public enum LogType {
+    HADOOP, SYSTEM
+  };
+
+  LogType logtype;
+
+  File logfile;
+
+  LogParser parser;
+
+  /**
+   * Creates an OfflineAnonymizer for a specific log file.
+   * 
+   * @param logtype the type of the log file. This can either be
+   * LogFile.HADOOP or LogFile.SYSTEM
+   * @param filename the path to the log file
+   * 
+   */  
+  public OfflineAnonymizer(LogType logtype, String filename) {
+
+    logfile = new File(filename);
+
+    if (!logfile.exists()) {
+      System.err.println("Input file does not exist!");
+      System.exit(0);
+    }
+
+    if (logtype == LogType.HADOOP)
+      parser = new HadoopLogParser(filename);
+    else
+      parser = new SystemLogParser(filename);
+  }
+
+  /**
+   * Performs anonymization for the log file. Log entries are
+   * read one by one and EventRecords are created, which are then
+   * anonymized and written to the output.
+   * 
+   */
+  public void anonymize() throws Exception {
+    EventRecord er = null;
+    SerializedRecord sr = null;
+
+    BufferedWriter bfw = new BufferedWriter(new FileWriter(logfile.getName()
+        + ".anonymized"));
+
+    System.out.println("Anonymizing log records...");
+    while ((er = parser.getNext()) != null) {
+      if (er.isValid()) {
+        sr = new SerializedRecord(er);
+        Anonymizer.anonymize(sr);
+        bfw.write(LocalStore.pack(sr).toString());
+        bfw.write(LocalStore.RECORD_SEPARATOR);
+      }
+    }
+    bfw.flush();
+    bfw.close();
+    System.out.println("Anonymized log records written to " + logfile.getName()
+        + ".anonymized");
+
+    System.out.println("Compressing output file...");
+    LocalStore.zipCompress(logfile.getName() + ".anonymized");
+    System.out.println("Compressed output file written to " + logfile.getName()
+        + ".anonymized" + LocalStore.COMPRESSION_SUFFIX);
+  }
+
+  public static void main(String[] args) {
+
+    if (args.length < 2) {
+      System.out.println("Usage: OfflineAnonymizer <log_type> <filename>");
+      System.out
+          .println("where <log_type> is either \"hadoop\" or \"system\" and <filename> is the path to the log file");
+      System.exit(0);
+    }
+
+    LogType logtype = null;
+
+    if (args[0].equalsIgnoreCase("-hadoop"))
+      logtype = LogType.HADOOP;
+    else if (args[0].equalsIgnoreCase("-system"))
+      logtype = LogType.SYSTEM;
+    else {
+      System.err.println("Invalid first argument.");
+      System.exit(0);
+    }
+
+    OfflineAnonymizer oa = new OfflineAnonymizer(logtype, args[1]);
+
+    try {
+      oa.anonymize();
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+
+    return;
+  }
+}

Added: hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/PersistentState.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/PersistentState.java?rev=686181&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/PersistentState.java (added)
+++ hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/PersistentState.java Fri Aug 15 02:04:07 2008
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.contrib.failmon;
+
+import java.util.Properties;
+import java.util.Calendar;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+/**********************************************************
+ * This class takes care of the information that needs to be
+ * persistently stored locally on nodes. Bookkeeping is done for the
+ * state of parsing of log files, so that the portion of the file that
+ * has already been parsed in previous calls will not be parsed again.
+ * For each log file, we maintain the byte offset of the last
+ * character parsed in previous passes. Also, the first entry in the
+ * log file is stored, so that FailMon can determine when a log file
+ * has been rotated (and thus parsing needs to start from the
+ * beginning of the file). We use a property file to store that
+ * information. For each log file we create a property keyed by the
+ * filename, the value of which contains the byte offset and first log
+ * entry separated by a SEPARATOR.
+ * 
+ **********************************************************/
+
+public class PersistentState {
+
+  private final static String SEPARATOR = "###";
+  
+  static String filename;
+  static Properties persData = new Properties();
+  
+  /**
+   * Read the state of parsing for all open log files from a property
+   * file.
+   * 
+   * @param fname the filename of the property file to be read
+   */
+
+  public static void readState(String fname) {
+
+    filename = fname;
+    
+    try {
+      persData.load(new FileInputStream(filename));
+    } catch (FileNotFoundException e1) {
+      // ignore
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+  }
+
+   /**
+   * Read and return the state of parsing for a particular log file.
+   * 
+   * @param fname the log file for which to read the state
+   */
+  public static ParseState getState(String fname) {
+    String [] fields = persData.getProperty(fname, "null" + SEPARATOR + "0").split(SEPARATOR, 2);
+    String firstLine;
+    long offset;
+    
+    if (fields.length < 2) {
+      System.err.println("Malformed persistent state data found");
+      Environment.logInfo("Malformed persistent state data found");
+      firstLine = null;
+      offset = 0;
+    } else {
+      firstLine = (fields[0].equals("null") ? null : fields[0]);
+      offset = Long.parseLong(fields[1]);
+    }
+
+    return new ParseState(fname, firstLine, offset);
+  }
+
+  /**
+   * Set the state of parsing for a particular log file.
+   * 
+   * @param state the ParseState to set
+   */
+  public static void setState(ParseState state) {
+
+    if (state == null) {
+      System.err.println("Null state found");
+      Environment.logInfo("Null state found");
+    }
+
+    persData.setProperty(state.filename, state.firstLine + SEPARATOR + state.offset);
+  }
+
+  /**
+   * Upadate the state of parsing for a particular log file.
+   * 
+   * @param filename the log file for which to update the state
+   * @param firstLine the first line of the log file currently
+   * @param offset the byte offset of the last character parsed
+   */ 
+  public static void updateState(String filename, String firstLine, long offset) {
+
+    ParseState ps = getState(filename);
+
+    if (firstLine != null)
+      ps.firstLine = firstLine;
+
+    ps.offset = offset;
+
+    setState(ps);
+  }
+
+  /**
+   * Write the state of parsing for all open log files to a property
+   * file on disk.
+   * 
+   * @param fname the filename of the property file to write to
+   */
+  public static void writeState(String fname) {
+    try {
+      persData.store(new FileOutputStream(fname), Calendar.getInstance().getTime().toString());
+    } catch (FileNotFoundException e1) {
+      e1.printStackTrace();
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+  }
+  
+}
+
+/**********************************************************
+ * This class represents the state of parsing for a particular log
+ * file.
+ * 
+ **********************************************************/
+
class ParseState {

  /** The log file this parsing state refers to. */
  public String filename;
  /** First log line seen when parsing started; null if never read. */
  public String firstLine;
  /** Byte offset of the last character parsed in previous passes. */
  public long offset;

  /**
   * Create a snapshot of the parsing state for one log file.
   *
   * @param filename the log file the state refers to
   * @param firstLine the first line of the file, or null
   * @param offset the byte offset of the last parsed character
   */
  public ParseState(String filename, String firstLine, long offset) {
    this.filename = filename;
    this.firstLine = firstLine;
    this.offset = offset;
  }
}

Added: hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/RunOnce.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/RunOnce.java?rev=686181&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/RunOnce.java (added)
+++ hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/RunOnce.java Fri Aug 15 02:04:07 2008
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.contrib.failmon;
+
+import java.util.ArrayList;
+
+/**********************************************************
+* Runs a set of monitoring jobs once for the local node. The set of
+* jobs to be run is the intersection of the jobs specified in the
+* configuration file and the set of jobs specified in the --only
+* command line argument.
+ **********************************************************/ 
+
+public class RunOnce {
+
+  LocalStore lstore;
+
+  ArrayList<MonitorJob> monitors;
+  
+  boolean uploading = true;
+  
+  public RunOnce(String confFile) {
+    
+    Environment.prepare(confFile);
+    
+    String localTmpDir;
+    
+    // running as a stand-alone application
+    localTmpDir = System.getProperty("java.io.tmpdir");
+    Environment.setProperty("local.tmp.dir", localTmpDir);
+        
+    monitors = Environment.getJobs();
+    lstore = new LocalStore();
+    uploading  = true;
+  }
+
+  private void filter (String [] ftypes) {
+    ArrayList<MonitorJob> filtered = new ArrayList<MonitorJob>();
+    boolean found;
+    
+    // filter out unwanted monitor jobs
+    for (MonitorJob job : monitors) {
+      found = false;
+      for (String ftype : ftypes)
+	if (job.type.equalsIgnoreCase(ftype))
+	    found = true;
+      if (found)
+	filtered.add(job);
+    }
+
+    // disable uploading if not requested
+    found = false;
+    for (String ftype : ftypes)
+      if (ftype.equalsIgnoreCase("upload"))
+	found = true;
+
+    if (!found)
+      uploading = false;
+    
+    monitors = filtered;
+  }
+  
+  private void run() {
+    
+    Environment.logInfo("Failmon started successfully.");
+
+    for (int i = 0; i < monitors.size(); i++) {
+      Environment.logInfo("Calling " + monitors.get(i).job.getInfo() + "...\t");
+      monitors.get(i).job.monitor(lstore);
+    }
+
+    if (uploading)
+      lstore.upload();
+
+    lstore.close();
+  }
+
+  public void cleanup() {
+    // nothing to be done
+  }
+
+  
+  public static void main (String [] args) {
+
+    String configFilePath = "./conf/failmon.properties";
+    String [] onlyList = null;
+    
+    // Parse command-line parameters
+    for (int i = 0; i < args.length - 1; i++) {
+      if (args[i].equalsIgnoreCase("--config"))
+	configFilePath = args[i + 1];
+      else if (args[i].equalsIgnoreCase("--only"))
+	onlyList = args[i + 1].split(",");
+    }
+
+    RunOnce ro = new RunOnce(configFilePath);
+    // only keep the requested types of jobs
+    if (onlyList != null)
+      ro.filter(onlyList);
+    // run once only
+    ro.run();
+  }
+
+}

Added: hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SMARTParser.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SMARTParser.java?rev=686181&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SMARTParser.java (added)
+++ hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SMARTParser.java Fri Aug 15 02:04:07 2008
@@ -0,0 +1,206 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.contrib.failmon;
+
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**********************************************************
+ * Objects of this class parse the output of smartmontools to 
+ * gather information about the state of disks in the system. The
+ * smartmontools utility reads the S.M.A.R.T. attributes from
+ * the disk devices and reports them to the user. Note that since
+ * running smartctl requires superuser provileges, one should  
+ * grand sudo privileges to the running user for the command smartctl
+ * (without a password). Alternatively, one can set up a cron  job that 
+ * periodically dumps the output of smartctl into a user-readable file.
+ * See the configuration file for details.
+ *
+ **********************************************************/
+
+public class SMARTParser extends ShellParser {
+
+  String[] devices;
+
+  /**
+   * Constructs a SMARTParser and reads the list of disk 
+   * devices to query
+   */
+  public SMARTParser() {
+    super();
+    String devicesStr = Environment.getProperty("disks.list");
+    System.out.println("skato " + devicesStr);
+    if (devicesStr != null)
+      devices = devicesStr.split(",\\s*");
+  }
+
+  /**
+   * Reads and parses the output of smartctl for a specified disk and 
+   * creates an appropriate EventRecord that holds the desirable 
+   * information for it. Since the output of smartctl is different for 
+   * different kinds of disks, we try to identify as many attributes as 
+   * posssible for all known output formats. 
+   * 
+   * @param device the disk device name to query
+   * 
+   * @return the EventRecord created
+   */
+  public EventRecord query(String device) throws Exception {
+    String conf = Environment.getProperty("disks." + device + ".source");
+    StringBuffer sb;
+
+    if (conf == null)
+      sb = Environment.runCommand("sudo smartctl --all " + device);
+    else
+      sb = Environment.runCommand("cat " + conf);
+
+    EventRecord retval = new EventRecord(InetAddress.getLocalHost()
+        .getCanonicalHostName(), InetAddress.getAllByName(InetAddress.getLocalHost()
+        .getHostName()), Calendar.getInstance(), "SMART", "Unknown",
+        (conf == null ? "sudo smartctl --all " + device : "file " + conf), "-");
+    // IBM SCSI disks
+    retval.set("model", findPattern("Device\\s*:\\s*(.*)", sb.toString(), 1));
+    retval.set("serial", findPattern("Serial\\s+Number\\s*:\\s*(.*)", sb
+        .toString(), 1));
+    retval.set("firmware", findPattern("Firmware\\s+Version\\s*:\\s*(.*)", sb
+        .toString(), 1));
+    retval.set("capacity", findPattern("User\\s+Capacity\\s*:\\s*(.*)", sb
+        .toString(), 1));
+    retval.set("status", findPattern("SMART\\s*Health\\s*Status:\\s*(.*)", sb
+        .toString(), 1));
+    retval.set("current_temperature", findPattern(
+        "Current\\s+Drive\\s+Temperature\\s*:\\s*(.*)", sb.toString(), 1));
+    retval.set("trip_temperature", findPattern(
+        "Drive\\s+Trip\\s+Temperature\\s*:\\s*(.*)", sb.toString(), 1));
+    retval.set("start_stop_count", findPattern(
+        "start\\s+stop\\s+count\\s*:\\s*(\\d*)", sb.toString(), 1));
+
+    String[] var = { "read", "write", "verify" };
+    for (String s : var) {
+      retval.set(s + "_ecc_fast", findPattern(s + "\\s*:\\s*(\\d*)", sb
+          .toString(), 1));
+      retval.set(s + "_ecc_delayed", findPattern(s
+          + "\\s*:\\s*(\\d+\\s+){1}(\\d+)", sb.toString(), 2));
+      retval.set(s + "_rereads", findPattern(
+          s + "\\s*:\\s*(\\d+\\s+){2}(\\d+)", sb.toString(), 2));
+      retval.set(s + "_GBs", findPattern(s
+          + "\\s*:\\s*(\\d+\\s+){5}(\\d+.?\\d*)", sb.toString(), 2));
+      retval.set(s + "_uncorrected",
+          findPattern(s + "\\s*:\\s*(\\d+\\s+){5}(\\d+.?\\d*){1}\\s+(\\d+)", sb
+              .toString(), 3));
+    }
+
+    // Hitachi IDE, SATA
+    retval.set("model", findPattern("Device\\s*Model\\s*:\\s*(.*)", sb
+        .toString(), 1));
+    retval.set("serial", findPattern("Serial\\s+number\\s*:\\s*(.*)", sb
+        .toString(), 1));
+    retval.set("protocol", findPattern("Transport\\s+protocol\\s*:\\s*(.*)", sb
+        .toString(), 1));
+    retval.set("status", "PASSED".equalsIgnoreCase(findPattern(
+        "test\\s*result\\s*:\\s*(.*)", sb.toString(), 1)) ? "OK" : "FAILED");
+
+    readColumns(retval, sb);
+
+    return retval;
+  }
+
+  /**
+   * Reads attributes in the following format:
+   * 
+   * ID# ATTRIBUTE_NAME          FLAG     VALUE WORST THRESH TYPE      UPDATED  WHEN_FAILED RAW_VALUE
+   * 3 Spin_Up_Time             0x0027   180   177   063    Pre-fail  Always       -       10265
+   * 4 Start_Stop_Count         0x0032   253   253   000    Old_age   Always       -       34
+   * 5 Reallocated_Sector_Ct    0x0033   253   253   063    Pre-fail  Always       -       0
+   * 6 Read_Channel_Margin      0x0001   253   253   100    Pre-fail  Offline      -       0
+   * 7 Seek_Error_Rate          0x000a   253   252   000    Old_age   Always       -       0
+   * 8 Seek_Time_Performance    0x0027   250   224   187    Pre-fail  Always       -       53894
+   * 9 Power_On_Minutes         0x0032   210   210   000    Old_age   Always       -       878h+00m
+   * 10 Spin_Retry_Count        0x002b   253   252   157    Pre-fail  Always       -       0
+   * 11 Calibration_Retry_Count 0x002b   253   252   223    Pre-fail  Always       -       0
+   * 12 Power_Cycle_Count       0x0032   253   253   000    Old_age   Always       -       49
+   * 192 PowerOff_Retract_Count 0x0032   253   253   000    Old_age   Always       -       0
+   * 193 Load_Cycle_Count       0x0032   253   253   000    Old_age   Always       -       0
+   * 194 Temperature_Celsius    0x0032   037   253   000    Old_age   Always       -       37
+   * 195 Hardware_ECC_Recovered 0x000a   253   252   000    Old_age   Always       -       2645
+   * 
+   * This format is mostly found in IDE and SATA disks.
+   * 
+   * @param er the EventRecord in which to store attributes found
+   * @param sb the StringBuffer with the text to parse
+   * 
+   * @return the EventRecord in which new attributes are stored.
+   */
+  private EventRecord readColumns(EventRecord er, StringBuffer sb) {
+
+    Pattern pattern = Pattern.compile("^\\s{0,2}(\\d{1,3}\\s+.*)$",
+        Pattern.MULTILINE);
+    Matcher matcher = pattern.matcher(sb);
+
+    while (matcher.find()) {
+      String[] tokens = matcher.group(1).split("\\s+");
+      boolean failed = false;
+      // check if this attribute is a failed one
+      if (!tokens[8].equals("-"))
+        failed = true;
+      er.set(tokens[1].toLowerCase(), (failed ? "FAILED:" : "") + tokens[9]);
+    }
+
+    return er;
+  }
+
+  /**
+   * Invokes query() to do the parsing and handles parsing errors for 
+   * each one of the disks specified in the configuration. 
+   * 
+   * @return an array of EventRecords that holds one element that represents
+   * the current state of the disk devices.
+   */
+  public EventRecord[] monitor() {
+    ArrayList<EventRecord> recs = new ArrayList<EventRecord>();
+
+    for (String device : devices) {
+      try {
+        recs.add(query(device));
+      } catch (Exception e) {
+        e.printStackTrace();
+      }
+    }
+
+    EventRecord[] T = new EventRecord[recs.size()];
+
+    return recs.toArray(T);
+  }
+  
+  /**
+   * Return a String with information about this class
+   * 
+   * @return A String describing this class
+   */
+  public String getInfo() {
+    String retval = "S.M.A.R.T. disk attributes parser for disks ";
+    for (String device : devices)
+      retval += device + " ";
+    return retval;
+  }
+
+}

Added: hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SensorsParser.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SensorsParser.java?rev=686181&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SensorsParser.java (added)
+++ hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SensorsParser.java Fri Aug 15 02:04:07 2008
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.contrib.failmon;
+
+import java.net.InetAddress;
+import java.util.Calendar;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**********************************************************
+ * Objects of this class parse the output of the lm-sensors utility 
+ * to gather information about fan speed, temperatures for cpus
+ * and motherboard etc.
+ *
+ **********************************************************/
+
+public class SensorsParser extends ShellParser {
+
+  /**
+   * Reads and parses the output of the 'sensors' command 
+   * and creates an appropriate EventRecord that holds 
+   * the desirable information.
+   * 
+   * @param s unused parameter
+   * 
+   * @return the EventRecord created
+   */
+  public EventRecord query(String s) throws Exception {
+    StringBuffer sb;
+
+    //sb = Environment.runCommand("sensors -A");
+     sb = Environment.runCommand("cat sensors.out");
+
+    EventRecord retval = new EventRecord(InetAddress.getLocalHost()
+        .getCanonicalHostName(), InetAddress.getAllByName(InetAddress.getLocalHost()
+        .getHostName()), Calendar.getInstance(), "lm-sensors", "Unknown",
+        "sensors -A", "-");
+    readGroup(retval, sb, "fan");
+    readGroup(retval, sb, "in");
+    readGroup(retval, sb, "temp");
+    readGroup(retval, sb, "Core");
+
+    return retval;
+  }
+
+  /**
+   * Reads and parses lines that provide the output
+   * of a group of sensors with the same functionality.
+   * 
+   * @param er the EventRecord to which the new attributes are added
+   * @param sb the text to parse
+   * @param prefix a String prefix specifying the common prefix of the
+   * sensors' names in the group (e.g. "fan", "in", "temp"
+   * 
+   * @return the EventRecord created
+   */
+  private EventRecord readGroup(EventRecord er, StringBuffer sb, String prefix) {
+
+    Pattern pattern = Pattern.compile(".*(" + prefix
+        + "\\s*\\d*)\\s*:\\s*(\\+?\\d+)", Pattern.MULTILINE);
+    Matcher matcher = pattern.matcher(sb);
+
+    while (matcher.find())
+      er.set(matcher.group(1), matcher.group(2));
+
+    return er;
+  }
+
+  /**
+   * Invokes query() to do the parsing and handles parsing errors. 
+   * 
+   * @return an array of EventRecords that holds one element that represents
+   * the current state of the hardware sensors
+   */
+  public EventRecord[] monitor() {
+    EventRecord[] recs = new EventRecord[1];
+
+    try {
+      recs[0] = query(null);
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+
+    return recs;
+  }
+  
+  /**
+   * Return a String with information about this class
+   * 
+   * @return A String describing this class
+   */
+  public String getInfo() {
+    return ("lm-sensors parser");
+  }
+
+}

Added: hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SerializedRecord.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SerializedRecord.java?rev=686181&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SerializedRecord.java (added)
+++ hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SerializedRecord.java Fri Aug 15 02:04:07 2008
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.contrib.failmon;
+
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.HashMap;
+import java.text.DateFormat;
+
+/**********************************************************
+ * Objects of this class hold the serialized representations
+ * of EventRecords. A SerializedRecord is essentially an EventRecord
+ * with all its property values converted to strings. It also provides 
+ * some convenience methods for printing the property fields in a 
+ * more readable way.
+ *
+ **********************************************************/
+
+public class SerializedRecord {
+
+  HashMap<String, String> fields;
+  private static DateFormat dateFormatter =
+    DateFormat.getDateTimeInstance(DateFormat.LONG, DateFormat.LONG);;
+
+  /**
+   * Create the SerializedRecord given an EventRecord.
+   */
+  
+  public SerializedRecord(EventRecord source) {
+    fields = new HashMap<String, String>();
+    fields.clear();
+
+    for (String k : source.getMap().keySet()) {
+      ArrayList<String> strs = getStrings(source.getMap().get(k));
+      if (strs.size() == 1)
+        fields.put(k, strs.get(0));
+      else
+        for (int i = 0; i < strs.size(); i++)
+          fields.put(k + "#" + i, strs.get(i));
+    }
+
+  }
+
+  /**
+   * Extract String representations from an Object.
+   * 
+   * @param o the input object
+   * 
+   * @return an ArrayList that contains Strings found in o
+   */
+  private ArrayList<String> getStrings(Object o) {
+    ArrayList<String> retval = new ArrayList<String>();
+    retval.clear();
+    if (o == null)
+      retval.add("null");
+    else if (o instanceof String)
+      retval.add((String) o);
+    else if (o instanceof Calendar)
+      retval.add(dateFormatter.format(((Calendar) o).getTime()));
+    else if (o instanceof InetAddress[])
+      for (InetAddress ip : ((InetAddress[]) o))
+        retval.add(ip.getHostAddress());
+    else if (o instanceof String[])
+      for (String s : (String []) o)
+        retval.add(s);
+    else
+      retval.add(o.toString());
+
+    return retval;
+  }
+
+  /**
+   * Set the value of a property of the EventRecord.
+   * 
+   * @param fieldName the name of the property to set
+   * @param fieldValue the value of the property to set
+   * 
+   */
+  public void set(String fieldName, String fieldValue) {
+    fields.put(fieldName, fieldValue);
+  }
+
+  /**
+   * Get the value of a property of the EventRecord.
+   * If the property with the specific key is not found,
+   * null is returned.
+   * 
+   * @param fieldName the name of the property to get.
+   */
+  public String get(String fieldName) {
+    return fields.get(fieldName);
+  }
+
+  /**
+   * Arrange the keys to provide a more readable printing order:
+   * first goes the timestamp, then the hostname and then the type, followed
+   * by all other keys found.
+   * 
+   * @param keys The input ArrayList of keys to re-arrange.
+   */
+  public static void arrangeKeys(ArrayList<String> keys) {
+    move(keys, "timestamp", 0);
+    move(keys, "hostname", 1);
+    move(keys, "type", 2);
+  }
+
+  private static void move(ArrayList<String> keys, String key, int position) {
+    int cur = keys.indexOf(key);
+    if (cur == -1)
+      return;
+    keys.set(cur, keys.get(position));
+    keys.set(position, key);
+  }
+
+  /**
+   * Check if the SerializedRecord is a valid one, i.e., whether
+   * it represents meaningful metric values.
+   * 
+   * @return true if the EventRecord is a valid one, false otherwise.
+   */
+  public boolean isValid() {
+    return !("invalid".equalsIgnoreCase(fields.get("hostname")));
+  }
+
+  
+  /**
+   * Creates and returns a string reperssentation of the object
+   * 
+   * @return a String representing the object
+   */
+
+  public String toString() {
+    String retval = "";
+    ArrayList<String> keys = new ArrayList<String>(fields.keySet());
+    arrangeKeys(keys);
+
+    for (int i = 0; i < keys.size(); i++) {
+      String value = fields.get(keys.get(i));
+      if (value == null)
+        retval += keys.get(i) + ":\tnull\n";
+      else
+        retval += keys.get(i) + ":\t" + value + "\n";
+    }
+    return retval;
+  }
+}

Added: hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/ShellParser.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/ShellParser.java?rev=686181&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/ShellParser.java (added)
+++ hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/ShellParser.java Fri Aug 15 02:04:07 2008
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.contrib.failmon;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**********************************************************
+ * Objects of this class parse the output of system command-line
+ * utilities that can give information about the state of  
+ * various hardware components in the system. Typically, each such
+ * object either invokes a command and reads its output or reads the 
+ * output of one such command from a file on the disk. Currently 
+ * supported utilities include ifconfig, smartmontools, lm-sensors,
+ * /proc/cpuinfo.
+ *
+ **********************************************************/
+
+public abstract class ShellParser implements Monitored {
+
+  /**
+   * Find the first occurence ofa pattern in a piece of text 
+   * and return a specific group.
+   * 
+   *  @param strPattern the regular expression to match
+   *  @param text the text to search
+   *  @param grp the number of the matching group to return
+   *  
+   *  @return a String containing the matched group of the regular expression
+   */
+  protected String findPattern(String strPattern, String text, int grp) {
+
+    Pattern pattern = Pattern.compile(strPattern, Pattern.MULTILINE);
+    Matcher matcher = pattern.matcher(text);
+
+    if (matcher.find(0))
+      return matcher.group(grp);
+
+    return null;
+  }
+
+  /**
+   * Finds all occurences of a pattern in a piece of text and returns 
+   * the matching groups.
+   * 
+   *  @param strPattern the regular expression to match
+   *  @param text the text to search
+   *  @param grp the number of the matching group to return
+   *  @param separator the string that separates occurences in the returned value
+   *  
+   *  @return a String that contains all occurences of strPattern in text, 
+   *  separated by separator
+   */
+  protected String findAll(String strPattern, String text, int grp,
+      String separator) {
+
+    String retval = "";
+    boolean firstTime = true;
+
+    Pattern pattern = Pattern.compile(strPattern);
+    Matcher matcher = pattern.matcher(text);
+
+    while (matcher.find()) {
+      retval += (firstTime ? "" : separator) + matcher.group(grp);
+      firstTime = false;
+    }
+
+    return retval;
+  }
+
+  /**
+   * Insert all EventRecords that can be extracted for
+   * the represented hardware component into a LocalStore.
+   * 
+   * @param ls the LocalStore into which the EventRecords 
+   * are to be stored.
+   */
+  public void monitor(LocalStore ls) {
+    ls.insert(monitor());
+  }
+
+  abstract public EventRecord[] monitor();
+
+  abstract public EventRecord query(String s) throws Exception;
+
+}

Added: hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SystemLogParser.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SystemLogParser.java?rev=686181&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SystemLogParser.java (added)
+++ hadoop/core/trunk/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SystemLogParser.java Fri Aug 15 02:04:07 2008
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.contrib.failmon;
+
+import java.io.IOException;
+import java.util.Calendar;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**********************************************************
+ * An object of this class parses a Unix system log file to create
+ * appropriate EventRecords. Currently, only the syslogd logging 
+ * daemon is supported.
+ * 
+ **********************************************************/
+
+public class SystemLogParser extends LogParser {
+
+  static String[] months = { "January", "February", "March", "April", "May",
+      "June", "July", "August", "September", "October", "November", "December" };
+  /**
+   * Create a new parser object .
+   */  
+  public SystemLogParser(String fname) {
+    super(fname);
+    if ((dateformat = Environment.getProperty("log.system.dateformat")) == null)
+      dateformat = "(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\\s+(\\d+)";
+    if ((timeformat = Environment.getProperty("log.system.timeformat")) == null)
+      timeformat = "\\d{2}:\\d{2}:\\d{2}";
+  }
+
+  /**
+   * Parses one line of the log. If the line contains a valid 
+   * log entry, then an appropriate EventRecord is returned, after all
+   * relevant fields have been parsed.
+   *
+   *  @param line the log line to be parsed
+   *    
+   *  @return the EventRecord representing the log entry of the line. If 
+   *  the line does not contain a valid log entry, then the EventRecord 
+   *  returned has isValid() = false. When the end-of-file has been reached,
+   *  null is returned to the caller.
+   */
+  public EventRecord parseLine(String line) throws IOException {
+
+    EventRecord retval = null;
+
+    if (line != null) {
+      // process line
+      String patternStr = "(" + dateformat + ")";
+      patternStr += "\\s+";
+      patternStr += "(" + timeformat + ")";
+      patternStr += "\\s+(\\S*)\\s"; // for hostname
+//      patternStr += "\\s*([\\w+\\.?]+)"; // for source
+      patternStr += ":?\\s*(.+)"; // for the message
+      Pattern pattern = Pattern.compile(patternStr);
+      Matcher matcher = pattern.matcher(line);
+      if (matcher.find() && matcher.groupCount() >= 0) {
+        retval = new EventRecord(hostname, ips, parseDate(matcher.group(1),
+            matcher.group(4)), "SystemLog", "Unknown", // loglevel
+            "Unknown", // source
+            matcher.group(6)); // message
+      } else {
+        retval = new EventRecord();
+      }
+    }
+
+    return retval;
+  }
+
+  /**
+   * Parse a date found in the system log.
+   * 
+   * @return a Calendar representing the date
+   */
+  protected Calendar parseDate(String strDate, String strTime) {
+    Calendar retval = Calendar.getInstance();
+    // set date
+    String[] fields = strDate.split("\\s+");
+    retval.set(Calendar.MONTH, parseMonth(fields[0]));
+    retval.set(Calendar.DATE, Integer.parseInt(fields[1]));
+    // set time
+    fields = strTime.split(":");
+    retval.set(Calendar.HOUR_OF_DAY, Integer.parseInt(fields[0]));
+    retval.set(Calendar.MINUTE, Integer.parseInt(fields[1]));
+    retval.set(Calendar.SECOND, Integer.parseInt(fields[2]));
+    return retval;
+  }
+
+  /**
+   * Convert the name of a month to the corresponding int value.
+   * 
+   * @return the int representation of the month.
+   */
+  private int parseMonth(String month) {
+    for (int i = 0; i < months.length; i++)
+      if (months[i].startsWith(month))
+        return i;
+    return -1;
+  }
+  
+  /**
+   * Return a String with information about this class
+   * 
+   * @return A String describing this class
+   */
+  public String getInfo() {
+    return ("System Log Parser for file : " + file.getAbsoluteFile());
+  }
+}



Mime
View raw message