chukwa-commits mailing list archives

From: ey...@apache.org
Subject: svn commit: r761510 - in /hadoop/chukwa/trunk: bin/ conf/ src/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ src/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ src/web/hicc/descriptors/
Date: Fri, 03 Apr 2009 03:29:59 GMT
Author: eyang
Date: Fri Apr  3 03:29:57 2009
New Revision: 761510

URL: http://svn.apache.org/viewvc?rev=761510&view=rev
Log:
CHUKWA-29.  Added TaskTracker and DataNode client trace log parser and database loader.  (Chris Douglas via Eric Yang)

Added:
    hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ClientTraceProcessor.java
    hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ClientTrace.java
    hadoop/chukwa/trunk/src/web/hicc/descriptors/client_trace.descriptor
Modified:
    hadoop/chukwa/trunk/bin/processSinkFiles.sh
    hadoop/chukwa/trunk/conf/chukwa-demux-conf.xml.template
    hadoop/chukwa/trunk/conf/chukwa-env.sh.template
    hadoop/chukwa/trunk/conf/database_create_tables
    hadoop/chukwa/trunk/conf/hadoop-log4j.properties
    hadoop/chukwa/trunk/conf/mdl.xml.template
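
For context, the new mapper consumes DataNode/TaskTracker clienttrace lines of roughly the shape shown below. This is a minimal standalone Java sketch, not part of the commit: the sample line and its field list are illustrative (exact clienttrace output varies by Hadoop version), but the two regexes are the same idPattern and kvPattern declared in ClientTraceProcessor.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Standalone sketch, not part of this commit. The sample line is hypothetical;
    // real clienttrace messages carry additional fields depending on the Hadoop version.
    public class ClientTraceLineSketch {
      public static void main(String[] args) {
        String line = "2009-04-03 03:29:57,123 INFO "
            + "org.apache.hadoop.hdfs.server.datanode.DataNode.clienttrace: "
            + "src: /10.0.1.5:50010, dest: /10.0.1.20:40391, bytes: 67108864, op: HDFS_READ";
        // Same patterns as ClientTraceProcessor: the first 23 characters are the
        // timestamp, the message body is a comma-separated list of "key: value" pairs.
        Matcher id = Pattern.compile("^(.{23}).*clienttrace.*").matcher(line);
        Matcher kv = Pattern.compile("\\s+(\\w+):\\s+([^,]+)").matcher(line);
        if (id.find()) {
          System.out.println("timestamp = " + id.group(1));
        }
        while (kv.find()) {
          System.out.println(kv.group(1) + " = " + kv.group(2));
        }
      }
    }

Running it prints the 23-character timestamp followed by the src, dest, bytes and op pairs that the mapper turns into a ChukwaRecord.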

Modified: hadoop/chukwa/trunk/bin/processSinkFiles.sh
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/bin/processSinkFiles.sh?rev=761510&r1=761509&r2=761510&view=diff
==============================================================================
--- hadoop/chukwa/trunk/bin/processSinkFiles.sh (original)
+++ hadoop/chukwa/trunk/bin/processSinkFiles.sh Fri Apr  3 03:29:57 2009
@@ -124,7 +124,7 @@
   debugDate=`date `
   echo "$debugDate done with demux job" >> "${CHUKWA_LOG_DIR}/mr.log"
    
-  ${JAVA_HOME}/bin/java -Djava.library.path=${JAVA_LIBRARY_PATH} -DCHUKWA_HOME=${CHUKWA_HOME} -DCHUKWA_CONF_DIR=${CHUKWA_CONF_DIR} -DCHUKWA_LOG_DIR=${CHUKWA_LOG_DIR} -Dlog4j.configuration=log4j.properties -classpath ${CLASSPATH}:${CHUKWA_CORE}:${HADOOP_JAR}:${COMMON}:${tools}:${CHUKWA_HOME}/conf org.apache.hadoop.chukwa.extraction.database.DatabaseLoader "${srcDoneHdfsDir}/demux" SystemMetrics Df Hadoop_dfs Hadoop_jvm Hadoop_mapred Hadoop_rpc MSSRGraph MRJobCounters NodeActivity HodJob HodMachine Hadoop_dfs_FSDirectory Hadoop_dfs_FSNamesystem Hadoop_dfs_datanode Hadoop_dfs_namenode Hadoop_jvm_metrics Hadoop_mapred_job Hadoop_mapred_jobtracker Hadoop_mapred_shuffleOutput Hadoop_mapred_tasktracker Hadoop_rpc_metrics JobData TaskData HDFSUsage
+  ${JAVA_HOME}/bin/java -Djava.library.path=${JAVA_LIBRARY_PATH} -DCHUKWA_HOME=${CHUKWA_HOME} -DCHUKWA_CONF_DIR=${CHUKWA_CONF_DIR} -DCHUKWA_LOG_DIR=${CHUKWA_LOG_DIR} -Dlog4j.configuration=log4j.properties -classpath ${CLASSPATH}:${CHUKWA_CORE}:${HADOOP_JAR}:${COMMON}:${tools}:${CHUKWA_HOME}/conf org.apache.hadoop.chukwa.extraction.database.DatabaseLoader "${srcDoneHdfsDir}/demux" SystemMetrics Df Hadoop_dfs Hadoop_jvm Hadoop_mapred Hadoop_rpc MSSRGraph MRJobCounters NodeActivity HodJob HodMachine Hadoop_dfs_FSDirectory Hadoop_dfs_FSNamesystem Hadoop_dfs_datanode Hadoop_dfs_namenode Hadoop_jvm_metrics Hadoop_mapred_job Hadoop_mapred_jobtracker Hadoop_mapred_shuffleOutput Hadoop_mapred_tasktracker Hadoop_rpc_metrics JobData TaskData HDFSUsage ClientTrace
   endDbLoaderTime=`date +%s`
   dbLoaderDuration=$(( $endDbLoaderTime - $endDemuxTime))
   echo "dbLoaderDuration $dbLoaderDuration" >> "${CHUKWA_LOG_DIR}/mr.log"

Modified: hadoop/chukwa/trunk/conf/chukwa-demux-conf.xml.template
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/conf/chukwa-demux-conf.xml.template?rev=761510&r1=761509&r2=761510&view=diff
==============================================================================
--- hadoop/chukwa/trunk/conf/chukwa-demux-conf.xml.template (original)
+++ hadoop/chukwa/trunk/conf/chukwa-demux-conf.xml.template Fri Apr  3 03:29:57 2009
@@ -117,6 +117,12 @@
    </property>
     
    <property>
+     <name>ClientTrace</name>
+     <value>org.apache.hadoop.chukwa.extraction.demux.processor.mapper.ClientTraceProcessor</value>
+     <description>Parser class for TaskTracker and DataNode clienttrace data</description>
+   </property>
+ 
+   <property>
     <name>HDFSUsage</name>
     <value>org.apache.hadoop.chukwa.extraction.demux.processor.mapper.JPluginMapper</value>
     <description></description>
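
The property above follows the existing convention in this file: a record type name maps to the fully qualified mapper class that parses it. A hedged sketch of that lookup is below, assuming the demux job resolves the class reflectively; the hypothetical map stands in for the parsed configuration, and the Chukwa classes must be on the classpath for it to run.

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical sketch of the name-to-class mapping declared above; the real
    // resolution is done inside the demux processor factory, not by this code.
    public class ProcessorLookupSketch {
      public static void main(String[] args) throws Exception {
        Map<String, String> demuxConf = new HashMap<String, String>();
        demuxConf.put("ClientTrace",
            "org.apache.hadoop.chukwa.extraction.demux.processor.mapper.ClientTraceProcessor");
        // Load the parser class named by the record type.
        Object processor = Class.forName(demuxConf.get("ClientTrace")).newInstance();
        System.out.println("Loaded " + processor.getClass().getName());
      }
    }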

Modified: hadoop/chukwa/trunk/conf/chukwa-env.sh.template
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/conf/chukwa-env.sh.template?rev=761510&r1=761509&r2=761510&view=diff
==============================================================================
--- hadoop/chukwa/trunk/conf/chukwa-env.sh.template (original)
+++ hadoop/chukwa/trunk/conf/chukwa-env.sh.template Fri Apr  3 03:29:57 2009
@@ -47,6 +47,9 @@
 # The location of chukwa logs, defaults to CHUKWA_HOME/logs
 #export CHUKWA_LOG_DIR="/tmp/chukwa-log-dir"
 
+# The location to store chukwa data, defaults to CHUKWA_HOME/data
+#export CHUKWA_DATA_DIR="${CHUKWA_HOME}/data"
+
 # The location of torque pbsnodes command
 #export nodeActivityCmde="
 

Modified: hadoop/chukwa/trunk/conf/database_create_tables
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/conf/database_create_tables?rev=761510&r1=761509&r2=761510&view=diff
==============================================================================
--- hadoop/chukwa/trunk/conf/database_create_tables (original)
+++ hadoop/chukwa/trunk/conf/database_create_tables Fri Apr  3 03:29:57 2009
@@ -592,3 +592,29 @@
     primary key(timestamp, user),
     index(timestamp)
 ) ENGINE=InnoDB;
+
+create table if not exists util_template (
+    timestamp timestamp default CURRENT_TIMESTAMP,
+    user VARCHAR(20),
+    queue VARCHAR(20),
+    bytes bigint,
+    slot_hours double,
+    primary key(user, timestamp),
+    index(queue)
+) ENGINE=InnoDB;
+
+create table if not exists ClientTrace_template (
+    Timestamp timestamp default 0,
+    local_hdfs_read double,
+    intra_rack_hdfs_read double,
+    inter_rack_hdfs_read double,
+    local_hdfs_write double,
+    intra_rack_hdfs_write double,
+    inter_rack_hdfs_write double,
+    local_mapred_shuffle double,
+    intra_rack_mapred_shuffle double,
+    inter_rack_mapred_shuffle double,
+    primary key(timestamp),
+     index(timestamp)
+) ENGINE=InnoDB;
+

Modified: hadoop/chukwa/trunk/conf/hadoop-log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/conf/hadoop-log4j.properties?rev=761510&r1=761509&r2=761510&view=diff
==============================================================================
--- hadoop/chukwa/trunk/conf/hadoop-log4j.properties (original)
+++ hadoop/chukwa/trunk/conf/hadoop-log4j.properties Fri Apr  3 03:29:57 2009
@@ -51,8 +51,32 @@
 log4j.appender.DRFAAUDIT.DatePattern=.yyyy-MM-dd
 log4j.appender.DRFAAUDIT.layout=org.apache.log4j.PatternLayout
 log4j.appender.DRFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.logger.org.apache.hadoop.fs.FSNamesystem.audit=INFO,DRFAAUDIT
-log4j.additivity.org.apache.hadoop.fs.FSNamesystem.audit=false
+log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=INFO,DRFAAUDIT
+log4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false
+
+# ClientTrace (Shuffle bytes)
+log4j.appender.MR_CLIENTTRACE=org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender
+log4j.appender.MR_CLIENTTRACE.File=${hadoop.log.dir}/mr_clienttrace.log
+log4j.appender.MR_CLIENTTRACE.recordType=ClientTrace
+log4j.appender.MR_CLIENTTRACE.chukwaClientHostname=localhost
+log4j.appender.MR_CLIENTTRACE.chukwaClientPortNum=9093
+log4j.appender.MR_CLIENTTRACE.DatePattern=.yyyy-MM-dd
+log4j.appender.MR_CLIENTTRACE.layout=org.apache.log4j.PatternLayout
+log4j.appender.MR_CLIENTTRACE.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+log4j.logger.org.apache.hadoop.mapred.TaskTracker.clienttrace=INFO,MR_CLIENTTRACE
+log4j.additivity.org.apache.hadoop.mapred.TaskTracker.clienttrace=false
+
+# ClientTrace (HDFS bytes)
+log4j.appender.HDFS_CLIENTTRACE=org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender
+log4j.appender.HDFS_CLIENTTRACE.File=${hadoop.log.dir}/hdfs_clienttrace.log
+log4j.appender.HDFS_CLIENTTRACE.recordType=ClientTrace
+log4j.appender.HDFS_CLIENTTRACE.chukwaClientHostname=localhost
+log4j.appender.HDFS_CLIENTTRACE.chukwaClientPortNum=9093
+log4j.appender.HDFS_CLIENTTRACE.DatePattern=.yyyy-MM-dd
+log4j.appender.HDFS_CLIENTTRACE.layout=org.apache.log4j.PatternLayout
+log4j.appender.HDFS_CLIENTTRACE.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+log4j.logger.org.apache.hadoop.hdfs.server.datanode.DataNode.clienttrace=INFO,HDFS_CLIENTTRACE
+log4j.additivity.org.apache.hadoop.hdfs.server.datanode.DataNode.clienttrace=false
 
 #
 # console
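
One detail the two new appenders rely on: the %d{ISO8601} conversion pattern emits timestamps of the form "2009-04-03 03:29:57,123", and those 23 characters are exactly what ClientTraceProcessor captures with its idPattern and parses with the matching SimpleDateFormat. A standalone sketch, not part of this commit:

    import java.text.SimpleDateFormat;
    import java.util.Date;

    // Sketch only: the ISO8601 log prefix is 23 characters and round-trips
    // through the same format string the new parser uses.
    public class TimestampFormatSketch {
      public static void main(String[] args) throws Exception {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS");
        String stamp = sdf.format(new Date());
        System.out.println(stamp + " (" + stamp.length() + " chars)"); // 23 chars
        System.out.println(sdf.parse(stamp.substring(0, 23)).getTime());
      }
    }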

Modified: hadoop/chukwa/trunk/conf/mdl.xml.template
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/conf/mdl.xml.template?rev=761510&r1=761509&r2=761510&view=diff
==============================================================================
--- hadoop/chukwa/trunk/conf/mdl.xml.template (original)
+++ hadoop/chukwa/trunk/conf/mdl.xml.template Fri Apr  3 03:29:57 2009
@@ -174,6 +174,12 @@
   <value>user_util</value>
 </property>
 
+<property>
+  <name>report.db.name.clienttrace</name>
+  <value>ClientTrace</value>
+  <description></description>
+</property>
+
 <!-- System Metrics Config -->
 <property>
   <name>report.db.primary.key.systemmetrics</name>
@@ -499,6 +505,57 @@
   <value>host</value>
 </property>
 
+<!-- clienttrace -->
+<property>
+  <name>report.db.primary.key.clienttrace</name>
+  <value>timestamp</value>
+</property>
+
+<property>
+  <name>metric.clienttrace.local_hdfs_read</name>
+  <value>local_hdfs_read</value>
+</property>
+
+<property>
+  <name>metric.clienttrace.intra_rack_hdfs_read</name>
+  <value>intra_rack_hdfs_read</value>
+</property>
+
+<property>
+  <name>metric.clienttrace.inter_rack_hdfs_read</name>
+  <value>inter_rack_hdfs_read</value>
+</property>
+
+<property>
+  <name>metric.clienttrace.local_hdfs_write</name>
+  <value>local_hdfs_write</value>
+</property>
+
+<property>
+  <name>metric.clienttrace.intra_rack_hdfs_write</name>
+  <value>intra_rack_hdfs_write</value>
+</property>
+
+<property>
+  <name>metric.clienttrace.inter_rack_hdfs_write</name>
+  <value>inter_rack_hdfs_write</value>
+</property>
+
+<property>
+  <name>metric.clienttrace.local_mapred_shuffle</name>
+  <value>local_mapred_shuffle</value>
+</property>
+
+<property>
+  <name>metric.clienttrace.intra_rack_mapred_shuffle</name>
+  <value>intra_rack_mapred_shuffle</value>
+</property>
+
+<property>
+  <name>metric.clienttrace.inter_rack_mapred_shuffle</name>
+  <value>inter_rack_mapred_shuffle</value>
+</property>
+
 <!-- dfs name node metrics -->
 <property>
   <name>report.db.primary.key.hadoop_dfs_namenode</name>
@@ -1771,5 +1828,10 @@
   <value>5,30,180,720</value>
 </property>
 
+<property>
+  <name>consolidator.table.ClientTrace</name>
+  <value>5,30,180,720</value>
+</property>
+
 </configuration>
 

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ClientTraceProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ClientTraceProcessor.java?rev=761510&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ClientTraceProcessor.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ClientTraceProcessor.java Fri Apr  3 03:29:57 2009
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.extraction.demux.processor.mapper;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.HashMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.hadoop.chukwa.extraction.engine.Record;
+import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
+import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.log4j.Logger;
+
+public class ClientTraceProcessor extends AbstractProcessor {
+
+  private static final String recordType = "ClientTrace";
+  private final SimpleDateFormat sdf =
+    new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS");
+  private final Matcher kvMatcher;
+  private final Matcher idMatcher;
+  private final Matcher ipMatcher;
+  // extract date, source
+  private final Pattern idPattern =
+    Pattern.compile("^(.{23}).*clienttrace.*");
+  // extract "key: value" pairs
+  private final Pattern kvPattern =
+    Pattern.compile("\\s+(\\w+):\\s+([^,]+)");
+  private final Pattern ipPattern =
+    Pattern.compile("[0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+");
+
+  public ClientTraceProcessor() {
+    super();
+    kvMatcher = kvPattern.matcher("");
+    idMatcher = idPattern.matcher("");
+    ipMatcher = ipPattern.matcher("");
+  }
+
+  public enum Locality {
+    LOCAL("local"), INTRA("intra_rack"), INTER("inter_rack");
+    String lbl;
+    Locality(String lbl) {
+      this.lbl = lbl;
+    }
+    public String getLabel() {
+      return lbl;
+    }
+  };
+
+  protected Locality getLocality(String src, String dst) throws Exception {
+    if (null == src || null == dst) {
+      throw new IOException("Missing src/dst");
+    }
+    ipMatcher.reset(src);
+    if (!ipMatcher.find()) {
+      throw new IOException("Could not find src");
+    }
+    byte[] srcIP = InetAddress.getByName(ipMatcher.group(0)).getAddress();
+    ipMatcher.reset(dst);
+    if (!ipMatcher.find()) {
+      throw new IOException("Could not find dst");
+    }
+    byte[] dstIP = InetAddress.getByName(ipMatcher.group(0)).getAddress();
+    for (int i = 0; i < 4; ++i) {
+      if (srcIP[i] != dstIP[i]) {
+        return (3 == i && (srcIP[i] & 0xC0) == (dstIP[i] & 0xC0))
+          ? Locality.INTRA
+          : Locality.INTER;
+      }
+    }
+    return Locality.LOCAL;
+  }
+
+  @Override
+  public void parse(String recordEntry,
+      OutputCollector<ChukwaRecordKey,ChukwaRecord> output, Reporter reporter)
+      throws Throwable {
+    try {
+      idMatcher.reset(recordEntry);
+      long ms;
+      if (idMatcher.find()) {
+        ms = sdf.parse(idMatcher.group(1)).getTime();
+      } else {
+        throw new IOException("Could not find date/source");
+      }
+      kvMatcher.reset(recordEntry);
+      if (!kvMatcher.find()) {
+        throw new IOException("Failed to find record");
+      }
+      ChukwaRecord rec = new ChukwaRecord();
+      do {
+        rec.add(kvMatcher.group(1), kvMatcher.group(2));
+      } while (kvMatcher.find());
+      Locality loc = getLocality(rec.getValue("src"), rec.getValue("dest"));
+      rec.add("locality", loc.getLabel());
+
+      calendar.setTimeInMillis(ms);
+      calendar.set(Calendar.SECOND, 0);
+      calendar.set(Calendar.MILLISECOND, 0);
+      ms = calendar.getTimeInMillis();
+      calendar.set(Calendar.MINUTE, 0);
+      key.setKey(calendar.getTimeInMillis() + "/" + loc.getLabel() + "/" +
+                 rec.getValue("op").toLowerCase() + "/" + ms);
+      key.setReduceType("ClientTrace");
+      rec.setTime(ms);
+
+      rec.add(Record.tagsField, chunk.getTags());
+      rec.add(Record.sourceField, chunk.getSource());
+      rec.add(Record.applicationField, chunk.getApplication());
+      output.collect(key, rec);
+
+    } catch (ParseException e) {
+      log.warn("Unable to parse the date in DefaultProcessor ["
+          + recordEntry + "]", e);
+      e.printStackTrace();
+      throw e;
+    } catch (IOException e) {
+      log.warn("Unable to collect output in DefaultProcessor ["
+          + recordEntry + "]", e);
+      e.printStackTrace();
+      throw e;
+    }
+  }
+
+  public String getDataType() {
+    return recordType;
+  }
+}
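
The locality rule in getLocality() above compares the two addresses octet by octet: identical addresses are local; a difference confined to the last octet still counts as intra-rack when the top two bits of that octet match (i.e. both hosts fall in the same /26), and everything else is inter-rack. Below is a standalone re-implementation of the same comparison for illustration only, with hypothetical addresses; the committed logic lives in ClientTraceProcessor.

    import java.net.InetAddress;

    // Standalone sketch of the octet comparison used by getLocality(); addresses
    // are illustrative.
    public class LocalitySketch {
      static String classify(String src, String dst) throws Exception {
        byte[] a = InetAddress.getByName(src).getAddress();
        byte[] b = InetAddress.getByName(dst).getAddress();
        for (int i = 0; i < 4; ++i) {
          if (a[i] != b[i]) {
            return (3 == i && (a[i] & 0xC0) == (b[i] & 0xC0)) ? "intra_rack" : "inter_rack";
          }
        }
        return "local";
      }
      public static void main(String[] args) throws Exception {
        System.out.println(classify("10.0.1.5", "10.0.1.5"));   // local
        System.out.println(classify("10.0.1.5", "10.0.1.20"));  // intra_rack (same /26)
        System.out.println(classify("10.0.1.5", "10.0.1.70"));  // inter_rack (different /26)
        System.out.println(classify("10.0.1.5", "10.0.2.5"));   // inter_rack
      }
    }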

Added: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ClientTrace.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ClientTrace.java?rev=761510&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ClientTrace.java (added)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/demux/processor/reducer/ClientTrace.java Fri Apr  3 03:29:57 2009
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.extraction.demux.processor.reducer;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.HashMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.Iterator;
+
+import org.apache.hadoop.chukwa.extraction.engine.Record;
+import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
+import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.log4j.Logger;
+
+public class ClientTrace implements ReduceProcessor {
+
+  static final Logger log = Logger.getLogger(ClientTrace.class);
+
+  @Override
+  public String getDataType() {
+    return this.getClass().getName();
+  }
+
+  @Override
+  public void process(ChukwaRecordKey key,
+            Iterator<ChukwaRecord> values,
+            OutputCollector<ChukwaRecordKey, ChukwaRecord> output,
+            Reporter reporter) {
+    try {
+      long bytes = 0L;
+      ChukwaRecord rec = null;
+      while (values.hasNext()) {
+        rec = values.next();
+        bytes += Long.valueOf(rec.getValue("bytes"));
+      }
+      if (null == rec) {
+        return;
+      }
+      ChukwaRecord emit = new ChukwaRecord();
+      emit.add(Record.tagsField, rec.getValue(Record.tagsField));
+      emit.add(Record.sourceField, "undefined"); // TODO
+      emit.add(Record.applicationField, rec.getValue(Record.applicationField));
+
+      String[] k = key.getKey().split("/");
+      emit.add(k[1] + "_" + k[2], String.valueOf(bytes));
+      emit.setTime(Long.valueOf(k[3]));
+      output.collect(key, emit);
+
+    } catch (IOException e) {
+      log.warn("Unable to collect output in SystemMetricsReduceProcessor [" + key + "]", e);
+    }
+  }
+}
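
To make the key handling above concrete: the mapper emits keys of the form "<hourBucket>/<locality>/<op>/<minuteTimestamp>", so this reducer sums the bytes field over a key's records and writes one column named "<locality>_<op>", matching the ClientTrace columns created in database_create_tables. A small standalone sketch with hypothetical numbers:

    // Sketch only, not part of this commit; key and byte counts are hypothetical.
    public class ClientTraceKeySketch {
      public static void main(String[] args) {
        String key = "1238727600000/local/hdfs_read/1238727660000";
        long[] bytesPerRecord = { 67108864L, 67108864L };  // two 64 MB reads in one minute
        long total = 0L;
        for (long b : bytesPerRecord) {
          total += b;
        }
        String[] k = key.split("/");
        // Same column naming as ClientTrace.process(): "<locality>_<op>"
        System.out.println(k[1] + "_" + k[2] + " = " + total + " @ " + k[3]);
        // prints: local_hdfs_read = 134217728 @ 1238727660000
      }
    }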

Added: hadoop/chukwa/trunk/src/web/hicc/descriptors/client_trace.descriptor
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/web/hicc/descriptors/client_trace.descriptor?rev=761510&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/web/hicc/descriptors/client_trace.descriptor (added)
+++ hadoop/chukwa/trunk/src/web/hicc/descriptors/client_trace.descriptor Fri Apr  3 03:29:57 2009
@@ -0,0 +1,56 @@
+{
+"id":"client_trace",
+"title":"Client Trace",
+"version":"0.1",
+"categories":"Hadoop,Status",
+"module":"iframe/jsp/single-series-chart-javascript.jsp",
+"description":"Display Hadoop data transfer statistics",
+"screendump":"\/images\/server_load.gif",
+"refresh":"15",
+"parameters":[
+{"name":"table","type":"string","value":"ClientTrace","edit":"0"},
+{"name":"period","type":"select","value":"","label":"Period","options":[
+{"label":"Use Time Widget","value":""},
+{"label":"Last 1 Hour","value":"last1hr"},
+{"label":"Last 2 Hours","value":"last2hr"},
+{"label":"Last 3 Hours","value":"last3hr"},
+{"label":"Last 6 Hours","value":"last6hr"},
+{"label":"Last 12 Hours","value":"last12hr"},
+{"label":"Last 24 Hours","value":"last24hr"},
+{"label":"Yesterday","value":"yesterday"},
+{"label":"Last 7 Days","value":"last7d"},
+{"label":"Last 30 Days","value":"last30d"}
+]},
+{"name":"metric","type":"select_multiple","value":"local_hdfs_read","label":"Metric","options":[
+{"label":"Local HDFS Read","value":"local_hdfs_read"},
+{"label":"Intra-rack HDFS Read","value":"intra_rack_hdfs_read"},
+{"label":"Inter-rack HDFS Read","value":"inter_rack_hdfs_read"},
+{"label":"Local HDFS Write","value":"local_hdfs_write"},
+{"label":"Intra-rack HDFS Write","value":"intra_rack_hdfs_write"},
+{"label":"Inter-rack HDFS Write","value":"inter_rack_hdfs_write"},
+{"label":"Local Shuffle","value":"local_mapred_shuffle"},
+{"label":"Intra-rack Shuffle","value":"intra_rack_mapred_shuffle"},
+{"label":"Inter-rack Shuffle","value":"inter_rack_mapred_shuffle"},
+]},
+{"name":"width","type":"select","value":"300","label":"Width","options":[
+{"label":"300","value":"300"},
+{"label":"400","value":"400"},
+{"label":"500","value":"500"},
+{"label":"600","value":"600"},
+{"label":"800","value":"800"},
+{"label":"1000","value":"1000"},
+{"label":"1200","value":"1200"}
+]},
+{"name":"height","type":"select","value":"200","label":"Height","options":[
+{"label":"200","value":"200"},
+{"label":"400","value":"400"},
+{"label":"600","value":"600"},
+{"label":"800","value":"800"},
+{"label":"1000","value":"1000"}
+]},
+{"name":"legend","type":"radio","value":"on","label":"Show Legends","options":[
+{"label":"On","value":"on"},
+{"label":"Off","value":"off"}
+]}
+]
+}


