chukwa-commits mailing list archives

From ey...@apache.org
Subject svn commit: r1206918 - in /incubator/chukwa/trunk: ./ conf/ src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ src/main/web/hicc/descriptors/ src/main/web/hicc/js/workspace/
Date Sun, 27 Nov 2011 23:26:58 GMT
Author: eyang
Date: Sun Nov 27 23:26:48 2011
New Revision: 1206918

URL: http://svn.apache.org/viewvc?rev=1206918&view=rev
Log:
CHUKWA-588. Updated hbase.schema to match Hadoop 0.20.205.0 metrics and job summary. (Eric Yang)
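
In brief: conf/hadoop-log4j.properties reverts the DRFA and TLA appenders to the stock Hadoop classes, keeps the Chukwa audit and ClientTrace appenders (relocated to the end of the file), and adds a Job Summary SocketAppender (JSA) on port 9098; conf/initial_adaptors registers a matching SocketAdaptor for the JobSummary data type; a new JobSummary demux processor parses those records; conf/hbase.schema gains a "Jobs" table with a "summary" column family and drops families that 0.20.205.0 no longer emits; and conf/hadoop-metrics2.properties switches the sink class to Chukwa's Log4jMetricsSink.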

Added:
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobSummary.java
Removed:
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/sql_client.descriptor
Modified:
    incubator/chukwa/trunk/CHANGES.txt
    incubator/chukwa/trunk/conf/chukwa-collector-conf.xml
    incubator/chukwa/trunk/conf/chukwa-demux-conf.xml
    incubator/chukwa/trunk/conf/hadoop-log4j.properties
    incubator/chukwa/trunk/conf/hadoop-metrics2.properties
    incubator/chukwa/trunk/conf/hbase.schema
    incubator/chukwa/trunk/conf/initial_adaptors
    incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/HadoopMetricsProcessor.java
    incubator/chukwa/trunk/src/main/web/hicc/js/workspace/workspace.js

Modified: incubator/chukwa/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/CHANGES.txt?rev=1206918&r1=1206917&r2=1206918&view=diff
==============================================================================
--- incubator/chukwa/trunk/CHANGES.txt (original)
+++ incubator/chukwa/trunk/CHANGES.txt Sun Nov 27 23:26:48 2011
@@ -98,6 +98,8 @@ Trunk (unreleased changes)
 
   BUG FIXES
 
+    CHUKWA-588. Updated hbase.schema to match Hadoop 0.20.205.0 metrics and job summary. (Eric Yang)
+
     CHUKWA-609. Map / to /hicc for HICC web server. (Eric Yang)
 
     CHUKWA-593. Fixed infinite loop archiving at midnight. (Sourygna Luangsay via Eric Yang)

Modified: incubator/chukwa/trunk/conf/chukwa-collector-conf.xml
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/conf/chukwa-collector-conf.xml?rev=1206918&r1=1206917&r2=1206918&view=diff
==============================================================================
--- incubator/chukwa/trunk/conf/chukwa-collector-conf.xml (original)
+++ incubator/chukwa/trunk/conf/chukwa-collector-conf.xml Sun Nov 27 23:26:48 2011
@@ -26,12 +26,14 @@
     <value>org.apache.hadoop.chukwa.datacollection.writer.PipelineStageWriter</value>
   </property>
 
+  <!-- Sequence File Writer parameters
   <property>
     <name>chukwaCollector.pipeline</name>
     <value>org.apache.hadoop.chukwa.datacollection.writer.SocketTeeWriter,org.apache.hadoop.chukwa.datacollection.writer.SeqFileWriter</value>
   </property>
+  -->
 
-<!-- LocalWriter parameters
+  <!-- LocalWriter parameters
   <property>
     <name>chukwaCollector.localOutputDir</name>
     <value>${TODO_COLLECTORS_LOCAL_OUTPUT_DIR}</value>
@@ -43,12 +45,12 @@
     <value>org.apache.hadoop.chukwa.datacollection.writer.localfs.LocalWriter</value>
     <description>Local chukwa writer, see LocalWriter.java</description>
   </property>
--->
+  -->
 
-<!-- When writing to HBase, uncomment the following parameters. If you're running
-HBase in distributed mode, you'll also need to copy your hbase-site.xml file with
-your hbase.zookeeper.quorum setting to the conf/ dir. -->
-<!-- HBaseWriter parameters
+  <!-- When writing to HBase, uncomment the following parameters. If you're running
+  HBase in distributed mode, you'll also need to copy your hbase-site.xml file with
+  your hbase.zookeeper.quorum setting to the conf/ dir. -->
+  <!-- HBaseWriter parameters -->
   <property>
     <name>chukwaCollector.pipeline</name>
     <value>org.apache.hadoop.chukwa.datacollection.writer.SocketTeeWriter,org.apache.hadoop.chukwa.datacollection.writer.hbase.HBaseWriter</value>
@@ -76,7 +78,7 @@ your hbase.zookeeper.quorum setting to t
     </description>
   </property>
 
--->
+  <!-- End of HBaseWriter parameters -->
 
   <property>
     <name>writer.hdfs.filesystem</name>

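Net effect of this hunk: the HBaseWriter pipeline is now active by default, and the SeqFileWriter pipeline ships commented out. A deployment that still wants sequence files on HDFS would swap the comment markers, leaving the pipeline property as it appears in the commented block above:

  <property>
    <name>chukwaCollector.pipeline</name>
    <value>org.apache.hadoop.chukwa.datacollection.writer.SocketTeeWriter,org.apache.hadoop.chukwa.datacollection.writer.SeqFileWriter</value>
  </property>
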
Modified: incubator/chukwa/trunk/conf/chukwa-demux-conf.xml
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/conf/chukwa-demux-conf.xml?rev=1206918&r1=1206917&r2=1206918&view=diff
==============================================================================
--- incubator/chukwa/trunk/conf/chukwa-demux-conf.xml (original)
+++ incubator/chukwa/trunk/conf/chukwa-demux-conf.xml Sun Nov 27 23:26:48 2011
@@ -220,5 +220,10 @@
     <value>org.apache.hadoop.chukwa.extraction.demux.processor.mapper.SystemMetrics</value>
     <description></description>
    </property>
+
+   <property>
+    <name>JobSummary</name>
+    <value>org.apache.hadoop.chukwa.extraction.demux.processor.mapper.JobSummary</value>
+   </property>
 </configuration>
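
The property name here is the data type that demux matches against incoming chunks, and the value is the mapper class that parses them; the new JobSummary entry follows the same pattern as the SystemMetrics entry above it. Registering any custom processor looks the same (the names below are placeholders, not part of this commit):

   <property>
    <name>MyDataType</name>
    <value>org.example.demux.MyProcessor</value>
   </property>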
  

Modified: incubator/chukwa/trunk/conf/hadoop-log4j.properties
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/conf/hadoop-log4j.properties?rev=1206918&r1=1206917&r2=1206918&view=diff
==============================================================================
--- incubator/chukwa/trunk/conf/hadoop-log4j.properties (original)
+++ incubator/chukwa/trunk/conf/hadoop-log4j.properties Sun Nov 27 23:26:48 2011
@@ -1,23 +1,18 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
 # Define some default values that can be overridden by system properties
 hadoop.root.logger=INFO,console
 hadoop.log.dir=.
 hadoop.log.file=hadoop.log
 
+#
+# Job Summary Appender 
+#
+# Use following logger to send summary to separate file defined by 
+# hadoop.mapreduce.jobsummary.log.file rolled daily:
+hadoop.mapreduce.jobsummary.logger=INFO,JSA
+# 
+#hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
+hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
+
 # Define the root logger to the system property "hadoop.root.logger".
 log4j.rootLogger=${hadoop.root.logger}, EventCounter
 
@@ -28,18 +23,8 @@ log4j.threshhold=ALL
 # Daily Rolling File Appender
 #
 
-#log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
-#log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
-
-#
-# CHUKWA
-#
-log4j.appender.DRFA=org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
 log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
-log4j.appender.DRFA.recordType=HadoopLog
-log4j.appender.DRFA.chukwaClientHostname=localhost
-log4j.appender.DRFA.chukwaClientPortNum=9093
-
 
 # Rollver at midnight
 log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
@@ -53,45 +38,6 @@ log4j.appender.DRFA.layout.ConversionPat
 # Debugging Pattern format
 #log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
 
-#
-# AUDIT LOGGING - All audit events are logged at INFO level
-#
-# CHUKWA AUDIT LOG
-
-log4j.appender.DRFAAUDIT=org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender
-log4j.appender.DRFAAUDIT.File=${hadoop.log.dir}/audit.log
-log4j.appender.DRFAAUDIT.recordType=HadoopLog
-log4j.appender.DRFAAUDIT.chukwaClientHostname=localhost
-log4j.appender.DRFAAUDIT.chukwaClientPortNum=9093
-log4j.appender.DRFAAUDIT.DatePattern=.yyyy-MM-dd
-log4j.appender.DRFAAUDIT.layout=org.apache.log4j.PatternLayout
-log4j.appender.DRFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=INFO,DRFAAUDIT
-log4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false
-
-# ClientTrace (Shuffle bytes)
-log4j.appender.MR_CLIENTTRACE=org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender
-log4j.appender.MR_CLIENTTRACE.File=${hadoop.log.dir}/mr_clienttrace.log
-log4j.appender.MR_CLIENTTRACE.recordType=ClientTrace
-log4j.appender.MR_CLIENTTRACE.chukwaClientHostname=localhost
-log4j.appender.MR_CLIENTTRACE.chukwaClientPortNum=9093
-log4j.appender.MR_CLIENTTRACE.DatePattern=.yyyy-MM-dd
-log4j.appender.MR_CLIENTTRACE.layout=org.apache.log4j.PatternLayout
-log4j.appender.MR_CLIENTTRACE.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.logger.org.apache.hadoop.mapred.TaskTracker.clienttrace=INFO,MR_CLIENTTRACE
-log4j.additivity.org.apache.hadoop.mapred.TaskTracker.clienttrace=false
-
-# ClientTrace (HDFS bytes)
-log4j.appender.HDFS_CLIENTTRACE=org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender
-log4j.appender.HDFS_CLIENTTRACE.File=${hadoop.log.dir}/hdfs_clienttrace.log
-log4j.appender.HDFS_CLIENTTRACE.recordType=ClientTrace
-log4j.appender.HDFS_CLIENTTRACE.chukwaClientHostname=localhost
-log4j.appender.HDFS_CLIENTTRACE.chukwaClientPortNum=9093
-log4j.appender.HDFS_CLIENTTRACE.DatePattern=.yyyy-MM-dd
-log4j.appender.HDFS_CLIENTTRACE.layout=org.apache.log4j.PatternLayout
-log4j.appender.HDFS_CLIENTTRACE.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-log4j.logger.org.apache.hadoop.hdfs.server.datanode.DataNode.clienttrace=INFO,HDFS_CLIENTTRACE
-log4j.additivity.org.apache.hadoop.hdfs.server.datanode.DataNode.clienttrace=false
 
 #
 # console
@@ -109,20 +55,34 @@ log4j.appender.console.layout.Conversion
 
 #Default values
 hadoop.tasklog.taskid=null
+hadoop.tasklog.iscleanup=false
 hadoop.tasklog.noKeepSplits=4
 hadoop.tasklog.totalLogFileSize=100
 hadoop.tasklog.purgeLogSplits=true
 hadoop.tasklog.logsRetainHours=12
 
-log4j.appender.TLA=org.apache.hadoop.chukwa.inputtools.log4j.TaskLogAppender
-log4j.appender.TLA.recordType=TaskLog
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
 log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
+log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
 log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
 
 log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
 log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
 
 #
+#Security audit appender
+#
+hadoop.security.log.file=SecurityAuth.audit
+log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender 
+log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
+
+log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
+log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+#new logger
+log4j.logger.SecurityLogger=OFF,console
+log4j.logger.SecurityLogger.additivity=false
+
+#
 # Rolling File Appender
 #
 
@@ -137,15 +97,83 @@ log4j.appender.TLA.layout.ConversionPatt
 #log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
 #log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
 
+#
+# FSNamesystem Audit logging
+# All audit events are logged at INFO level
+#
+log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=WARN
+
 # Custom Logging levels
 
+hadoop.metrics.log.level=INFO
 #log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
 #log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
 #log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+log4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}
+
+# Jets3t library
+log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
+
+#
+# Null Appender
+# Trap security logger on the hadoop client side
+#
+log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
 
 #
 # Event Counter Appender
 # Sends counts of logging messages at different severity levels to Hadoop Metrics.
 #
-log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
+log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
 
+#
+# Job Summary Appender
+#
+log4j.appender.JSA=org.apache.log4j.net.SocketAppender
+log4j.appender.JSA.RemoteHost=localhost
+log4j.appender.JSA.Port=9098
+log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
+log4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+log4j.appender.JSA.DatePattern=.yyyy-MM-dd
+log4j.logger.org.apache.hadoop.mapred.JobInProgress$JobSummary=${hadoop.mapreduce.jobsummary.logger}
+log4j.additivity.org.apache.hadoop.mapred.JobInProgress$JobSummary=false
+
+#
+# AUDIT LOGGING - All audit events are logged at INFO level
+#
+# CHUKWA AUDIT LOG
+
+log4j.appender.DRFAAUDIT=org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender
+log4j.appender.DRFAAUDIT.File=${hadoop.log.dir}/audit.log
+log4j.appender.DRFAAUDIT.recordType=HadoopLog
+log4j.appender.DRFAAUDIT.chukwaClientHostname=localhost
+log4j.appender.DRFAAUDIT.chukwaClientPortNum=9093
+log4j.appender.DRFAAUDIT.DatePattern=.yyyy-MM-dd
+log4j.appender.DRFAAUDIT.layout=org.apache.log4j.PatternLayout
+log4j.appender.DRFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=INFO,DRFAAUDIT
+log4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false
+
+# ClientTrace (Shuffle bytes)
+log4j.appender.MR_CLIENTTRACE=org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender
+log4j.appender.MR_CLIENTTRACE.File=${hadoop.log.dir}/mr_clienttrace.log
+log4j.appender.MR_CLIENTTRACE.recordType=ClientTrace
+log4j.appender.MR_CLIENTTRACE.chukwaClientHostname=localhost
+log4j.appender.MR_CLIENTTRACE.chukwaClientPortNum=9093
+log4j.appender.MR_CLIENTTRACE.DatePattern=.yyyy-MM-dd
+log4j.appender.MR_CLIENTTRACE.layout=org.apache.log4j.PatternLayout
+log4j.appender.MR_CLIENTTRACE.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+log4j.logger.org.apache.hadoop.mapred.TaskTracker.clienttrace=INFO,MR_CLIENTTRACE
+log4j.additivity.org.apache.hadoop.mapred.TaskTracker.clienttrace=false
+
+# ClientTrace (HDFS bytes)
+log4j.appender.HDFS_CLIENTTRACE=org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender
+log4j.appender.HDFS_CLIENTTRACE.File=${hadoop.log.dir}/hdfs_clienttrace.log
+log4j.appender.HDFS_CLIENTTRACE.recordType=ClientTrace
+log4j.appender.HDFS_CLIENTTRACE.chukwaClientHostname=localhost
+log4j.appender.HDFS_CLIENTTRACE.chukwaClientPortNum=9093
+log4j.appender.HDFS_CLIENTTRACE.DatePattern=.yyyy-MM-dd
+log4j.appender.HDFS_CLIENTTRACE.layout=org.apache.log4j.PatternLayout
+log4j.appender.HDFS_CLIENTTRACE.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+log4j.logger.org.apache.hadoop.hdfs.server.datanode.DataNode.clienttrace=INFO,HDFS_CLIENTTRACE
+log4j.additivity.org.apache.hadoop.hdfs.server.datanode.DataNode.clienttrace=false
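
The JSA appender ships each job summary event to localhost:9098 over a socket rather than to a local file; that port is served by the SocketAdaptor added to initial_adaptors below. A minimal sketch for exercising the path, assuming log4j 1.x with this file loaded (e.g. via -Dlog4j.configuration); the class name and payload values are made up for illustration:

  import org.apache.log4j.Logger;

  public class JobSummarySmokeTest {
    public static void main(String[] args) {
      // hadoop.mapreduce.jobsummary.logger=INFO,JSA routes events logged
      // under this logger name to the SocketAppender on localhost:9098.
      Logger summary = Logger.getLogger(
          "org.apache.hadoop.mapred.JobInProgress$JobSummary");
      // Comma-separated key=value pairs, mimicking a JobTracker summary line.
      summary.info("jobId=job_201111272326_0001,submitTime=1322436408000,"
          + "launchTime=1322436409000,finishTime=1322436470000,"
          + "numMaps=4,numReduces=1,user=alice,queue=default,status=SUCCEEDED");
    }
  }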

Modified: incubator/chukwa/trunk/conf/hadoop-metrics2.properties
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/conf/hadoop-metrics2.properties?rev=1206918&r1=1206917&r2=1206918&view=diff
==============================================================================
--- incubator/chukwa/trunk/conf/hadoop-metrics2.properties (original)
+++ incubator/chukwa/trunk/conf/hadoop-metrics2.properties Sun Nov 27 23:26:48 2011
@@ -1,15 +1,5 @@
 # Stream metrics to Chukwa SocketAdaptor
-*.sink.socket.class=org.apache.hadoop.metrics2.sink.SocketSink
-namenode.sink.socket.host=localhost
-namenode.sink.socket.port=9095
-datanode.sink.socket.host=localhost
-datanode.sink.socket.port=9095
-jobtracker.sink.socket.host=localhost
-jobtracker.sink.socket.port=9095
-tasktracker.sink.socket.host=localhost
-tasktracker.sink.socket.port=9095
-# Stream metrics to Chukwa SocketAdaptor
-*.sink.socket.class=org.apache.hadoop.metrics2.sink.SocketSink
+*.sink.socket.class=org.apache.hadoop.chukwa.inputtools.log4j.Log4jMetricsSink
 namenode.sink.socket.host=localhost
 namenode.sink.socket.port=9095
 datanode.sink.socket.host=localhost
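
Besides collapsing the accidentally duplicated sink block, this swaps the metrics2 SocketSink for Chukwa's Log4jMetricsSink; the per-daemon host and port settings (localhost:9095) are unchanged and still match the HadoopMetrics SocketAdaptor in initial_adaptors.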

Modified: incubator/chukwa/trunk/conf/hbase.schema
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/conf/hbase.schema?rev=1206918&r1=1206917&r2=1206918&view=diff
==============================================================================
--- incubator/chukwa/trunk/conf/hbase.schema (original)
+++ incubator/chukwa/trunk/conf/hbase.schema Sun Nov 27 23:26:48 2011
@@ -1,32 +1,22 @@
-create "Hadoop",{NAME => "ClientTrace", VERSIONS => 65535},
+create "Hadoop",
+{NAME => "ClientTrace", VERSIONS => 65535},
 {NAME => "dfs_namenode", VERSIONS => 65535},
-{NAME => "dfs_FSDirectory", VERSIONS => 65535},
 {NAME => "dfs_FSNamesystem", VERSIONS => 65535},
 {NAME => "dfs_datanode", VERSIONS => 65535},
 {NAME => "mapred_jobtracker", VERSIONS => 65535},
 {NAME => "mapred_shuffleOutput", VERSIONS => 65535},
 {NAME => "mapred_tasktracker", VERSIONS => 65535},
-{NAME => "mapred_job", VERSIONS => 65535},
 {NAME => "jvm_metrics", VERSIONS => 65535},
-{NAME => "dfs", VERSIONS => 65535},
-{NAME => "jvm", VERSIONS => 65535},
 {NAME => "mapred_Queue", VERSIONS => 65535},
 {NAME => "metricssystem_MetricsSystem", VERSIONS => 65535},
 {NAME => "rpc_rpc", VERSIONS => 65535},
 {NAME => "rpcdetailed_rpcdetailed", VERSIONS => 65535},
 {NAME => "ugi_ugi", VERSIONS => 65535}
-create "HadoopLog", {NAME => "NameNode", VERSIONS => 65535},
-{NAME => "DataNode", VERSIONS => 65535},
-{NAME => "Audit", VERSIONS => 65535},
-{NAME => "HadoopLog", VERSIONS => 65535}
-create "HDFS", {NAME => "FileSystem"},
-{NAME => "namenode"},
-{NAME => "datanode"},
-{NAME => "clienttrace"}
-create "Mapreduce", {NAME => "jobs"},
-{NAME => "jobtracker"},
-{NAME => "tasktracker"},
-{NAME => "clienttrace"}
+create "HadoopLog", 
+{NAME => "NameNode", VERSIONS => 65535},
+{NAME => "Audit", VERSIONS => 65535}
+create "Jobs",
+{NAME => "summary" }
 create "SystemMetrics", 
 {NAME => "cpu", VERSIONS => 65535},
 {NAME => "system", VERSION => 65535},
@@ -34,13 +24,15 @@ create "SystemMetrics", 
 {NAME => "memory", VERSION => 65535},
 {NAME => "network", VERSION => 65535},
 {NAME => "tags", VERSION => 65535}
-create "ClusterSummary", {NAME=> "cpu", VERSIONS => 65535},
+create "ClusterSummary", 
+{NAME=> "cpu", VERSIONS => 65535},
 {NAME => "system", VERSION => 65535},
 {NAME => "disk", VERSION => 65535},
 {NAME => "memory", VERSION => 65535},
 {NAME => "network", VERSION => 65535},
 {NAME => "hdfs", VERSION => 65535},
 {NAME => "mapreduce", VERSION => 65535}
-create "chukwa", {NAME=>"chukwaAgent_chunkQueue", VERSIONS => 65535},
+create "chukwa", 
+{NAME=>"chukwaAgent_chunkQueue", VERSIONS => 65535},
 {NAME => "chukwaAgent_metrics", VERSION => 65535},
 {NAME => "chukwaAgent_httpSender", VERSION => 65535}
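
The schema now tracks what the Hadoop 0.20.205.0 metrics contexts and the new JobSummary processor actually emit: dfs_FSDirectory and mapred_job are gone, the standalone HDFS and Mapreduce tables are dropped, and job summaries land in the new "Jobs" table's "summary" family, matching the @Table annotation on JobSummary.java below. After loading the schema, a quick sanity check from the hbase shell:

  describe 'Jobs'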

Modified: incubator/chukwa/trunk/conf/initial_adaptors
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/conf/initial_adaptors?rev=1206918&r1=1206917&r2=1206918&view=diff
==============================================================================
--- incubator/chukwa/trunk/conf/initial_adaptors (original)
+++ incubator/chukwa/trunk/conf/initial_adaptors Sun Nov 27 23:26:48 2011
@@ -2,3 +2,4 @@ add sigar.SystemMetrics SystemMetrics 60
 add SocketAdaptor HadoopMetrics 9095 0
 add SocketAdaptor Hadoop 9096 0
 add SocketAdaptor ChukwaMetrics 9097 0
+add SocketAdaptor JobSummary 9098 0
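
Adaptor lines follow the pattern "add <AdaptorClass> <dataType> <params> <startOffset>", so this starts a SocketAdaptor on port 9098 tagging incoming chunks with the JobSummary data type: the same port the JSA SocketAppender targets, and the same type name that chukwa-demux-conf.xml maps to the JobSummary processor.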

Modified: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/HadoopMetricsProcessor.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/HadoopMetricsProcessor.java?rev=1206918&r1=1206917&r2=1206918&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/HadoopMetricsProcessor.java (original)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/HadoopMetricsProcessor.java Sun Nov 27 23:26:48 2011
@@ -30,6 +30,7 @@ import org.apache.hadoop.chukwa.datacoll
 import org.apache.hadoop.chukwa.datacollection.writer.hbase.Annotation.Table;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
+import org.apache.hadoop.chukwa.extraction.engine.Record;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.log4j.Logger;
@@ -41,14 +42,12 @@ import org.json.simple.JSONValue;
 @Table(name="Hadoop",columnFamily="mapred_metrics"),
 @Table(name="Hadoop",columnFamily="dfs_metrics"),
 @Table(name="Hadoop",columnFamily="dfs_namenode"),
-@Table(name="Hadoop",columnFamily="dfs_FSDirectory"),
 @Table(name="Hadoop",columnFamily="dfs_FSNamesystem"),
 @Table(name="Hadoop",columnFamily="dfs_datanode"),
 @Table(name="Hadoop",columnFamily="mapred_jobtracker"),
 @Table(name="Hadoop",columnFamily="mapred_shuffleInput"),
 @Table(name="Hadoop",columnFamily="mapred_shuffleOutput"),
 @Table(name="Hadoop",columnFamily="mapred_tasktracker"),
-@Table(name="Hadoop",columnFamily="mapred_job"),
 @Table(name="Hadoop",columnFamily="rpc_metrics")
 })
 public class HadoopMetricsProcessor extends AbstractProcessor {
@@ -121,7 +120,9 @@ public class HadoopMetricsProcessor exte
           recordName = (String) json.get(keyName);
           record.add(keyName, json.get(keyName).toString());
         } else {
-          record.add(keyName, json.get(keyName).toString());
+          if(json.get(keyName)!=null) {
+            record.add(keyName, json.get(keyName).toString());
+          }
         }
       }
       if(contextName!=null) {
@@ -130,7 +131,11 @@ public class HadoopMetricsProcessor exte
       }
       datasource.append(recordName);
       record.add("cluster", chunk.getTag("cluster"));
-      buildGenericRecord(record, null, d.getTime(), datasource.toString());
+      if(contextName!=null && contextName.equals("jvm")) {
+        buildJVMRecord(record, d.getTime(), datasource.toString());        
+      } else {
+        buildGenericRecord(record, null, d.getTime(), datasource.toString());
+      }
       output.collect(key, record);
     } catch (ParseException e) {
       log.warn("Wrong format in HadoopMetricsProcessor [" + recordEntry + "]",
@@ -148,6 +153,22 @@ public class HadoopMetricsProcessor exte
 
   }
 
+  protected void buildJVMRecord(ChukwaRecord record, long timestamp, String dataSource) {
+    calendar.setTimeInMillis(timestamp);
+    calendar.set(Calendar.MINUTE, 0);
+    calendar.set(Calendar.SECOND, 0);
+    calendar.set(Calendar.MILLISECOND, 0);
+
+    key.setKey("" + calendar.getTimeInMillis() + "/" + chunk.getSource() + ":" + 
+        record.getValue("processName")+ "/" + timestamp);
+    key.setReduceType(dataSource);
+    record.setTime(timestamp);
+
+    record.add(Record.tagsField, chunk.getTags());
+    record.add(Record.sourceField, chunk.getSource());
+    record.add(Record.applicationField, chunk.getStreamName());
+  }
+  
   public String getDataType() {
     return HadoopMetricsProcessor.class.getName();
   }
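
buildJVMRecord differs from buildGenericRecord only in its row key, which folds the emitting process name into the source: <hour-aligned millis>/<host>:<processName>/<timestamp>. With made-up values, a DataNode JVM metric captured at 23:26:48 GMT keys as 1322434800000/node1.example.com:DataNode/1322436408000, so all JVM rows for one process land under the same hour prefix.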

Added: incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobSummary.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobSummary.java?rev=1206918&view=auto
==============================================================================
--- incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobSummary.java (added)
+++ incubator/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobSummary.java Sun Nov 27 23:26:48 2011
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.extraction.demux.processor.mapper;
+
+
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.Iterator;
+
+import org.apache.hadoop.chukwa.datacollection.writer.hbase.Annotation.Tables;
+import org.apache.hadoop.chukwa.datacollection.writer.hbase.Annotation.Table;
+import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
+import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
+import org.apache.hadoop.chukwa.extraction.engine.Record;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.log4j.Logger;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+
+@Tables(annotations={
+@Table(name="Jobs",columnFamily="summary")
+})
+public class JobSummary extends AbstractProcessor {  
+  static Logger log = Logger.getLogger(JobSummary.class);
+  static final String chukwaTimestampField = "chukwa_timestamp";
+  static final String contextNameField = "contextName";
+  static final String recordNameField = "recordName";
+
+  private SimpleDateFormat sdf = null;
+
+  public JobSummary() {
+    // TODO move that to config
+    sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  protected void parse(String recordEntry,
+      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
+      throws Throwable {
+    try {
+      // Look for syslog PRI, if PRI is not found, start from offset of 0.
+      int idx = recordEntry.indexOf('>', 0);  
+      String dStr = recordEntry.substring(idx+1, idx+23);
+      int start = idx + 25;
+      idx = recordEntry.indexOf(' ', start);
+      // String level = recordEntry.substring(start, idx);
+      start = idx + 1;
+      idx = recordEntry.indexOf(' ', start);
+      // String className = recordEntry.substring(start, idx-1);
+      String body = recordEntry.substring(idx + 1);
+      body.replaceAll("\n", "");
+      // log.info("record [" + recordEntry + "] body [" + body +"]");
+      Date d = sdf.parse(dStr);
+
+      ChukwaRecord record = new ChukwaRecord();
+
+      String[] list = body.split(",");
+      for(String pair : list) {
+        String[] kv = pair.split("=");
+        record.add(kv[0], kv[1]);
+      }
+      record.add("cluster", chunk.getTag("cluster"));
+      buildGenericRecord(record, d.getTime(), "summary");
+      output.collect(key, record);
+    } catch (ParseException e) {
+      log.warn("Wrong format in JobSummary [" + recordEntry + "]",
+          e);
+      throw e;
+    } catch (IOException e) {
+      log.warn("Unable to collect output in JobSummary ["
+          + recordEntry + "]", e);
+      throw e;
+    } catch (Exception e) {
+      log.warn("Wrong format in JobSummary [" + recordEntry + "]",
+          e);
+      throw e;
+    }
+
+  }
+
+  protected void buildGenericRecord(ChukwaRecord record,
+      long timestamp, String dataSource) {
+    calendar.setTimeInMillis(timestamp);
+    calendar.set(Calendar.MINUTE, 0);
+    calendar.set(Calendar.SECOND, 0);
+    calendar.set(Calendar.MILLISECOND, 0);
+
+    key.setKey("" + calendar.getTimeInMillis() + "/" + record.getValue("jobId") + "/"
+        + timestamp);
+    key.setReduceType(dataSource);
+    record.setTime(timestamp);
+
+    record.add(Record.tagsField, chunk.getTags());
+    record.add(Record.sourceField, chunk.getSource());
+    record.add(Record.applicationField, chunk.getStreamName());
+
+  }
+  
+  public String getDataType() {
+    return JobSummary.class.getName();
+  }
+
+}
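
For reference, parse() expects each record framed like a syslog line; a hypothetical example (framing and values illustrative, not from this commit):

  <13>2011-11-27 23:26:48,123 INFO JobInProgress$JobSummary: jobId=job_201111272326_0001,submitTime=1322436408000,numMaps=4,numReduces=1,user=alice,status=SUCCEEDED

The parser skips past the PRI bracket, reads the next 22 characters as the timestamp (SimpleDateFormat's lenient parse consumes only the yyyy-MM-dd HH:mm portion), steps over the level and logger-name tokens, and splits the remaining body on commas and equals signs into ChukwaRecord fields. Note that body.replaceAll("\n", "") discards its result, so embedded newlines are not actually stripped. The row key built by buildGenericRecord is <hour-aligned millis>/<jobId>/<timestamp>.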

Modified: incubator/chukwa/trunk/src/main/web/hicc/js/workspace/workspace.js
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/web/hicc/js/workspace/workspace.js?rev=1206918&r1=1206917&r2=1206918&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/web/hicc/js/workspace/workspace.js (original)
+++ incubator/chukwa/trunk/src/main/web/hicc/js/workspace/workspace.js Sun Nov 27 23:26:48 2011
@@ -2573,8 +2573,8 @@ function onLabelClick(id) {
      detail+='</td></tr><tr><td><input class="formButton" type="button" name="addwidget" value="Add Widget" onClick="add_widget(\''+id+'\');"/>';
       detail+="</td></tr>";
       detail+="</table></td>";
-      detail+="<td width='55%'><div id='_preview' style='width:100%;overflow:hidden;'>";
-      detail+="<td width='55%'><div id='_preview' style='width:100%;overflow:hidden;'>";
-      detail+="<div id='_preview' class='dragableBoxContent' style='width:100%;height:200px;overflow:hidden;'><img src='/hicc/images/loading.gif'></div></div></td></tr></table>";
+      detail+="<td width='55%'><div style='width:100%;height:280px;overflow:hidden;'>";
+      detail+="<td width='55%'><div style='width:100%;height:280px;overflow:hidden;'>";
+      detail+="<div id='_preview' class='dragableBoxContent' style='width:100%;height:280px;overflow:hidden;'><img src='/hicc/images/loading.gif'></div></div></td></tr></table>";
 
       jQuery('#widget_detail').empty();
       jQuery('#widget_detail').append(detail);
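
The preview-pane fix removes the duplicate id='_preview' from the outer div (the old markup assigned the same id to both the wrapper and the content div, which is invalid HTML) and enlarges the preview area from 200px to 280px.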


