chukwa-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ey...@apache.org
Subject svn commit: r1207789 - in /incubator/chukwa/trunk: ./ conf/ script/pig/ src/main/web/hicc/WEB-INF/ src/main/web/hicc/descriptors/ src/main/web/hicc/descriptors/disabled/ src/main/web/hicc/jsp/ src/main/web/hicc/views/
Date Tue, 29 Nov 2011 08:40:41 GMT
Author: eyang
Date: Tue Nov 29 08:40:38 2011
New Revision: 1207789

URL: http://svn.apache.org/viewvc?rev=1207789&view=rev
Log:
CHUKWA-606. Updated HICC to interface with data in HBase. (Eric Yang)

Added:
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/disabled/
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/disabled/client_trace.descriptor.disabled
      - copied unchanged from r1204808, incubator/chukwa/trunk/src/main/web/hicc/descriptors/client_trace.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/disabled/dfs_throughput.descriptor.disabled
      - copied unchanged from r1204808, incubator/chukwa/trunk/src/main/web/hicc/descriptors/dfs_throughput.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/disabled/hadoop_activity.descriptor.disabled
      - copied unchanged from r1204808, incubator/chukwa/trunk/src/main/web/hicc/descriptors/hadoop_activity.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/disabled/heatmap-static.descriptor.disabled
      - copied unchanged from r1204808, incubator/chukwa/trunk/src/main/web/hicc/descriptors/heatmap-static.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/disabled/heatmap_datanode.descriptor.disabled
      - copied unchanged from r1204808, incubator/chukwa/trunk/src/main/web/hicc/descriptors/heatmap_datanode.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/disabled/jvm_metrics.descriptor.disabled
      - copied unchanged from r1204808, incubator/chukwa/trunk/src/main/web/hicc/descriptors/jvm_metrics.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/disabled/node_activity_chart.descriptor.disabled
      - copied unchanged from r1204808, incubator/chukwa/trunk/src/main/web/hicc/descriptors/node_activity_chart.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/disabled/rpc_metrics.descriptor.disabled
      - copied unchanged from r1204808, incubator/chukwa/trunk/src/main/web/hicc/descriptors/rpc_metrics.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/disabled/swimlanes-static.descriptor.disabled
      - copied unchanged from r1204808, incubator/chukwa/trunk/src/main/web/hicc/descriptors/swimlanes-static.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/disabled/swimlanes.descriptor.disabled
      - copied unchanged from r1204808, incubator/chukwa/trunk/src/main/web/hicc/descriptors/swimlanes.descriptor
Removed:
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/client_trace.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/cluster_disk.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/cluster_disk_pcnt.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/cluster_metrics_pcnt.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/dfs_throughput.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/disk.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/disk_pcnt.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/hadoop_activity.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/heatmap-static.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/heatmap_datanode.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/jvm_metrics.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/node_activity_chart.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/rpc_metrics.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/swimlanes-static.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/swimlanes.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/system_metrics_pcnt.descriptor
Modified:
    incubator/chukwa/trunk/CHANGES.txt
    incubator/chukwa/trunk/conf/chukwa-env.sh
    incubator/chukwa/trunk/conf/hadoop-log4j.properties
    incubator/chukwa/trunk/script/pig/ClusterSummary.pig
    incubator/chukwa/trunk/src/main/web/hicc/WEB-INF/jetty.xml
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/cluster_metrics.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/cluster_selector.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/debug.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/dfs_fsnamesystem.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/event_viewer.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/descriptors/hadoop_mapred.descriptor
    incubator/chukwa/trunk/src/main/web/hicc/jsp/debug.jsp
    incubator/chukwa/trunk/src/main/web/hicc/jsp/event.jsp
    incubator/chukwa/trunk/src/main/web/hicc/jsp/event_viewer.jsp
    incubator/chukwa/trunk/src/main/web/hicc/jsp/events-xml.jsp
    incubator/chukwa/trunk/src/main/web/hicc/jsp/job_viewer.jsp
    incubator/chukwa/trunk/src/main/web/hicc/views/default.view

Modified: incubator/chukwa/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/CHANGES.txt?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/CHANGES.txt (original)
+++ incubator/chukwa/trunk/CHANGES.txt Tue Nov 29 08:40:38 2011
@@ -98,6 +98,8 @@ Trunk (unreleased changes)
 
   BUG FIXES
 
+    CHUKWA-606. Updated HICC to interface with data in HBase. (Eric Yang)
+
     CHUKWA-588. Updated hbase.schema to match Hadoop 0.20.205.0 metrics and job summary. (Eric Yang)
 
     CHUKWA-609. Map / to /hicc for HICC web server. (Eric Yang)

Modified: incubator/chukwa/trunk/conf/chukwa-env.sh
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/conf/chukwa-env.sh?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/conf/chukwa-env.sh (original)
+++ incubator/chukwa/trunk/conf/chukwa-env.sh Tue Nov 29 08:40:38 2011
@@ -28,7 +28,10 @@ export JAVA_HOME=${JAVA_HOME}
 # The location of HBase Configuration directory.  For writing data to
 # HBase, you need to set environment variable HBASE_CONF to HBase conf
 # directory.
-export HBASE_CONF_DIR="${TODO_HBASE_CONF_DIR}"
+export HBASE_CONF_DIR="${HBASE_CONF_DIR}"
+
+# Hadoop Configuration directory
+export HADOOP_CONF_DIR="${HADOOP_CONF_DIR}"
 
 # The location of chukwa data repository (in either HDFS or your local
 # file system, whichever you are using)
@@ -63,4 +66,4 @@ export CHUKWA_HICC_MAX_MEM=
 # HICC Jetty Server port, defaults to 4080
 #export CHUKWA_HICC_PORT=
 
-export CLASSPATH=${CLASSPATH}:${HBASE_CONF_DIR}
+export CLASSPATH=${CLASSPATH}:${HBASE_CONF_DIR}:${HADOOP_CONF_DIR}

Modified: incubator/chukwa/trunk/conf/hadoop-log4j.properties
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/conf/hadoop-log4j.properties?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/conf/hadoop-log4j.properties (original)
+++ incubator/chukwa/trunk/conf/hadoop-log4j.properties Tue Nov 29 08:40:38 2011
@@ -134,7 +134,6 @@ log4j.appender.JSA.RemoteHost=localhost
 log4j.appender.JSA.Port=9098
 log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
 log4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
-log4j.appender.JSA.DatePattern=.yyyy-MM-dd
 log4j.logger.org.apache.hadoop.mapred.JobInProgress$JobSummary=${hadoop.mapreduce.jobsummary.logger}
 log4j.additivity.org.apache.hadoop.mapred.JobInProgress$JobSummary=false
 

Modified: incubator/chukwa/trunk/script/pig/ClusterSummary.pig
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/script/pig/ClusterSummary.pig?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/script/pig/ClusterSummary.pig (original)
+++ incubator/chukwa/trunk/script/pig/ClusterSummary.pig Tue Nov 29 08:40:38 2011
@@ -5,13 +5,20 @@ ConcatBuffer = foreach CleanseBuffer gen
 TimeSeries = GROUP ConcatBuffer BY rowId;
 ComputeBuffer = FOREACH TimeSeries GENERATE group, AVG(ConcatBuffer.cpuCombined), AVG(ConcatBuffer.cpuIdle), AVG(ConcatBuffer.cpuSys), AVG(ConcatBuffer.cpuUser), AVG(ConcatBuffer.diskReadBytes), AVG(ConcatBuffer.diskReads), AVG(ConcatBuffer.diskWriteBytes), AVG(ConcatBuffer.diskWrites), AVG(ConcatBuffer.LoadAverage), AVG(ConcatBuffer.memoryFreePercent), AVG(ConcatBuffer.memoryUsedPercent), AVG(ConcatBuffer.networkRxBytes), AVG(ConcatBuffer.networkRxDropped), AVG(ConcatBuffer.networkRxErrors), AVG(ConcatBuffer.networkRxPackets), AVG(ConcatBuffer.networkTxBytes), AVG(ConcatBuffer.networkTxCollisions), AVG(ConcatBuffer.networkTxErrors), AVG(ConcatBuffer.networkTxPackets);
 STORE ComputeBuffer INTO 'ClusterSummary' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('cpu:Combined cpu:Idle cpu:Sys cpu:User disk:ReadBytes disk:Reads disk:WriteBytes disk:Writes system:LoadAverage memory:FreePercent memory:UsedPercent network:RxBytes network:RxDropped network:RxErrors network:RxPackets network:TxBytes network:TxCollisions network:TxErrors network:TxPackets');
-HDFSMetrics = load 'hbase://Hadoop' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('dfs_FSNamesystem:cluster dfs_FSNamesystem:CapacityTotalGB dfs_FSNamesystem:CapacityUsedGB dfs_FSNamesystem:CapacityRemainingGB dfs_FSNamesystem:BlockCapacity dfs_FSNamesystem:BlocksTotal dfs_FSNamesystem:MissingBlocks dfs_FSNamesystem:CorruptBlocks dfs_FSNamesystem:UnderReplicatedBlocks','-loadKey -gt $START -caster Utf8StorageConverter') AS (rowKey, cluster, CapacityTotalGB, CapacityUsedGB, CapacityRemainingGB, BlockCapacity, BlocksTotal, MissingBlocks, CorruptBlocks, UnderReplicatedBlocks);
-CleanseBuffer = foreach HDFSMetrics generate REGEX_EXTRACT($0,'^\\d+',0) as time, cluster, CapacityTotalGB, CapacityUsedGB, CapacityRemainingGB, BlockCapacity, BlocksTotal, MissingBlocks, CorruptBlocks, UnderReplicatedBlocks;
-ConcatBuffer = foreach CleanseBuffer generate CONCAT(CONCAT($0, '-'), $1) as rowId, CapacityTotalGB, CapacityUsedGB, CapacityRemainingGB, BlockCapacity, BlocksTotal, MissingBlocks, CorruptBlocks, UnderReplicatedBlocks;
-STORE ConcatBuffer INTO 'ClusterSummary' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('hdfs:CapacityTotalGB hdfs:CapacityUsedGB hdfs:CapacityRemainingGB hdfs:BlockCapacity hdfs:BlocksTotal hdfs:MissingBlocks hdfs:CorruptBlocks hdfs:UnderReplicatedBlocks');
-MapReduceMetrics = load 'hbase://Hadoop' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('mapred_tasktracker:cluster mapred_tasktracker:mapTaskSlots mapred_tasktracker:maps_running mapred_tasktracker:reduceTaskSlots mapred_tasktracker:reduces_running mapred_tasktracker:tasks_completed mapred_tasktracker:tasks_failed_ping mapred_tasktracker:tasks_failed_timeout','-loadKey -gt $START -caster Utf8StorageConverter') AS (rowKey, cluster, mapTaskSlots, mapsRunning, reduceTaskSlots, reduceRunning, tasksCompleted, tasksFailedPing, tasksFailedTimeout);
+HDFSMetrics = load 'hbase://Hadoop' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('dfs_FSNamesystem:cluster dfs_FSNamesystem:CapacityTotalGB dfs_FSNamesystem:CapacityUsedGB dfs_FSNamesystem:CapacityRemainingGB dfs_FSNamesystem:BlockCapacity dfs_FSNamesystem:BlocksTotal dfs_FSNamesystem:MissingBlocks dfs_FSNamesystem:CorruptBlocks dfs_FSNamesystem:UnderReplicatedBlocks dfs_FSNamesystem:FilesTotal','-loadKey -gt $START -caster Utf8StorageConverter') AS (rowKey, cluster, CapacityTotalGB, CapacityUsedGB, CapacityRemainingGB, BlockCapacity, BlocksTotal, MissingBlocks, CorruptBlocks, UnderReplicatedBlocks, FilesTotal);
+CleanseBuffer = foreach HDFSMetrics generate REGEX_EXTRACT($0,'^\\d+',0) as time, cluster, CapacityTotalGB, CapacityUsedGB, CapacityRemainingGB, BlockCapacity, BlocksTotal, MissingBlocks, CorruptBlocks, UnderReplicatedBlocks, FilesTotal;
+ConcatBuffer = foreach CleanseBuffer generate CONCAT(CONCAT($0, '-'), $1) as rowId, CapacityTotalGB, CapacityUsedGB, CapacityRemainingGB, BlockCapacity, BlocksTotal, MissingBlocks, CorruptBlocks, UnderReplicatedBlocks, FilesTotal;
+STORE ConcatBuffer INTO 'ClusterSummary' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('hdfs:CapacityTotalGB hdfs:CapacityUsedGB hdfs:CapacityRemainingGB hdfs:BlockCapacity hdfs:BlocksTotal hdfs:MissingBlocks hdfs:CorruptBlocks hdfs:UnderReplicatedBlocks hdfs:FilesTotal');
+MapReduceMetrics = load 'hbase://Hadoop' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('mapred_tasktracker:cluster mapred_tasktracker:mapTaskSlots mapred_tasktracker:maps_running mapred_tasktracker:reduceTaskSlots mapred_tasktracker:reduces_running mapred_tasktracker:tasks_completed mapred_tasktracker:tasks_failed_ping mapred_tasktracker:tasks_failed_timeout','-loadKey -gt $START -caster Utf8StorageConverter') AS (rowKey, cluster, mapTaskSlots, mapsRunning, reduceTaskSlots, reduceRunning, tasksCompleted, tasksFailedPing, tasksFailedTimeout);
 CleanseBuffer = foreach MapReduceMetrics generate REGEX_EXTRACT($0,'^\\d+',0) as time, cluster, mapTaskSlots, mapsRunning, reduceTaskSlots, reduceRunning, tasksCompleted, tasksFailedPing, tasksFailedTimeout;
 GroupBuffer = foreach CleanseBuffer generate CONCAT(CONCAT($0, '-'), $1) as rowId, mapTaskSlots, mapsRunning, reduceTaskSlots, reduceRunning, tasksCompleted, tasksFailedPing, tasksFailedTimeout;
 TimeSeries = GROUP GroupBuffer BY rowId;
 MapReduceSummary = FOREACH TimeSeries GENERATE group, SUM(GroupBuffer.mapTaskSlots), SUM(GroupBuffer.mapsRunning), SUM(GroupBuffer.reduceTaskSlots), SUM(GroupBuffer.reduceRunning), SUM(GroupBuffer.tasksCompleted), SUM(GroupBuffer.tasksFailedPing), SUM(GroupBuffer.tasksFailedTimeout);
 STORE MapReduceSummary INTO 'ClusterSummary' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('mapreduce:mapTaskSlots mapreduce:mapsRunning mapreduce:reduceTaskSlots mapreduce:reduceRunning mapreduce:tasksCompleted mapreduce:tasksFailedPing mapreduce:tasksFailedTimeout'); 
+
+MapReduceMetrics = load 'hbase://Hadoop' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('mapred_jobtracker:cluster mapred_jobtracker:occupied_map_slots mapred_jobtracker:occupied_reduce_slots','-loadKey -gt $START -caster Utf8StorageConverter') AS (rowKey, cluster, occupiedMapSlots, occupiedReduceSlots);
+CleanseBuffer = foreach MapReduceMetrics generate REGEX_EXTRACT($0,'^\\d+',0) as time, cluster, occupiedMapSlots, occupiedReduceSlots;
+GroupBuffer = foreach CleanseBuffer generate CONCAT(CONCAT($0, '-'), $1) as rowId, occupiedMapSlots, occupiedReduceSlots;
+TimeSeries = GROUP GroupBuffer BY rowId;
+MapReduceSummary = FOREACH TimeSeries GENERATE group, SUM(GroupBuffer.occupiedMapSlots), SUM(GroupBuffer.occupiedReduceSlots);
+STORE MapReduceSummary INTO 'ClusterSummary' USING org.apache.pig.backend.hadoop.hbase.HBaseStorage('mapreduce:occupiedMapSlots mapreduce:occupiedReduceSlots'); 

Modified: incubator/chukwa/trunk/src/main/web/hicc/WEB-INF/jetty.xml
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/web/hicc/WEB-INF/jetty.xml?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/web/hicc/WEB-INF/jetty.xml (original)
+++ incubator/chukwa/trunk/src/main/web/hicc/WEB-INF/jetty.xml Tue Nov 29 08:40:38 2011
@@ -162,7 +162,7 @@
           <Set name="contexts"><Ref id="Contexts"/></Set>
           <Set name="webAppDir"><SystemProperty name="CHUKWA_HOME" default="."/>/share/chukwa/webapps</Set>
 	  <Set name="parentLoaderPriority">false</Set>
-	  <Set name="extract">false</Set>
+	  <Set name="extract">true</Set>
 	  <Set name="allowDuplicates">false</Set>
         </New>
       </Arg>

Modified: incubator/chukwa/trunk/src/main/web/hicc/descriptors/cluster_metrics.descriptor
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/web/hicc/descriptors/cluster_metrics.descriptor?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/web/hicc/descriptors/cluster_metrics.descriptor (original)
+++ incubator/chukwa/trunk/src/main/web/hicc/descriptors/cluster_metrics.descriptor Tue Nov 29 08:40:38 2011
@@ -3,52 +3,36 @@
 "title":"Cluster Metrics",
 "version":"0.1",
 "categories":"System Metrics,Cluster",
-"url":"iframe/jsp/single-series-chart-javascript.jsp",
+"url":"iframe/jsp/chart.jsp",
 "description":"Display cluster related stats",
 "screendump":"\/images\/server_load.gif",
 "refresh":"15",
 "parameters":[
-{"name":"table","type":"string","value":"cluster_system_metrics","edit":"0"},
+{"name":"title","type":"string","value":"","edit":"1","label":"Title"},
+{"name":"table","type":"string","value":"system_metrics","edit":"0"},
+{"name":"group","type":"string","value":"host","edit":"0"},
+{"name":"group_items","type":"string","value":"cluster","edit":"0"},
 {"name":"period","type":"custom","control":"period_control","value":"","label":"Period"},
-{"name":"metric","type":"select_multiple","value":"load_15","label":"Metric","options":[
-{"label":"Reporting Hosts","value":"host"},
-{"label":"load_15","value":"load_15"},
-{"label":"load_5","value":"load_5"},
-{"label":"load_1","value":"load_1"},
-{"label":"task_total","value":"task_total"},
-{"label":"task_running","value":"task_running"},
-{"label":"task_sleep","value":"task_sleep"},
-{"label":"task_stopped","value":"task_stopped"},
-{"label":"task_zombie","value":"task_zombie"},
-{"label":"mem_total","value":"mem_total"},
-{"label":"mem_buffers","value":"mem_buffers"},
-{"label":"mem_cached","value":"mem_cached"},
-{"label":"mem_used","value":"mem_used"},
-{"label":"mem_free","value":"mem_free"},
-{"label":"eth0_rxerrs","value":"eth0_rxerrs"},
-{"label":"eth0_rxbyts","value":"eth0_rxbyts"},
-{"label":"eth0_rxpcks","value":"eth0_rxpcks"},
-{"label":"eth0_rxdrops","value":"eth0_rxdrops"},
-{"label":"eth0_txerrs","value":"eth0_txerrs"},
-{"label":"eth0_txbyts","value":"eth0_txbyts"},
-{"label":"eth0_txpcks","value":"eth0_txpcks"},
-{"label":"eth0_txdrops","value":"eth0_txdrops"},
-{"label":"eth1_rxerrs","value":"eth1_rxerrs"},
-{"label":"eth1_rxbyts","value":"eth1_rxbyts"},
-{"label":"eth1_rxpcks","value":"eth1_rxpcks"},
-{"label":"eth1_rxdrops","value":"eth1_rxdrops"},
-{"label":"eth1_txerrs","value":"eth1_txerrs"},
-{"label":"eth1_txbyts","value":"eth1_txbyts"},
-{"label":"eth1_txpcks","value":"eth1_txpcks"},
-{"label":"eth1_txdrops","value":"eth1_txdrops"},
-{"label":"sda_rkbs","value":"sda_rkbs"},
-{"label":"sda_wkbs","value":"sda_wkbs"},
-{"label":"sdb_rkbs","value":"sdb_rkbs"},
-{"label":"sdb_wkbs","value":"sdb_wkbs"},
-{"label":"sdc_rkbs","value":"sdc_rkbs"},
-{"label":"sdc_wkbs","value":"sdc_wkbs"},
-{"label":"sdd_rkbs","value":"sdd_rkbs"},
-{"label":"sdd_wkbs","value":"sdd_wkbs"}
+{"name":"data","type":"select_multiple","value":"/hicc/v1/metrics/series/ClusterSummary/cpu:Combined/session/cluster","label":"Metric","options":[
+{"label":"CPU Utilizaion","value":"/hicc/v1/metrics/series/ClusterSummary/cpu:Combined/session/cluster"},
+{"label":"CPU Idle","value":"/hicc/v1/metrics/series/ClusterSummary/cpu:Idle/session/cluster"},
+{"label":"CPU Utilization By System","value":"/hicc/v1/metrics/series/ClusterSummary/cpu:Sys/session/cluster"},
+{"label":"CPU Utilization By User","value":"/hicc/v1/metrics/series/ClusterSummary/cpu:User/session/cluster"},
+{"label":"Disk Read Bytes","value":"/hicc/v1/metrics/series/ClusterSummary/disk:ReadBytes/session/cluster"},
+{"label":"Disk Read Operations","value":"/hicc/v1/metrics/series/ClusterSummary/disk:Reads/session/cluster"},
+{"label":"Disk Write Bytes","value":"/hicc/v1/metrics/series/ClusterSummary/disk:WriteBytes/session/cluster"},
+{"label":"Disk Write Operations","value":"/hicc/v1/metrics/series/ClusterSummary/disk:Writes/session/cluster"},
+{"label":"Memory Free Percentage","value":"/hicc/v1/metrics/series/ClusterSummary/memory:FreePercent/session/cluster"},
+{"label":"Memory Used Percentage","value":"/hicc/v1/metrics/series/ClusterSummary/memory:UsedPercent/session/cluster"},
+{"label":"Network Receive Bytes","value":"/hicc/v1/metrics/series/ClusterSummary/network:RxBytes/session/cluster"},
+{"label":"Network Receive Dropped","value":"/hicc/v1/metrics/series/ClusterSummary/network:RxDropped/session/cluster"},
+{"label":"Network Receive Errors","value":"/hicc/v1/metrics/series/ClusterSummary/network:RxErrors/session/cluster"},
+{"label":"Network Receive Packets","value":"/hicc/v1/metrics/series/ClusterSummary/network:RxPackets/session/cluster"},
+{"label":"Network Transfer Bytes","value":"/hicc/v1/metrics/series/ClusterSummary/network:TxBytes/session/cluster"},
+{"label":"Network Transfer Collisions","value":"/hicc/v1/metrics/series/ClusterSummary/network:TxCollisions/session/cluster"},
+{"label":"Network Transfer Errors","value":"/hicc/v1/metrics/series/ClusterSummary/network:TxErrors/session/cluster"},
+{"label":"Network Transfer Packets","value":"/hicc/v1/metrics/series/ClusterSummary/network:TxPackets/session/cluster"},
+{"label":"Load Average","value":"/hicc/v1/metrics/series/ClusterSummary/system:LoadAverage/session/cluster"},
 ]},
 {"name":"width","type":"select","value":"300","label":"Width","options":[
 {"label":"300","value":"300"},
@@ -56,16 +40,19 @@
 {"label":"500","value":"500"},
 {"label":"600","value":"600"},
 {"label":"800","value":"800"},
+{"label":"1000","value":"1000"},
 {"label":"1200","value":"1200"}
 ]},
 {"name":"height","type":"select","value":"200","label":"Height","options":[
 {"label":"200","value":"200"},
 {"label":"400","value":"400"},
 {"label":"600","value":"600"},
+{"label":"800","value":"800"},
 {"label":"1000","value":"1000"}
 ]},
 {"name":"legend","type":"radio","value":"on","label":"Show Legends","options":[
 {"label":"On","value":"on"},
-{"label":"Off","value":"off"}]}
+{"label":"Off","value":"off"}
+]}
 ]
 }

Modified: incubator/chukwa/trunk/src/main/web/hicc/descriptors/cluster_selector.descriptor
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/web/hicc/descriptors/cluster_selector.descriptor?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/web/hicc/descriptors/cluster_selector.descriptor (original)
+++ incubator/chukwa/trunk/src/main/web/hicc/descriptors/cluster_selector.descriptor Tue Nov 29 08:40:38 2011
@@ -3,10 +3,11 @@
 "title":"Cluster Selector",
 "version":"0.1",
 "categories":"Global,Utilities",
-"url":"jsp\/cluster_selector.jsp",
+"url":"jsp/cluster_selector.jsp",
 "description":"Global control to manipulate cluster selection across widgets",
 "screendump":"",
 "refresh":"0",
 "parameters":[
+{"name":"height","type":"string","value":"0","edit":"0"}
 ]
 }

Modified: incubator/chukwa/trunk/src/main/web/hicc/descriptors/debug.descriptor
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/web/hicc/descriptors/debug.descriptor?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/web/hicc/descriptors/debug.descriptor (original)
+++ incubator/chukwa/trunk/src/main/web/hicc/descriptors/debug.descriptor Tue Nov 29 08:40:38 2011
@@ -3,9 +3,10 @@
 "title":"Session Debugger",
 "version":"0.1",
 "categories":"Developer,Utilities",
-"url":"jsp\/debug.jsp",
+"url":"jsp/debug.jsp",
 "description":"Display session stats",
 "refresh":"15",
 "parameters":[
+{"name":"height","type":"string","value":"0","edit":"0"}
 ]
 }

Modified: incubator/chukwa/trunk/src/main/web/hicc/descriptors/dfs_fsnamesystem.descriptor
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/web/hicc/descriptors/dfs_fsnamesystem.descriptor?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/web/hicc/descriptors/dfs_fsnamesystem.descriptor (original)
+++ incubator/chukwa/trunk/src/main/web/hicc/descriptors/dfs_fsnamesystem.descriptor Tue Nov 29 08:40:38 2011
@@ -1,10 +1,10 @@
 {
 "id":"dfs_fsnamesystem",
-"title":"HDFS File System Name System Metrics",
+"title":"HDFS Metrics",
 "version":"0.1",
 "categories":"Hadoop,HDFS",
 "url":"iframe/jsp/chart.jsp",
-"description":"Display File System Name System related stats",
+"description":"Display HDFS related stats",
 "screendump":"\/images\/server_load.gif",
 "refresh":"15",
 "parameters":[
@@ -13,21 +13,16 @@
 {"name":"group","type":"string","value":"host","edit":"0"},
 {"name":"group_items","type":"string","value":"hosts","edit":"0"},
 {"name":"period","type":"custom","control":"period_control","value":"","label":"Period"},
-{"name":"data","type":"select_multiple","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:CapacityRemainingGB/session/hosts","label":"Metric","options":[
-{"label":"BlockCapacity","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:BlockCapacity/session/hosts"},
-{"label":"BlocksTotal","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:BlocksTotal/session/hosts"},
-{"label":"CapacityRemainingGB","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:CapacityRemainingGB/session/hosts"}, 
-{"label":"CapacityTotalGB","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:CapacityTotalGB/session/hosts"},
-{"label":"CapacityUsedGB","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:CapacityUsedGB/session/hosts"},
-{"label":"CorruptBlocks","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:CorruptBlocks/session/hosts"},
-{"label":"ExcessBlocks","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:ExcessBlocks/session/hosts"},
-{"label":"FilesTotal","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:FilesTotal/session/hosts"},
-{"label":"MissingBlocks","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:MissingBlocks/session/hosts"},
-{"label":"PendingDeletionBlocks","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:PendingDeletionBlocks/session/hosts"}, 
-{"label":"PendingReplicationBlocks","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:PendingReplicationBlocks/session/hosts"},
-{"label":"ScheduledReplicationBlocks","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:ScheduledReplicationBlocks/session/hosts"},
-{"label":"TotalLoad","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:TotalLoad/session/hosts"},
-{"label":"UnderReplicatedBlocks","value":"/hicc/v1/metrics/series/Hadoop/dfs_FSNamesystem:UnderReplicatedBlocks/session/hosts"}
+{"name":"data","type":"select_multiple","value":"/hicc/v1/metrics/series/ClusterSummary/hdfs:CapacityRemainingGB/session/cluster","label":"Metric","options":[
+{"label":"Block Capacity","value":"/hicc/v1/metrics/series/ClusterSummary/hdfs:BlockCapacity/session/cluster"},
+{"label":"Blocks Total","value":"/hicc/v1/metrics/series/ClusterSummary/hdfs:BlocksTotal/session/cluster"},
+{"label":"Capacity Remaining In GB","value":"/hicc/v1/metrics/series/ClusterSummary/hdfs:CapacityRemainingGB/session/cluster"}, 
+{"label":"Capacity Total In GB","value":"/hicc/v1/metrics/series/ClusterSummary/hdfs:CapacityTotalGB/session/cluster"},
+{"label":"Capacity Used In GB","value":"/hicc/v1/metrics/series/ClusterSummary/hdfs:CapacityUsedGB/session/cluster"},
+{"label":"Corrupted Blocks","value":"/hicc/v1/metrics/series/ClusterSummary/hdfs:CorruptBlocks/session/cluster"},
+{"label":"Missing Blocks","value":"/hicc/v1/metrics/series/ClusterSummary/hdfs:MissingBlocks/session/cluster"},
+{"label":"Under Replicated Blocks","value":"/hicc/v1/metrics/series/ClusterSummary/hdfs:UnderReplicatedBlocks/session/cluster"}
+{"label":"Files Total","value":"/hicc/v1/metrics/series/ClusterSummary/hdfs:FilesTotal/session/cluster"}
 ]},
 {"name":"width","type":"select","value":"300","label":"Width","options":[
 {"label":"300","value":"300"},

Modified: incubator/chukwa/trunk/src/main/web/hicc/descriptors/event_viewer.descriptor
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/web/hicc/descriptors/event_viewer.descriptor?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/web/hicc/descriptors/event_viewer.descriptor (original)
+++ incubator/chukwa/trunk/src/main/web/hicc/descriptors/event_viewer.descriptor Tue Nov 29 08:40:38 2011
@@ -1,14 +1,14 @@
 {
 "id":"event_browser",
-"title":"Event Viewer",
+"title":"Jobs Summary Browser",
 "version":"0.1",
-"categories":"Global",
+"categories":"Hadoop,Map/Reduce",
 "url":"jsp\/event_viewer.jsp",
-"description":"Display Time sorted events",
+"description":"Display job summary events",
 "screendump":"",
 "refresh":"15",
 "parameters":[
-{"name":"database","type":"select_callback","value":"","label":"Data Source","callback":"/hicc/jsp/get_data_source.jsp"},
+{"name":"table","type":"string","value":"Jobs","edit":"0"},
 {"name":"type","type":"radio","value":"graph","label":"Display Type","options":[
 {"label":"Graph","value":"graph"},
 {"label":"List","value":"list"}

Modified: incubator/chukwa/trunk/src/main/web/hicc/descriptors/hadoop_mapred.descriptor
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/web/hicc/descriptors/hadoop_mapred.descriptor?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/web/hicc/descriptors/hadoop_mapred.descriptor (original)
+++ incubator/chukwa/trunk/src/main/web/hicc/descriptors/hadoop_mapred.descriptor Tue Nov 29 08:40:38 2011
@@ -9,29 +9,17 @@
 "refresh":"15",
 "parameters":[
 {"name":"title","type":"string","value":"","edit":"1","label":"Title"},
-{"name":"table","type":"string","value":"hadoop_mapred","edit":"0"},
 {"name":"period","type":"custom","control":"period_control","value":"","label":"Period"},
-{"name":"data","type":"select_multiple","value":"/hicc/v1/metrics/series/Hadoop/mapred_jobtracker:jobs_completed/session/hosts","label":"Metric","options":[
-{"label":"mapred_jobtracker:jobs_completed","value":"/hicc/v1/metrics/series/Hadoop/mapred_jobtracker:jobs_completed/session/hosts"},
-{"label":"mapred_jobtracker:jobs_submitted","value":"/hicc/v1/metrics/series/Hadoop/mapred_jobtracker:jobs_submitted/session/hosts"},
-{"label":"mapred_jobtracker:maps_completed","value":"/hicc/v1/metrics/series/Hadoop/mapred_jobtracker:maps_completed/session/hosts"},
-{"label":"mapred_jobtracker:maps_failed","value":"/hicc/v1/metrics/series/Hadoop/mapred_jobtracker:maps_failed/session/hosts"},
-{"label":"mapred_jobtracker:maps_launched","value":"/hicc/v1/metrics/series/Hadoop/mapred_jobtracker:maps_launched/session/hosts"},
-{"label":"mapred_jobtracker:reduces_completed","value":"/hicc/v1/metrics/series/Hadoop/mapred_jobtracker:reduces_completed/session/hosts"},
-{"label":"mapred_jobtracker:reduces_failed","value":"/hicc/v1/metrics/series/Hadoop/mapred_jobtracker:reduces_failed/session/hosts"},
-{"label":"mapred_jobtracker:reduces_launched","value":"/hicc/v1/metrics/series/Hadoop/mapred_jobtracker:reduces_launched/session/hosts"},
-{"label":"mapred_jobtracker:waiting_tasks","value":"/hicc/v1/metrics/series/Hadoop/mapred_jobtracker:waiting_tasks/session/hosts"},
-{"label":"mapred_shuffleOutput:shuffle_failed_outputs","value":"/hicc/v1/metrics/series/Hadoop/mapred_shuffleOutput:shuffle_failed_outputs/session/hosts"},
-{"label":"mapred_shuffleOutput:shuffle_handler_busy_percent","value":"/hicc/v1/metrics/series/Hadoop/mapred_shuffleOutput:shuffle_handler_busy_percent/session/hosts"},
-{"label":"mapred_shuffleOutput:shuffle_output_bytes","value":"/hicc/v1/metrics/series/Hadoop/mapred_shuffleOutput:shuffle_output_bytes/session/hosts"},
-{"label":"mapred_shuffleOutput:shuffle_success_outputs","value":"/hicc/v1/metrics/series/Hadoop/mapred_shuffleOutput:shuffle_success_outputs/session/hosts"},
-{"label":"mapred_tasktracker:mapTaskSlots","value":"/hicc/v1/metrics/series/Hadoop/mapred_tasktracker:mapTaskSlots/session/hosts"},
-{"label":"mapred_tasktracker:maps_running","value":"/hicc/v1/metrics/series/Hadoop/mapred_tasktracker:maps_running/session/hosts"},
-{"label":"mapred_tasktracker:reduceTaskSlots","value":"/hicc/v1/metrics/series/Hadoop/mapred_tasktracker:reduceTaskSlots/session/hosts"},
-{"label":"mapred_tasktracker:reduces_running","value":"/hicc/v1/metrics/series/Hadoop/mapred_tasktracker:reduces_running/session/hosts"},
-{"label":"mapred_tasktracker:tasks_completed","value":"/hicc/v1/metrics/series/Hadoop/mapred_tasktracker:tasks_completed/session/hosts"},
-{"label":"mapred_tasktracker:tasks_failed_ping","value":"/hicc/v1/metrics/series/Hadoop/mapred_tasktracker:tasks_failed_ping/session/hosts"},
-{"label":"mapred_tasktracker:tasks_failed_timeout","value":"/hicc/v1/metrics/series/Hadoop/mapred_tasktracker:tasks_failed_timeout/session/hosts"}
+{"name":"data","type":"select_multiple","value":"/hicc/v1/metrics/series/ClusterSummary/mapreduce:mapsRunning/session/cluster","label":"Metric","options":[
+{"label":"Map Tasks Running","value":"/hicc/v1/metrics/series/ClusterSummary/mapreduce:mapsRunning/session/cluster"},
+{"label":"Map Task Slots","value":"/hicc/v1/metrics/series/ClusterSummary/mapreduce:mapTaskSlots/session/cluster"},
+{"label":"Reduce Tasks Running","value":"/hicc/v1/metrics/series/ClusterSummary/mapreduce:reduceRunning/session/cluster"},
+{"label":"Reduce Task Slots","value":"/hicc/v1/metrics/series/ClusterSummary/mapreduce:reduceTaskSlots/session/cluster"},
+{"label":"Occupied Map Task Slots","value":"/hicc/v1/metrics/series/ClusterSummary/mapreduce:occupiedMapTaskSlots/session/cluster"},
+{"label":"Occupied Reduce Task Slots","value":"/hicc/v1/metrics/series/ClusterSummary/mapreduce:occupiedReduceTaskSlots/session/cluster"},
+{"label":"Tasks Completed","value":"/hicc/v1/metrics/series/ClusterSummary/mapreduce:tasksCompleted/session/cluster"},
+{"label":"Tasks Failed Ping","value":"/hicc/v1/metrics/series/ClusterSummary/mapreduce:tasksFailedPing/session/cluster"},
+{"label":"Tasks Failed Timeout","value":"/hicc/v1/metrics/series/ClusterSummary/mapreduce:tasksFailedTimeout/session/cluster"}
 ]},
 {"name":"width","type":"select","value":"300","label":"Width","options":[
 {"label":"300","value":"300"},

Modified: incubator/chukwa/trunk/src/main/web/hicc/jsp/debug.jsp
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/web/hicc/jsp/debug.jsp?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/web/hicc/jsp/debug.jsp (original)
+++ incubator/chukwa/trunk/src/main/web/hicc/jsp/debug.jsp Tue Nov 29 08:40:38 2011
@@ -17,8 +17,12 @@
  * limitations under the License.
  */
 %>
-<%@ page import = "javax.servlet.http.*, java.sql.*,java.io.*, java.util.Calendar, java.util.Date, java.text.SimpleDateFormat, java.util.*" %>
+<%@ page import = "javax.servlet.http.*, java.sql.*,java.io.*, java.util.Calendar, java.util.Date, java.text.SimpleDateFormat, java.util.*, org.apache.hadoop.chukwa.util.XssFilter" %>
 <%
+       XssFilter xf = new XssFilter(request);
+       response.setContentType("text/html; chartset=UTF-8//IGNORE");
+       response.setHeader("boxId", xf.getParameter("boxId"));
+
        for (Enumeration e = session.getAttributeNames() ; e.hasMoreElements() ;) {
            String name = (String) e.nextElement();
            out.println(name+":"+session.getAttribute(name).toString());

Modified: incubator/chukwa/trunk/src/main/web/hicc/jsp/event.jsp
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/web/hicc/jsp/event.jsp?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/web/hicc/jsp/event.jsp (original)
+++ incubator/chukwa/trunk/src/main/web/hicc/jsp/event.jsp Tue Nov 29 08:40:38 2011
@@ -21,20 +21,31 @@
 <% TimeHandler time = new TimeHandler(request, (String)session.getAttribute("time_zone"));
    long start = time.getStartTime();
    long end = time.getEndTime();
+   long midpoint = (end+start)/2;
    SimpleDateFormat formatter = new SimpleDateFormat("MMM dd yyyy HH:mm:ss");
-   String startDate = formatter.format(start);
+   String startDate = formatter.format(midpoint);
    String endDate = formatter.format(end);
    String intervalUnit1="MINUTE";
    String intervalUnit2="HOUR";
-   if(((end-start)/1000)>(15*60*60*24)) {
+   int intervalPixels = 10;
+   if(((end-start)/1000)>=(60*60*24*3)) {
        intervalUnit1 = "DAY";
        intervalUnit2 = "WEEK";
-   } else if(((end-start)/1000)>(60*60*24*3)) {
+       intervalPixels = 600;
+       if(((end-start)/1000)>(60*60*24*15)) {
+         intervalPixels = 300;
+       }
+   } else if(((end-start)/1000)>(60*60*6)) {
        intervalUnit1 = "HOUR";
        intervalUnit2 = "DAY";
+       intervalPixels = 600;
    } else {
        intervalUnit1 = "MINUTE";
        intervalUnit2 = "HOUR";
+       intervalPixels = 600;
+       if(((end-start)/1000)>(60*60*3)) {
+         intervalPixels = 250;
+       }
    }
 %>
 <html>
@@ -58,7 +69,7 @@
                 date:           "<%= startDate %>  GMT",
                 width:          "100%", 
                 intervalUnit:   Timeline.DateTime.<%= intervalUnit2 %>, 
-                intervalPixels: 200,
+                intervalPixels: <%= intervalPixels %>,
                 theme: theme,
             })
           ];

Modified: incubator/chukwa/trunk/src/main/web/hicc/jsp/event_viewer.jsp
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/web/hicc/jsp/event_viewer.jsp?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/web/hicc/jsp/event_viewer.jsp (original)
+++ incubator/chukwa/trunk/src/main/web/hicc/jsp/event_viewer.jsp Tue Nov 29 08:40:38 2011
@@ -23,7 +23,7 @@
    response.setHeader("boxId", xf.getParameter("boxId"));
    if(xf.getParameter("type").equals("list")) {
 %>
-<IFRAME id="<%= xf.getParameter("boxId") %>iframe" src="/hicc/jsp/event_viewer_helper.jsp?<%= xf.filter(request.getQueryString()) %>" width="100%" frameborder="0" height="400" scrolling="no"></IFRAME>
+<IFRAME id="<%= xf.getParameter("boxId") %>iframe" src="/hicc/jsp/job_viewer.jsp?<%= xf.filter(request.getQueryString()) %>" width="100%" frameborder="0" height="400" scrolling="no"></IFRAME>
 <% } else { %>
 <IFRAME id="<%= xf.getParameter("boxId") %>iframe" src="/hicc/jsp/event.jsp" width="100%" frameborder="0" height="600"></IFRAME>
 <% } %>

Modified: incubator/chukwa/trunk/src/main/web/hicc/jsp/events-xml.jsp
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/web/hicc/jsp/events-xml.jsp?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/web/hicc/jsp/events-xml.jsp (original)
+++ incubator/chukwa/trunk/src/main/web/hicc/jsp/events-xml.jsp Tue Nov 29 08:40:38 2011
@@ -17,7 +17,9 @@
  * limitations under the License.
  */
 %><?xml version="1.0" encoding="UTF-8"?>
-<%@ page import = "java.util.Calendar, java.util.Date, java.sql.*, java.text.SimpleDateFormat, java.util.*, java.sql.*,java.io.*, java.util.Calendar, java.util.Date, java.text.SimpleDateFormat, org.apache.hadoop.chukwa.hicc.ClusterConfig, org.apache.hadoop.chukwa.hicc.TimeHandler, org.apache.hadoop.chukwa.util.DatabaseWriter, org.apache.hadoop.chukwa.database.Macro, org.apache.hadoop.chukwa.database.DatabaseConfig, org.apache.hadoop.chukwa.util.XssFilter" %>
+<%@ page import = "java.util.Calendar, java.util.Date, java.sql.*, java.text.SimpleDateFormat, java.util.*, java.sql.*,java.io.*, java.util.Calendar, java.util.Date, java.text.SimpleDateFormat, org.apache.hadoop.chukwa.hicc.ClusterConfig, org.apache.hadoop.chukwa.hicc.TimeHandler, org.apache.hadoop.chukwa.util.DatabaseWriter, org.apache.hadoop.chukwa.database.Macro, org.apache.hadoop.chukwa.database.DatabaseConfig, org.apache.hadoop.chukwa.util.XssFilter, org.apache.hadoop.hbase.HBaseConfiguration, org.apache.hadoop.hbase.client.HTableInterface, org.apache.hadoop.hbase.client.HTablePool, org.apache.hadoop.hbase.client.Result, org.apache.hadoop.hbase.client.ResultScanner, org.apache.hadoop.hbase.client.Scan, org.apache.hadoop.conf.Configuration" %>
+<%! final static private Configuration hconf = HBaseConfiguration.create(); %>
+<%! final static private HTablePool pool = new HTablePool(hconf, 60); %>
 <%
     response.setContentType("text/xml");
     XssFilter xf = new XssFilter(request);
@@ -25,74 +27,63 @@
     long start = time.getStartTime();
     long end = time.getEndTime();
     String cluster = (String) session.getAttribute("cluster");
-    String table = "mr_job";
-    if(xf.getParameter("event_type")!=null) {
-      table = xf.getParameter("event_type");
-    }
-    String query = "select job_id,user,submit_time,launch_time,finish_time,status from ["+table+"] where finish_time between '[start]' and '[end]'";
-    Macro mp = new Macro(start,end,query, request);
-    query = mp.toString();
 
+    HTableInterface table = pool.getTable("Jobs");
+
+    String family = "summary";
+
+    Scan scan = new Scan();
+    scan.addColumn(family.getBytes(), "jobId".getBytes());
+    scan.addColumn(family.getBytes(), "user".getBytes());
+    scan.addColumn(family.getBytes(), "submitTime".getBytes());
+    scan.addColumn(family.getBytes(), "launchTime".getBytes());
+    scan.addColumn(family.getBytes(), "finishTime".getBytes());
+    scan.addColumn(family.getBytes(), "status".getBytes());
+    scan.setTimeRange(start, end);
+    scan.setMaxVersions();
+
+    ResultScanner results = table.getScanner(scan);
+    Iterator<Result> it = results.iterator();
     ArrayList<HashMap<String, Object>> events = new ArrayList<HashMap<String, Object>>();
 
-    Connection conn = null;
-    Statement stmt = null;
-    ResultSet rs = null;
-
-    DatabaseWriter dbw = new DatabaseWriter(cluster);
-    try {
-        rs = dbw.query(query);
-        ResultSetMetaData rmeta = rs.getMetaData();
-        int col = rmeta.getColumnCount();
-        while (rs.next()) {
-          HashMap<String, Object> event = new HashMap<String, Object>();
-          long event_time=0;
-          for(int i=1;i<=col;i++) {
-            if(rmeta.getColumnType(i)==java.sql.Types.TIMESTAMP) {
-              event.put(rmeta.getColumnName(i),rs.getTimestamp(i).getTime());
-            } else {
-              event.put(rmeta.getColumnName(i),rs.getString(i));
-            }
-          }
-          events.add(event);
-        }
-    // Now do something with the ResultSet ....
-    } catch (SQLException ex) {
-      // handle any errors
-      //out.println("SQLException: " + ex.getMessage());
-      //out.println("SQLState: " + ex.getSQLState());
-      //out.println("VendorError: " + ex.getErrorCode());
-    } finally {
-      // it is a good idea to release
-      // resources in a finally{} block
-      // in reverse-order of their creation
-      // if they are no-longer needed
-      dbw.close();
+    while(it.hasNext()) {
+      Result result = it.next();
+      HashMap<String, Object> event = new HashMap<String, Object>();
+      event.put("jobId", new String(result.getValue(family.getBytes(), "jobId".getBytes())));
+      event.put("user", new String(result.getValue(family.getBytes(), "user".getBytes())));
+      event.put("submitTime", Long.parseLong(new String(result.getValue(family.getBytes(), "submitTime".getBytes()))));
+      event.put("launchTime", Long.parseLong(new String(result.getValue(family.getBytes(), "launchTime".getBytes()))));
+      event.put("finishTime", Long.parseLong(new String(result.getValue(family.getBytes(), "finishTime".getBytes()))));
+      event.put("status", new String(result.getValue(family.getBytes(), "status".getBytes())));
+      events.add(event);
     }
+    results.close();
+    table.close();
+
 %>
 <data>
 <%
     SimpleDateFormat format = new SimpleDateFormat("MMM dd yyyy HH:mm:ss");
     for(int i=0;i<events.size();i++) {
       HashMap<String, Object> event = events.get(i);
-      start=(Long)event.get("submit_time");
-      end=(Long)event.get("finish_time");
+      start=(Long)event.get("submitTime");
+      end=(Long)event.get("finishTime");
       String event_time = format.format(start);
-      String launch_time = format.format(event.get("launch_time"));
+      String launch_time = format.format((Long)event.get("launchTime"));
       String event_end_time = format.format(end);
       String cell = (String) event.get("_event");
-      if(event.get("status").toString().intern()=="failed".intern()) {
+      if(!event.get("status").toString().equals("SUCCEEDED")) {
 %>
-      <event start="<%= event_time %> GMT" latestStart="<%= launch_time %> GMT" end="<%= event_end_time %> GMT" title="Job ID: <%= event.get("job_id") %>" link="/hicc/jsp/job_viewer.jsp?job_id=<%= event.get("job_id") %>" isDuration="true" color="#f00">
-      Job ID: <%= event.get("job_id") %>
+      <event start="<%= event_time %> GMT" latestStart="<%= launch_time %> GMT" end="<%= event_end_time %> GMT" title="Job ID: <%= event.get("jobId") %>" link="/hicc/jsp/job_viewer.jsp?job_id=<%= event.get("jobId") %>" isDuration="true" color="#f00">
+      Job ID: <%= event.get("jobId") %>
       User: <%= event.get("user") %>
       Status: <%= event.get("status") %>
       </event>
 <%
       } else {
 %>
-      <event start="<%= event_time %> GMT" latestStart="<%= launch_time %> GMT" end="<%= event_end_time %> GMT" title="Job ID: <%= event.get("job_id") %>" link="/hicc/jsp/job_viewer.jsp?job_id=<%= event.get("job_id") %>" isDuration="true">
-      Job ID: <%= event.get("job_id") %>
+      <event start="<%= event_time %> GMT" latestStart="<%= launch_time %> GMT" end="<%= event_end_time %> GMT" title="Job ID: <%= event.get("jobId") %>" link="/hicc/jsp/job_viewer.jsp?job_id=<%= event.get("jobId") %>" isDuration="true">
+      Job ID: <%= event.get("jobId") %>
       User: <%= event.get("user") %>
       Status: <%= event.get("status") %>
       </event>

Modified: incubator/chukwa/trunk/src/main/web/hicc/jsp/job_viewer.jsp
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/web/hicc/jsp/job_viewer.jsp?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/web/hicc/jsp/job_viewer.jsp (original)
+++ incubator/chukwa/trunk/src/main/web/hicc/jsp/job_viewer.jsp Tue Nov 29 08:40:38 2011
@@ -17,10 +17,13 @@
  * limitations under the License.
  */
 %>
-<%@ page import = "java.text.DecimalFormat,java.text.NumberFormat,java.sql.*,java.io.*, org.json.*, java.util.Calendar, java.util.Date, java.text.SimpleDateFormat, java.util.*, org.apache.hadoop.chukwa.hicc.ClusterConfig, org.apache.hadoop.chukwa.hicc.TimeHandler, org.apache.hadoop.chukwa.util.DatabaseWriter, org.apache.hadoop.chukwa.database.Macro, org.apache.hadoop.chukwa.util.XssFilter, org.apache.hadoop.chukwa.database.DatabaseConfig, java.util.ArrayList"  %> 
+<%@ page import = "java.text.DecimalFormat,java.text.NumberFormat,java.sql.*,java.io.*, org.json.*, java.util.Calendar, java.util.Date, java.text.SimpleDateFormat, java.util.*, org.apache.hadoop.chukwa.hicc.ClusterConfig, org.apache.hadoop.chukwa.hicc.TimeHandler, org.apache.hadoop.chukwa.util.DatabaseWriter, org.apache.hadoop.chukwa.database.Macro, org.apache.hadoop.chukwa.util.XssFilter, org.apache.hadoop.chukwa.database.DatabaseConfig, java.util.ArrayList, org.apache.hadoop.hbase.HBaseConfiguration, org.apache.hadoop.hbase.client.HTableInterface, org.apache.hadoop.hbase.client.HTablePool, org.apache.hadoop.hbase.client.Result, org.apache.hadoop.hbase.client.ResultScanner, org.apache.hadoop.hbase.client.Scan, org.apache.hadoop.conf.Configuration"  %> 
+<%! final static private Configuration hconf = HBaseConfiguration.create(); %>
+<%! final static private HTablePool pool = new HTablePool(hconf, 60); %>
 <%
     XssFilter xf = new XssFilter(request);
     NumberFormat nf = new DecimalFormat("###,###,###,##0.00");
+    SimpleDateFormat format = new SimpleDateFormat("MMM dd yyyy HH:mm:ss");
     response.setHeader("boxId", xf.getParameter("boxId"));
     response.setContentType("text/html; chartset=UTF-8//IGNORE"); %>
 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
@@ -36,192 +39,95 @@
 <%
     String boxId=xf.getParameter("boxId");
     String cluster = (String) session.getAttribute("cluster");
-    DatabaseWriter dbw = new DatabaseWriter(cluster);
-    String path = "";
-    Calendar now = Calendar.getInstance();
-    HashMap<String, Integer> index = new HashMap<String, Integer>();
-    long start = 0;
-    long end = now.getTimeInMillis();
-    String startS="";
-    String endS="";
+
     TimeHandler time = new TimeHandler(request, (String)session.getAttribute("time_zone"));
-    startS = time.getStartTimeText();
-    endS = time.getEndTimeText();
-    start = time.getStartTime();
-    end = time.getEndTime();
-    Macro mp = new Macro(start,end,"[util]", request);
-    int averageBy=600;
-    String tmpTable = mp.toString();
-    DatabaseConfig dbc = new DatabaseConfig();
-    String[] tableList = dbc.findTableNameForCharts("util", start, end);
-    if(tableList[0].endsWith("_week")) {
-      averageBy=600;
-    } else if(tableList[0].endsWith("_month")) {
-      averageBy=600;
-    } else if(tableList[0].endsWith("_quarter")) {
-      averageBy=1800;
-    } else if(tableList[0].endsWith("_year")) {
-      averageBy=10800;
-    } else if(tableList[0].endsWith("_decade")) {
-      averageBy=43200;
-    }
-    StringBuilder queryBuilder = new StringBuilder();
-    String query = "";
-    queryBuilder.append("select * from [mr_job] where finish_time between '[start]' and '[end]' ");
-    if(xf.getParameter("job_id")!=null) {
-      queryBuilder = new StringBuilder();
-      mp = new Macro(start,end,"[mr_job]", request);
-      query = mp.toString();
-      queryBuilder.append("select * from ");
-      queryBuilder.append(query);
-      queryBuilder.append(" where finish_time between ? and ? ");
-      queryBuilder.append(" and job_id=?");
-      ArrayList<Object> parms = new ArrayList<Object>();
-      parms.add(new Timestamp(start));
-      parms.add(new Timestamp(end));
-      parms.add(xf.getParameter("job_id"));
-      query = queryBuilder.toString();
-      ResultSet rs = dbw.query(query, parms);
-      ResultSetMetaData rmeta = rs.getMetaData();
-      int col = rmeta.getColumnCount();
-      JSONObject data = new JSONObject();
-      JSONArray rows = new JSONArray();
-      int total=0;
-      while(rs.next()) {
-        JSONArray cells = new JSONArray();
-        out.println("<table id=\"job_summary\">");
-        out.println("<tr><td>Job ID</td><td>User</td><td>Queue</td><td>Status</td><td>Submit Time</td><td>Launch Time</td><td>Finish Time</td></tr>");
-        out.println("<tr>");
-        for(int i=1;i<=7;i++) {
-          out.println("<td>");
-          out.println(rs.getString(i));
-          out.println("</td>");
-        }
-        out.println("</tr></table>");
-        out.println("<table id=\"job_counters\">");
-        out.println("<tr><td colspan=2>HDFS</td><td colspan=2>Map Phase</td><td colspan=2>Combine Phase</td><td colspan=2>Reduce Phase</td></tr>");
-        out.println("<tr><td>Bytes Read</td><td>");
-        if(rs.getString(8)!=null) {
-          out.println(rs.getString(8));
-        }
-        out.println("</td>");
-        out.println("<td>Launched Map Tasks</td><td>");
-        if(rs.getString(12)!=null) {
-          out.println(rs.getString(12));
-        }
-        out.println("</td>");
-        out.println("<td>Combine Input Records</td><td>");
-        if(rs.getString(18)!=null) {
-          out.println(rs.getString(18));
-        }
-        out.println("</td>");
-        out.println("<td>Launched Reduce Tasks</td><td>");
-        if(rs.getString(13)!=null) {
-          out.println(rs.getString(13));
-        }
-        out.println("</td>");
-        out.println("</tr>");
-        out.println("<tr><td>Bytes Written</td><td>");
-        if(rs.getString(9)!=null) {
-          out.println(rs.getString(9));
-        }
-        out.println("</td>");
-        out.println("<td>Data Local Map Tasks</td><td>");
-        if(rs.getString(13)!=null) {
-          out.println(rs.getString(13));
-        }
-        out.println("</td>");
-        out.println("<td>Combine Output Records</td><td>");
-        if(rs.getString(19)!=null) {
-          out.println(rs.getString(19));
-        }
-        out.println("</td>");
-        out.println("<td>Data Local Reduce Tasks</td><td>");
-        if(rs.getString(14)!=null) {
-          out.println(rs.getString(14));
-        }
-        out.println("</td></tr>");
-        out.println("<tr><td colspan=2>Local</td>");
-        out.println("<td>Map Input Bytes</td><td>");
-        if(rs.getString(14)!=null) {
-          out.println(rs.getString(14));
-        }
-        out.println("<td>Spilled Records</td><td>");
-        if(rs.getString(20)!=null) {
-          out.println(rs.getString(20));
-        }
-        out.println("</td>");
-        out.println("<td>Reduce Input Group</td><td>");
-        if(rs.getString(21)!=null) {
-          out.println(rs.getString(21));
-        }
-        out.println("</td></tr>");
-        out.println("<tr><td>Bytes Read</td><td>");
-        if(rs.getString(10)!=null) {
-          out.println(rs.getString(10));
-        }
-        out.println("</td>");
-        out.println("<td>Map Output Bytes</td><td>");
-        if(rs.getString(15)!=null) {
-          out.println(rs.getString(15));
-        }
-        out.println("</td>");
-        out.println("<td colspan=2></td>");
-        out.println("<td>Reduce Output Groups</td><td>");
-        if(rs.getString(23)!=null) {
-          out.println(rs.getString(23));
-        }
-        out.println("</td></tr>");
-        out.println("<tr><td>Bytes Written</td><td>");
-        if(rs.getString(11)!=null) {
-          out.println(rs.getString(11));
-        }
-        out.println("</td><td>Map Input Records</td><td>");
-        if(rs.getString(16)!=null) {
-          out.println(rs.getString(16));
-        }
-        out.println("</td><td colspan=2></td>");
-        out.println("<td>Reduce Input Records</td><td>");
-        if(rs.getString(24)!=null) {
-          out.println(rs.getString(24));
-        }
-        out.println("</td></tr>");
-        out.println("<tr><td colspan=2></td>");
-        out.println("<td>Map Output Records</td><td>");
-        if(rs.getString(17)!=null) {
-          out.println(rs.getString(17));
-        }
-        out.println("</td><td colspan=2></td>");
-        out.println("<td>Reduce Output Records</td><td>");
-        if(rs.getString(25)!=null) {
-          out.println(rs.getString(25));
-        }
-        out.println("</td></tr>");
+    long start = time.getStartTime();
+    long end = time.getEndTime();
+
+    HTableInterface table = pool.getTable("Jobs");
+    String family = "summary";
 
-        out.println("</table>");
-        JSONObject job = new JSONObject(rs.getString(27));
-        Iterator<String> keys = job.keys();
-        out.println("<table id=\"job_conf\">");
-        while(keys.hasNext()) {
-          String key = (String) keys.next();
-          out.println("<tr><td>");
-          out.println(key);
-          out.println("</td><td>");
-          out.println(job.get(key));
-          out.println("</td></tr>");
-        }
-        out.println("</table>");
+    Scan scan = new Scan();
+    scan.addColumn(family.getBytes(), "jobId".getBytes());
+    scan.addColumn(family.getBytes(), "user".getBytes());
+    scan.addColumn(family.getBytes(), "submitTime".getBytes());
+    scan.addColumn(family.getBytes(), "launchTime".getBytes());
+    scan.addColumn(family.getBytes(), "finishTime".getBytes());
+    scan.addColumn(family.getBytes(), "status".getBytes());
+    scan.addColumn(family.getBytes(), "cluster".getBytes());
+    scan.addColumn(family.getBytes(), "queue".getBytes());
+    scan.addColumn(family.getBytes(), "numMaps".getBytes());
+    scan.addColumn(family.getBytes(), "numReduces".getBytes());
+    scan.addColumn(family.getBytes(), "numSlotsPerMap".getBytes());
+    scan.addColumn(family.getBytes(), "numSlotsPerReduce".getBytes());
+    scan.addColumn(family.getBytes(), "mapSlotSeconds".getBytes());
+    scan.addColumn(family.getBytes(), "reduceSlotsSeconds".getBytes());
+    scan.addColumn(family.getBytes(), "status".getBytes());
+    scan.setTimeRange(start, end);
+    scan.setMaxVersions();
+
+    ResultScanner results = table.getScanner(scan);
+    Iterator<Result> it = results.iterator();
+%>
+<table id="job_summary">
+<tr>
+  <td>Job ID</td>
+  <td>Cluster</td>
+  <td>User</td>
+  <td>Queue</td>
+  <td>Status</td>
+  <td>Submit Time</td>
+  <td>Launch Time</td>
+  <td>Finish Time</td>
+  <td>Number of Maps</td>
+  <td>Number of Reduces</td>
+  <td>Number of Slots Per Map</td>
+  <td>Number of Slots Per Reduce</td>
+  <td>Map Slots Seconds</td>
+  <td>Reduce Slots Seconds</td>
+</tr>
+<%
+    while(it.hasNext()) {
+      Result result = it.next();
+      boolean print = true;
+      if(xf.getParameter("job_id")!=null) {
+        print = false;
+      }
+      String jobId = new String(result.getValue(family.getBytes(), "jobId".getBytes()));
+      if(jobId.equals(xf.getParameter("job_id"))) {
+        print = true;
+      }
+      if(cluster!=null && cluster.equals(new String(result.getValue(family.getBytes(), "cluster".getBytes())))) {
+        print = true;
+      }
+      if(print) {
+%>
+<tr>
+  <td><%= new String(result.getValue(family.getBytes(), "jobId".getBytes())) %></td>
+  <td><%= new String(result.getValue(family.getBytes(), "cluster".getBytes())) %></td>
+  <td><%= new String(result.getValue(family.getBytes(), "user".getBytes())) %></td>
+  <td><%= new String(result.getValue(family.getBytes(), "queue".getBytes())) %></td>
+  <td><%= new String(result.getValue(family.getBytes(), "status".getBytes())) %></td>
+  <td><%= format.format(Long.parseLong(new String(result.getValue(family.getBytes(), "submitTime".getBytes())))) %></td>
+  <td><%= format.format(Long.parseLong(new String(result.getValue(family.getBytes(), "launchTime".getBytes())))) %></td>
+  <td><%= format.format(Long.parseLong(new String(result.getValue(family.getBytes(), "finishTime".getBytes())))) %></td>
+  <td><%= new String(result.getValue(family.getBytes(), "numMaps".getBytes())) %></td>
+  <td><%= new String(result.getValue(family.getBytes(), "numReduces".getBytes())) %></td>
+  <td><%= new String(result.getValue(family.getBytes(), "numSlotsPerMap".getBytes())) %></td>
+  <td><%= new String(result.getValue(family.getBytes(), "numSlotsPerReduce".getBytes())) %></td>
+  <td><%= new String(result.getValue(family.getBytes(), "mapSlotSeconds".getBytes())) %></td>
+  <td><%= new String(result.getValue(family.getBytes(), "reduceSlotsSeconds".getBytes())) %></td>
+</tr>
+<%
       }
-      dbw.close();
-    } else {
-      out.println("Please select a Job ID.");
     }
+    results.close();
+    table.close();
 %>
+</table>
 <script type="text/javascript">
 $(document).ready(function(){
-  $('#job_summary').flexigrid({title:'Job Summary',height:'auto'});
-  $('#job_counters').flexigrid({title:'Job Counters',height:'auto'});
-  $('#job_conf').flexigrid({title:'Job Configuration'});
+  $('#job_summary').flexigrid({title:'Job Summary',height:'340'});
 });
 </script>
 </div></body></html>

Modified: incubator/chukwa/trunk/src/main/web/hicc/views/default.view
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/main/web/hicc/views/default.view?rev=1207789&r1=1207788&r2=1207789&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/main/web/hicc/views/default.view (original)
+++ incubator/chukwa/trunk/src/main/web/hicc/views/default.view Tue Nov 29 08:40:38 2011
@@ -1 +1 @@
-{"description":"default","pages":[{"title":"Cluster Status","layout":[[{"id":"time","title":"Time","description":"Global control to manipulate time across widgets","categories":"Global,Utility","parameters":[{"name":"time_zone","callback":"\/hicc\/jsp\/get_timezone_list.jsp","edit":1,"value":["UTC"],"label":"Time Zone","type":"select_callback"},{"name":"time_type","edit":1,"value":["range"],"label":"Style","type":"select","options":[{"value":"date","label":"Date Picker"},{"value":"range","label":"Time Period"},{"value":"slider","label":"Slider"}]}],"refresh":0,"url":"jsp\/time.jsp","version":"0.1"},{"id":"graph_explorer","title":"Graph Explorer","description":"Graph explorer for visualize data on Hbase.","categories":"Developer,Utilities","parameters":[{"name":"width","edit":1,"value":["300"],"label":"Width","type":"select","options":[{"value":"300","label":"300"},{"value":"400","label":"400"},{"value":"500","label":"500"},{"value":"600","label":"600"},{"value":"800","label"
 :"800"},{"value":"1000","label":"1000"},{"value":"1200","label":"1200"}]},{"name":"height","edit":1,"value":["1000"],"label":"Height","type":"select","options":[{"value":"300","label":"300"},{"value":"400","label":"450"},{"value":"500","label":"600"},{"value":"800","label":"800"},{"value":"1000","label":"1000"},{"value":"1200","label":"1200"}]}],"refresh":0,"url":"iframe\/jsp\/graph_explorer.jsp","version":"0.1"}]],"columns":1,"colSize":[99]}],"name":"default","owner":"system","permissionType":"public"}
+{"description":"default","pages":[{"title":"Cluster Status","layout":[[{"id":"time","title":"Time","description":"Global control to manipulate time across widgets","categories":"Global,Utility","parameters":[{"name":"time_zone","callback":"\/hicc\/jsp\/get_timezone_list.jsp","edit":1,"value":["UTC"],"label":"Time Zone","type":"select_callback"},{"name":"time_type","edit":1,"value":["range"],"label":"Style","type":"select","options":[{"value":"date","label":"Date Picker"},{"value":"range","label":"Time Period"},{"value":"slider","label":"Slider"}]}],"refresh":0,"url":"jsp\/time.jsp","version":"0.1"},{"id":"graph_explorer","title":"Graph Explorer","description":"Graph explorer for visualize data on Hbase.","categories":"Developer,Utilities","parameters":[{"name":"width","edit":1,"value":["300"],"label":"Width","type":"select","options":[{"value":"300","label":"300"},{"value":"400","label":"400"},{"value":"500","label":"500"},{"value":"600","label":"600"},{"value":"800","label"
 :"800"},{"value":"1000","label":"1000"},{"value":"1200","label":"1200"}]},{"name":"height","edit":1,"value":["1000"],"label":"Height","type":"select","options":[{"value":"300","label":"300"},{"value":"400","label":"450"},{"value":"500","label":"600"},{"value":"800","label":"800"},{"value":"1000","label":"1000"},{"value":"1200","label":"1200"}]}],"refresh":0,"url":"iframe\/jsp\/graph_explorer.jsp","version":"0.1"}]],"columns":1,"colSize":[99]}],"name":"default","owner":"admin","permissionType":"public"}



Mime
View raw message