chukwa-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ey...@apache.org
Subject svn commit: r786741 - in /hadoop/chukwa/trunk: conf/database_create_tables.sql conf/mdl.xml src/web/hicc/descriptors/heatmap_datanode.descriptor src/web/hicc/jsp/heatmap-datanode-data-js.jsp src/web/hicc/jsp/heatmap_datanode.jsp
Date Sat, 20 Jun 2009 01:32:36 GMT
Author: eyang
Date: Sat Jun 20 01:32:36 2009
New Revision: 786741

URL: http://svn.apache.org/viewvc?rev=786741&view=rev
Log:
CHUKWA-299. Added HDFS Spatial heatmaps visualization. (Jiaqi Tan via Eric Yang)

Added:
    hadoop/chukwa/trunk/src/web/hicc/descriptors/heatmap_datanode.descriptor
    hadoop/chukwa/trunk/src/web/hicc/jsp/heatmap-datanode-data-js.jsp
    hadoop/chukwa/trunk/src/web/hicc/jsp/heatmap_datanode.jsp
Modified:
    hadoop/chukwa/trunk/conf/database_create_tables.sql
    hadoop/chukwa/trunk/conf/mdl.xml

Modified: hadoop/chukwa/trunk/conf/database_create_tables.sql
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/conf/database_create_tables.sql?rev=786741&r1=786740&r2=786741&view=diff
==============================================================================
--- hadoop/chukwa/trunk/conf/database_create_tables.sql (original)
+++ hadoop/chukwa/trunk/conf/database_create_tables.sql Sat Jun 20 01:32:36 2009
@@ -24,6 +24,7 @@
 drop table if exists QueueInfo;
 drop table if exists mapreduce_fsm_template;
 drop table if exists user_job_summary_template;
+drop table if exists filesystem_fsm_template;
 
 create table if not exists node_activity_template (
     timestamp  timestamp default CURRENT_TIMESTAMP,
@@ -750,3 +751,21 @@
     totalReduces double null,
     primary key(userid, timestamp)
 ) ENGINE=InnoDB;
+
+
+create table if not exists filesystem_fsm_template (
+    block_id VARCHAR(80),
+    unique_id VARCHAR(80),
+    client_id VARCHAR(80),
+    state_name VARCHAR(80),
+    hostname VARCHAR(80),
+    other_host VARCHAR(80),
+    start_time timestamp default 0,
+    finish_time timestamp default 0,
+    start_time_millis bigint default 0,
+    finish_time_millis bigint default 0,
+    status varchar(10) default 0,
+    bytes bigint default 0,
+    primary key(unique_id),
+    index(start_time, finish_time, unique_id)
+) ENGINE=InnoDB;

Modified: hadoop/chukwa/trunk/conf/mdl.xml
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/conf/mdl.xml?rev=786741&r1=786740&r2=786741&view=diff
==============================================================================
--- hadoop/chukwa/trunk/conf/mdl.xml (original)
+++ hadoop/chukwa/trunk/conf/mdl.xml Sat Jun 20 01:32:36 2009
@@ -1966,7 +1966,7 @@
 <property><name>metric.jobconfdata.job_conf.mapred.reducer.class</name><value>mr_reducer_cls</value></property>
 <property><name>metric.jobconfdata.job_conf.mapred.mapper.class</name><value>mr_mapper_cls</value></property>
 
-<!-- SALSA additions -->
+<!-- SALSA mapreduce fsm additions -->
 
 <property><name>report.db.name.mapreduce_fsm</name><value>mapreduce_fsm</value></property>
 <property><name>report.db.primary.key.mapreduce_fsm</name><value>unique_id</value></property>
@@ -1998,5 +1998,22 @@
 <property><name>report.db.name.userdailysummary</name><value>user_job_summary</value></property>
 <property><name>consolidator.table.user_job_summary</name><value>5,30,180,720</value></property>
 
+<!-- SALSA filesystem fsm additions -->
+<property><name>report.db.name.filesystem_fsm</name><value>filesystem_fsm</value></property>
+<property><name>report.db.primary.key.filesystem_fsm</name><value>unique_id</value></property>
+<property><name>consolidator.table.filesystem_fsm</name><value>5,30,180,720</value></property>
+<property><name>metric.filesystem_fsm.job_id</name><value>client_id</value></property>
+<property><name>metric.filesystem_fsm.state_uniq_id</name><value>unique_id</value></property>
+<property><name>metric.filesystem_fsm.task_id</name><value>block_id</value></property>
+<property><name>metric.filesystem_fsm.state_name</name><value>state_name</value></property>
+<property><name>metric.filesystem_fsm.host</name><value>hostname</value></property>
+<property><name>metric.filesystem_fsm.host_other</name><value>other_host</value></property>
+<property><name>metric.filesystem_fsm.time_start</name><value>start_time</value></property>
+<property><name>metric.filesystem_fsm.time_end</name><value>finish_time</value></property>
+<property><name>metric.filesystem_fsm.time_start_millis</name><value>start_time_millis</value></property>
+<property><name>metric.filesystem_fsm.time_end_millis</name><value>finish_time_millis</value></property>
+<property><name>metric.filesystem_fsm.state_string</name><value>status</value></property>
+<property><name>metric.filesystem_fsm.counter_bytes</name><value>bytes</value></property>
+
 </configuration>
 

Added: hadoop/chukwa/trunk/src/web/hicc/descriptors/heatmap_datanode.descriptor
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/web/hicc/descriptors/heatmap_datanode.descriptor?rev=786741&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/web/hicc/descriptors/heatmap_datanode.descriptor (added)
+++ hadoop/chukwa/trunk/src/web/hicc/descriptors/heatmap_datanode.descriptor Sat Jun 20 01:32:36 2009
@@ -0,0 +1,37 @@
+{
+"id":"heatmap_datanode_viewer",
+"title":"HDFS Heatmap",
+"version":"0.1",
+"categories":"Hadoop,Status",
+"module":"iframe/jsp/heatmap_datanode.jsp",
+"description":"Display Time sorted events",
+"screendump":"\/images\/start.png",
+"refresh":"15",
+"parameters":[
+{"name":"height","type":"select","value":"550","label":"Height","options":[
+{"label":"300","value":"300"},
+{"label":"450","value":"450"},
+{"label":"550","value":"550"},
+{"label":"650","value":"650"},
+{"label":"750","value":"750"},
+{"label":"850","value":"850"},
+]},
+{"name":"heatmap_datanode_stattype","type":"select","value":"transaction_count","label":"Statistic Type",
+"options":[
+{"label":"Number of Transactions","value":"transaction_count"},
+{"label":"Average Duration","value":"avg_duration"},
+{"label":"Average Volume","value":"avg_volume"},
+{"label":"Total Duration","value":"total_duration"},
+{"label":"Total Volume","value":"total_volume"}
+]},
+{"name":"heatmap_datanode_state","type":"select","value":"read_local","label":"HDFS State",
+"options":[
+{"label":"Local Reads","value":"read_local"},
+{"label":"Remote Reads","value":"read_remote"},
+{"label":"Local Writes","value":"write_local"},
+{"label":"Remote Writes","value":"write_remote"},
+{"label":"Replication Writes","value":"write_replicated"}
+]}
+]
+}
+

Added: hadoop/chukwa/trunk/src/web/hicc/jsp/heatmap-datanode-data-js.jsp
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/web/hicc/jsp/heatmap-datanode-data-js.jsp?rev=786741&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/web/hicc/jsp/heatmap-datanode-data-js.jsp (added)
+++ hadoop/chukwa/trunk/src/web/hicc/jsp/heatmap-datanode-data-js.jsp Sat Jun 20 01:32:36 2009
@@ -0,0 +1,255 @@
+<%
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file 
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+%>
+<%@ page import = "java.util.Calendar, java.util.Date, java.sql.*, java.text.SimpleDateFormat, java.util.*, java.sql.*,java.io.*,java.lang.Math, java.util.Calendar, java.util.Date, java.text.SimpleDateFormat, java.lang.StringBuilder, org.apache.hadoop.chukwa.hicc.ClusterConfig, org.apache.hadoop.chukwa.hicc.TimeHandler, org.apache.hadoop.chukwa.util.DatabaseWriter, org.apache.hadoop.chukwa.database.Macro, org.apache.hadoop.chukwa.database.DatabaseConfig, org.apache.hadoop.chukwa.util.XssFilter" %>
+<%@ page session="true" %> 
+<%
+
+// TODO: Read parameters from query string or environment to choose mode
+// of data desired, e.g. duration, # transactions, etc. ; 
+// use widget properties to create selectors for these various modes
+
+response.setContentType("text/javascript");
+
+// initial setup
+XssFilter xf = new XssFilter(request);
+TimeHandler time = new TimeHandler(request, (String)session.getAttribute("time_zone"));
+long start = time.getStartTime();
+long end = time.getEndTime();
+String cluster = (String) session.getAttribute("cluster");
+String table = "filesystem_fsm";
+
+// decide type of statistics we want
+String query_stats_mode = (String) xf.getParameter("heatmap_datanode_stattype");
+if (query_stats_mode == null || query_stats_mode.length() <= 0) {
+  query_stats_mode = new String("transaction_count");
+}
+
+// decide type of state we're interested in
+String query_state = (String) xf.getParameter("heatmap_datanode_state");
+out.println("/" + "/ " + "state: " + query_state);
+if (query_state == null || query_state.length() <= 0) {
+  query_state = new String("read_local");
+}
+
+// actual work: process query
+if(xf.getParameter("event_type")!=null) {
+  table = xf.getParameter("event_type");
+}
+String query = "select block_id,start_time,finish_time,start_time_millis,finish_time_millis,status,state_name,hostname,other_host,bytes from ["+table+"] where finish_time between '[start]' and '[end]' and state_name like '" + query_state + "'";
+Macro mp = new Macro(start,end,query, request);
+query = mp.toString() + " order by start_time";
+
+out.println("/" + "/" + "datanode: " + query + " cluster: " + cluster);
+
+ArrayList<HashMap<String, Object>> events = new ArrayList<HashMap<String, Object>>();
+
+Connection conn = null;
+Statement stmt = null;
+ResultSet rs = null;
+
+DatabaseWriter dbw = new DatabaseWriter(cluster);
+try {
+  rs = dbw.query(query);
+  ResultSetMetaData rmeta = rs.getMetaData();
+  int col = rmeta.getColumnCount();
+  while (rs.next()) {
+    HashMap<String, Object> event = new HashMap<String, Object>();
+    long event_time=0;
+    for(int i=1;i<=col;i++) {
+      if(rmeta.getColumnType(i)==java.sql.Types.TIMESTAMP) {
+        event.put(rmeta.getColumnName(i),rs.getTimestamp(i).getTime());
+      } else {
+        event.put(rmeta.getColumnName(i),rs.getString(i));
+      }
+    }
+    events.add(event);
+  }
+} catch (SQLException ex) {
+  // handle any errors
+  //out.println("SQLException: " + ex.getMessage());
+  //out.println("SQLState: " + ex.getSQLState());
+  //out.println("VendorError: " + ex.getErrorCode());
+} finally {
+  // it is a good idea to release
+  // resources in a finally{} block
+  // in reverse-order of their creation
+  // if they are no-longer needed
+  dbw.close();
+}
+%>
+
+function generateData() {
+<%
+  SimpleDateFormat format = new SimpleDateFormat("MMM dd yyyy HH:mm:ss");
+  HashMap<String, Integer> reduce_ytick_ids = new HashMap<String, Integer>();
+
+  out.println("/" + "/ " + events.size() + " results returned.");
+
+  HashSet<String> host_set = new HashSet<String>();
+  HashMap<String, Integer> host_indices = new HashMap<String, Integer>();
+
+  // collect hosts
+  for(int i = 0; i < events.size(); i++) {
+    HashMap<String, Object> event = events.get(i);
+    String curr_host = (String) event.get("hostname");
+    String other_host = (String) event.get("other_host");
+    host_set.add(curr_host);
+    host_set.add(other_host);
+  }
+  int num_hosts = host_set.size();
+  
+  Iterator<String> host_iter = host_set.iterator();
+  for (int i = 0; i < num_hosts && host_iter.hasNext(); i++) {
+    String curr_host = host_iter.next();
+    host_indices.put(curr_host, new Integer(i));
+  }
+  
+  out.println("/" + "/" + " Number of hosts: " + num_hosts);
+  long stats[][] = new long[num_hosts][num_hosts];
+  long count[][] = new long[num_hosts][num_hosts]; // used for averaging
+
+  int start_millis = 0, end_millis = 0;
+
+  // deliberate design choice to duplicate code PER possible operation
+  // otherwise we have to do the mode check N times, for N states returned
+  if (query_stats_mode.equals("transaction_count")) {
+    for(int i=0;i<events.size();i++) {
+      HashMap<String, Object> event = events.get(i);
+      start=(Long)event.get("start_time");
+      end=(Long)event.get("finish_time");
+      start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
+      end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));      
+      String cell = (String) event.get("state_name");      
+      String this_host = (String) event.get("hostname");
+      String other_host = (String) event.get("other_host");
+      int this_host_idx = host_indices.get(this_host).intValue();
+      int other_host_idx = host_indices.get(other_host).intValue();
+      
+      // from, to
+      stats[this_host_idx][other_host_idx] += 1;
+    }
+  } else if (query_stats_mode.equals("avg_duration")) {
+    for(int i=0;i<events.size();i++) {
+      HashMap<String, Object> event = events.get(i);
+      start=(Long)event.get("start_time");
+      end=(Long)event.get("finish_time");
+      start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
+      end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));      
+      String cell = (String) event.get("state_name");      
+      String this_host = (String) event.get("hostname");
+      String other_host = (String) event.get("other_host");
+      int this_host_idx = host_indices.get(this_host).intValue();
+      int other_host_idx = host_indices.get(other_host).intValue();
+      
+      long curr_val = end_millis - start_millis + ((end - start)*1000);
+      
+      // from, to
+      stats[this_host_idx][other_host_idx] += curr_val;
+      count[this_host_idx][other_host_idx] += 1;
+    }    
+    for (int i = 0; i < num_hosts; i++) {
+      for (int j = 0; j < num_hosts; j++) {
+        if (count[i][j] > 0) stats[i][j] = stats[i][j] / count[i][j];
+      }
+    }
+  } else if (query_stats_mode.equals("avg_volume")) {
+    for(int i=0;i<events.size();i++) {
+      HashMap<String, Object> event = events.get(i);
+      start=(Long)event.get("start_time");
+      end=(Long)event.get("finish_time");
+      start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
+      end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));      
+      String cell = (String) event.get("state_name");      
+      String this_host = (String) event.get("hostname");
+      String other_host = (String) event.get("other_host");
+      int this_host_idx = host_indices.get(this_host).intValue();
+      int other_host_idx = host_indices.get(other_host).intValue();
+      
+      long curr_val = Long.parseLong((String)event.get("bytes"));
+      
+      // from, to
+      stats[this_host_idx][other_host_idx] += curr_val;
+      count[this_host_idx][other_host_idx] += 1;
+    }    
+    for (int i = 0; i < num_hosts; i++) {
+      for (int j = 0; j < num_hosts; j++) {
+        if (count[i][j] > 0) stats[i][j] = stats[i][j] / count[i][j];
+      }
+    }
+  } else if (query_stats_mode.equals("total_duration")) {
+    for(int i=0;i<events.size();i++) {
+      HashMap<String, Object> event = events.get(i);
+      start=(Long)event.get("start_time");
+      end=(Long)event.get("finish_time");
+      start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
+      end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));      
+      String cell = (String) event.get("state_name");      
+      String this_host = (String) event.get("hostname");
+      String other_host = (String) event.get("other_host");
+      int this_host_idx = host_indices.get(this_host).intValue();
+      int other_host_idx = host_indices.get(other_host).intValue();
+      
+      double curr_val = end_millis - start_millis + ((end - start)*1000);
+      
+      // from, to
+      stats[this_host_idx][other_host_idx] += curr_val;
+    } 
+  } else if (query_stats_mode.equals("total_volume")) {
+    for(int i=0;i<events.size();i++) {
+      HashMap<String, Object> event = events.get(i);
+      start=(Long)event.get("start_time");
+      end=(Long)event.get("finish_time");
+      start_millis = Integer.parseInt(((String)event.get("start_time_millis")));
+      end_millis = Integer.parseInt(((String)event.get("finish_time_millis")));      
+      String cell = (String) event.get("state_name");      
+      String this_host = (String) event.get("hostname");
+      String other_host = (String) event.get("other_host");
+      int this_host_idx = host_indices.get(this_host).intValue();
+      int other_host_idx = host_indices.get(other_host).intValue();
+      
+      long curr_val = Long.parseLong((String)event.get("bytes"));
+      
+      // from, to
+      stats[this_host_idx][other_host_idx] += curr_val;
+    }    
+  }
+  
+%>
+heatmap_size = <%= num_hosts %>;
+heatmap_data = [<%
+  for (int i = 0; i < num_hosts; i++) {
+    for (int j = 0; j < num_hosts; j++) {
+      if (i > 0 || j > 0) out.println(",");
+      out.print("[" + i + "," + j + "," + stats[j][i] + "]");
+    }
+  }
+%>];
+heatmap_names = [<%
+  host_iter = host_set.iterator();
+  for (int i = 0; i < num_hosts && host_iter.hasNext(); i++) {
+    if (i > 0) out.print(",");
+    out.print("'" + host_iter.next() + "'");
+  }
+%>];
+
+ $("#resultcountholder").text("<%= events.size() %> states returned.");
+
+}
+

Added: hadoop/chukwa/trunk/src/web/hicc/jsp/heatmap_datanode.jsp
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/web/hicc/jsp/heatmap_datanode.jsp?rev=786741&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/web/hicc/jsp/heatmap_datanode.jsp (added)
+++ hadoop/chukwa/trunk/src/web/hicc/jsp/heatmap_datanode.jsp Sat Jun 20 01:32:36 2009
@@ -0,0 +1,320 @@
+<%
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file 
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+%>
+<%@ page import = "java.util.Calendar, java.util.Date, java.sql.*, java.text.SimpleDateFormat, java.util.*, java.sql.*,java.io.*,java.lang.Math, java.util.Calendar, java.util.Date, java.text.SimpleDateFormat, java.lang.StringBuilder, org.apache.hadoop.chukwa.util.XssFilter" %>
+<% response.setContentType("text/html"); %>
+<%
+
+XssFilter xf = new XssFilter(request);
+
+// decide type of statistics we want
+String query_stats_mode = (String) xf.getParameter("heatmap_datanode_stattype");
+if (query_stats_mode == null || query_stats_mode.length() <= 0) {
+  query_stats_mode = new String("transaction_count");
+}
+
+// decide type of state we're interested in
+String query_state = (String) xf.getParameter("heatmap_datanode_state");
+if (query_state == null || query_state.length() <= 0) {
+  query_state = new String("read_local");
+}
+
+HashMap<String, String> prettyStateNames = new HashMap<String, String>();
+
+prettyStateNames.put("read_local", "Local Block Reads");
+prettyStateNames.put("write_local", "Local Block Writes");
+prettyStateNames.put("read_remote", "Remote Block Reads");
+prettyStateNames.put("write_remote", "Remote Block Writes");
+prettyStateNames.put("write_replicated", "Replicated Block Writes");
+
+HashMap<String, String> prettyStatisticNames = new HashMap<String, String>();
+
+prettyStatisticNames.put("transaction_count", "Number of Transactions");
+prettyStatisticNames.put("avg_duration", "Average Duration<br />(ms)");
+prettyStatisticNames.put("avg_volume", "Average Volume<br />(bytes)");
+prettyStatisticNames.put("total_duration", "Total Duration<br />(ms)");
+prettyStatisticNames.put("total_volume", "Total Volume<br />(bytes)");
+
+%>
+  <html><head> 
+
+    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> 
+      <title>2D Spectrum Viewer</title> 
+      <link href="/hicc/css/heatmap/layout.css" rel="stylesheet" type="text/css"> 
+        <script language="javascript" type="text/javascript" src="/hicc/js/jquery-1.2.6.min.js"></script>
+        <script language="javascript" type="text/javascript" src="/hicc/js/jquery.flot.pack.js"></script>
+
+        <script language="javascript" type="text/javascript" src="/hicc/js/excanvas.pack.js"></script>
+        <script id="source" language="javascript" type="text/javascript" src="heatmap-datanode-data-js.jsp?heatmap_datanode_stattype=<%= query_stats_mode %>&heatmap_datanode_state=<%= query_state %>"></script>

+        <script>
+          function activateplot()
+          {
+            document.getElementById('clearSelection').click();
+          }
+        </script>
+        <script id="source2" language="javascript" type="text/javascript">
+// to eventually be moved out
+// this takes the data structures from the data generation and
+// generates data structures for flot to plot
+
+          function d2h(d) {return (Math.round(d).toString(16));}
+
+          // external vars: heatmap_size, heatmap_data, heatmap_names
+          function generateGraph() 
+          {
+            var tmpstring = ' ';
+            var count = 0;
+            var minvalue = 0, maxvalue = 0;
+            var COLOR_MAX = 255;
+            var COLOR_MIN = 0;
+            var SCALE=1;
+            
+            color_array = new Array(heatmap_size * heatmap_size);
+            graph_data_array = new Array(heatmap_size * heatmap_size);
+            graph_data_array_small = new Array(heatmap_size * heatmap_size);
+            series_array = new Array(heatmap_size * heatmap_size);
+            ticknames_array = new Array(heatmap_size);
+            graph_tooltips = new Array(heatmap_size+1);
+            
+            var minstarted = 0;
+            for (i = 0; i < heatmap_size; i++) {
+              graph_tooltips[i+1] = new Array(heatmap_size+1);
+              for (j = 0; j < heatmap_size; j++) {
+                // determine min/max
+                if (count <= 0) {
+                  if (heatmap_data[count][2] > 0) {
+                    minvalue = heatmap_data[count][2];
+                    minstarted = 1;
+                  }
+                  maxvalue = heatmap_data[count][2];
+                } else {
+                  if (heatmap_data[count][2] > 0) {
+                    if (minstarted > 0) {
+                      minvalue = heatmap_data[count][2] > minvalue ? minvalue : heatmap_data[count][2];
+                    } else {
+                      minvalue = heatmap_data[count][2];
+                      minstarted = 1;
+                    }
+                  }
+                  maxvalue = heatmap_data[count][2] < maxvalue ? maxvalue : heatmap_data[count][2];
+                }
+                // create coordinates
+                // graph_data_array[count] = {
+                //   data: [i+1,j+1]
+                // };
+                // graph_data_array[count] = new Array(2);
+                // graph_data_array[count][0] = i+1;
+                // graph_data_array[count][1] = j+1;
+
+                graph_tooltips[i+1][j+1] = 'State: <%= prettyStateNames.get(query_state) %><br />Statistic: <%= prettyStatisticNames.get(query_stats_mode) %><br />Value: ' 
+                  + heatmap_data[count][2] + "<br />From: " + heatmap_names[i] + "<br />"
+                  + "To: " + heatmap_names[j];
+                  
+                count++;                
+              }
+              ticknames_array[i] = [i+1, heatmap_names[i]];
+            }
+            
+            $("#scale_max_placeholder").text(maxvalue);
+            $("#scale_mid_placeholder").text(((maxvalue-minvalue)/2)+minvalue);
+            $("#scale_min_placeholder").text(minvalue);
+            
+            count = 0;
+            for (i = 0; i < heatmap_size; i++) {
+              for (j = 0; j < heatmap_size; j++) {
+                if (heatmap_data[count][2] == 0) {
+                  colorstring = '999999';
+                } else {
+                  colorstring = d2h((((heatmap_data[count][2] - minvalue) / (maxvalue - minvalue)) * (COLOR_MAX - COLOR_MIN)) + COLOR_MIN);
+                  var len=colorstring.length;
+                  for (k = len; k < 2; k++) colorstring = '0' + colorstring;
+                  colorstring = colorstring + '0000';
+                }
+                
+                colorstring = '#' + colorstring;
+                color_array[count] = colorstring;
+                series_array[count] = { lines: {show: true, radius:999} };
+
+                graph_data_array[count] = {
+                  points: {show: true, radius: 15, fill: true, fillColor: false}, 
+                  color: colorstring,
+                  data: [[(heatmap_data[count][0]+1)/SCALE, (heatmap_data[count][1]+1)/SCALE]]
+                }
+                graph_data_array_small[count] = {
+                  points: {show: true, radius: 4, fill: true, fillColor: false}, 
+                  color: colorstring,
+                  data: [[(heatmap_data[count][0]+1)/SCALE, (heatmap_data[count][1]+1)/SCALE]]
+                }
+
+                count++;
+              }
+            }
+
+            graph_options = {
+              grid: { hoverable: true },
+              yaxis: {autoscaleMargin: 0.1, ticks: ticknames_array },
+              xaxis: {autoscaleMargin: 0.1, ticks: ticknames_array },
+              selection: { mode: "xy" },
+              shadowSize: 0
+            };
+            graph_options_small = {
+              grid: { hoverable: true },
+              yaxis: {autoscaleMargin: 0.1, ticks: [] },
+              xaxis: {autoscaleMargin: 0.1, ticks: [] },
+              selection: { mode: "xy" },
+              shadowSize: 0
+            };            
+          }
+        </script>
+        <script id="source3" language="javascript" type="text/javascript">
+// to eventually be moved out
+// this generates the actual flot options
+
+function plotGraph() {
+
+var placeholder = $("#placeholder");
+var plot = $.plot(placeholder, graph_data_array, graph_options);
+
+var smallplotplaceholder = $("#smallplotplaceholder");
+var smallplot = $.plot(smallplotplaceholder, graph_data_array_small, graph_options_small);
+
+placeholder.bind("plotselected", function (event, ranges) {
+  if (ranges.xaxis.to - ranges.xaxis.from < 0.00001)
+    ranges.xaxis.to = ranges.xaxis.from + 0.00001;
+  if (ranges.yaxis.to - ranges.yaxis.from < 0.00001)
+    ranges.yaxis.to = ranges.yaxis.from + 0.00001;
+
+  plot = $.plot(placeholder, graph_data_array,
+    $.extend(true, {}, graph_options, {
+    grid: { hoverable: true },
+    xaxis: { min: ranges.xaxis.from, max: ranges.xaxis.to }, 
+    yaxis: { min: ranges.yaxis.from, max: ranges.yaxis.to }
+    }
+  )
+);
+
+// don't fire event on the overview to prevent eternal loop
+smallplot.setSelection(ranges, true);
+
+});
+
+smallplotplaceholder.bind("plotselected", function (event, ranges) {
+  plot.setSelection(ranges);
+});
+
+
+
+// Hover text
+var previousPoint = null;
+$("#placeholder").bind("plothover", function (event, pos, item) {
+  if (item) {
+    if (previousPoint != item.datapoint) {
+      previousPoint = item.datapoint;
+
+      $("#tooltip").remove();
+      var x = item.datapoint[0].toFixed(2),
+      y = item.datapoint[1].toFixed(2);
+
+      showTooltip(item.pageX, item.pageY, lookupStateInfo(item.datapoint[0], item.datapoint[1]));
+
+    } else {
+      $("#tooltip").remove();
+      previousPoint = null;            
+    }
+  }
+});       
+
+}
+
+function lookupStateInfo(x,y) {
+  return graph_tooltips[x][y];
+}
+
+function showTooltip(x, y, contents) {
+  $('<div id="tooltip">' + contents + '</div>').css( {
+    position: 'absolute',
+    display: 'none',
+    top: y + 5,
+    left: x + 5,
+    border: '1px solid #fdd',
+    padding: '2px',
+    'background-color': '#fee',
+    opacity: 0.80
+  }).appendTo("body").fadeIn(200);
+}
+
+        </script>
+      </head><div FirebugVersion="1.3.3" style="display: none;" id="_firebugConsole"></div><body onload="generateData(); generateGraph(); plotGraph();">
+      <table cellpadding="0" cellspacing="0">
+        <tbody>
+          <tr>
+
+            <td align="right" valign="top" rowspan="1"><div id="placeholder" style="width: 600px; height: 400px; position: relative;"><canvas width="600" height="400"></canvas><canvas style="position: absolute; left: 0px; top: 0px;" width="600" height="400"></canvas></div></td>
+
+            <td rowspan="1"><div style="width:10px">&nbsp;</div></td>
+
+            <td align="middle"><div id="smallplotplaceholder", style="width:166px;height:100px;"><canvas style="position: absolute; left: 0px; top: 0px;" width="166" height="100"></canvas></div>
+            
+              <br />
+              
+              State: <b><%= prettyStateNames.get(query_state) %></b><br />
+              Statistic: <b><%= prettyStatisticNames.get(query_stats_mode) %></b>
+            
+            </td>
+
+          </tr>
+
+          <tr>
+            <td colspan="3" align="middle" valign="top">
+
+              <br />
+
+              <span id="resultcountholder">No results returned. </span>
+
+              <br />
+
+              <table cellpadding="0" cellspacing="4"><tbody>
+                <tr><th colspan="2">Scale</th></tr>
+                <tr>
+                  <td bgcolor="#ff0000">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</td>
+                  <td bgcolor="#cc0000">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</td>
+                  <td bgcolor="#990000">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</td>
+                  <td bgcolor="#7F0000">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</td>
+                  <td bgcolor="#660000">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</td>
+                  <td bgcolor="#330000">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</td>
+                  <td bgcolor="#000000">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</td>
+                  <td bgcolor="#999999">&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</td>
+                </tr>
+                
+                <tr>
+                  <td><span id="scale_max_placeholder"></span></td>
+                  <td>&nbsp;</td>
+                  <td>&nbsp;</td>
+                  <td><span id="scale_mid_placeholder"></span></td>
+                  <td>&nbsp;</td>
+                  <td>&nbsp;</td>
+                  <td><span id="scale_min_placeholder"></span></td>
+                  <td>0</td>
+                </tr>
+              </tbody></table>
+            </td>
+          </tr>
+
+        </tbody></table> 
+      </body></html>



Mime
View raw message