chukwa-commits mailing list archives

From ey...@apache.org
Subject svn commit: r774531 - in /hadoop/chukwa/trunk/src: java/org/apache/hadoop/chukwa/hicc/ java/org/apache/hadoop/chukwa/util/ test/org/apache/hadoop/chukwa/database/ web/hicc/jsp/
Date Wed, 13 May 2009 21:04:10 GMT
Author: eyang
Date: Wed May 13 21:04:10 2009
New Revision: 774531

URL: http://svn.apache.org/viewvc?rev=774531&view=rev
Log:
CHUKWA-108.  Changed frontend SQL queries to use prepared statements. (Eric Yang)

Added:
    hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java
Modified:
    hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java
    hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java
    hadoop/chukwa/trunk/src/web/hicc/jsp/job_viewer.jsp
    hadoop/chukwa/trunk/src/web/hicc/jsp/single-series-chart-javascript.jsp
    hadoop/chukwa/trunk/src/web/hicc/jsp/util.jsp

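For context: the change replaces string-concatenated SQL with JDBC PreparedStatement placeholders, so user-supplied values travel as bound parameters instead of query text. A minimal sketch of the pattern, assuming a reachable MySQL instance and an illustrative table (not the exact Chukwa schema):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.util.ArrayList;
    import java.util.List;

    public class PreparedStatementSketch {
      public static void main(String[] args) throws SQLException {
        // Hypothetical JDBC URL; any JDBC driver on the classpath will do.
        Connection conn =
            DriverManager.getConnection("jdbc:mysql://localhost/demo?user=demo");
        // The ? placeholders keep values out of the SQL text, so input such
        // as "x' OR '1'='1" cannot change the structure of the query.
        String sql = "select host, cpu_user_pcnt from system_metrics where host=?";
        List<Object> parameters = new ArrayList<Object>();
        parameters.add("chukwa.example.org");
        PreparedStatement stmt = conn.prepareStatement(sql);
        for (int i = 0; i < parameters.size(); i++) {
          stmt.setObject(i + 1, parameters.get(i)); // JDBC indexes are 1-based
        }
        ResultSet rs = stmt.executeQuery();
        while (rs.next()) {
          System.out.println(rs.getString(1) + " " + rs.getInt(2));
        }
        rs.close();
        stmt.close();
        conn.close();
      }
    }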
Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java?rev=774531&r1=774530&r2=774531&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java Wed May 13 21:04:10 2009
@@ -41,7 +41,7 @@
   }
 
   public void execute(String query, boolean groupBySecondColumn,
-      boolean calculateSlope, String formatTime) {
+      boolean calculateSlope, String formatTime, List<Object> parameters) {
     SimpleDateFormat sdf = null;
     dataset.clear();
     try {
@@ -52,7 +52,7 @@
       // handle the error
     }
     Connection conn = null;
-    Statement stmt = null;
+    PreparedStatement stmt = null;
     ResultSet rs = null;
     int counter = 0;
     int size = 0;
@@ -61,9 +61,13 @@
     int labelsCount = 0;
     try {
       conn = org.apache.hadoop.chukwa.util.DriverManagerUtil.getConnection(jdbc);
-      stmt = conn.createStatement();
+      stmt = conn.prepareStatement(query);
+      for(int i=0;i<parameters.size();i++) {
+        int index = i+1;
+        stmt.setObject(index,parameters.get(i));
+      }
       // rs = stmt.executeQuery(query);
-      if (stmt.execute(query)) {
+      if (stmt.execute()) {
         rs = stmt.getResultSet();
         ResultSetMetaData rmeta = rs.getMetaData();
         int col = rmeta.getColumnCount();

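With the new signature, callers hand DatasetMapper the parameter values in placeholder order. A sketch of a call site (JDBC URL, table name, and values are illustrative; the real caller is single-series-chart-javascript.jsp below):

    String jdbc = "jdbc:mysql://localhost/demo?user=demo";  // placeholder URL
    DatasetMapper dataFinder = new DatasetMapper(jdbc);
    List<Object> parameters = new ArrayList<Object>();
    parameters.add("2009-05-13 00:00:00");   // bound to the first ?
    parameters.add("2009-05-13 23:59:59");   // bound to the second ?
    String query = "select timestamp, cpu_user_pcnt from system_metrics_week"
                 + " where timestamp between ? and ? order by timestamp";
    // groupBySecondColumn=false, calculateSlope=false, formatTime="line"
    dataFinder.execute(query, false, false, "line", parameters);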
Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java?rev=774531&r1=774530&r2=774531&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java Wed May 13 21:04:10 2009
@@ -23,7 +23,9 @@
 import java.sql.SQLException;
 import java.sql.Connection;
 import java.sql.Statement;
+import java.sql.PreparedStatement;
 import java.sql.ResultSet;
+import java.util.List;
 import java.text.SimpleDateFormat;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -33,6 +35,7 @@
   private static Log log = LogFactory.getLog(DatabaseWriter.class);
   private Connection conn = null;
   private Statement stmt = null;
+  private PreparedStatement pstmt = null;
   private ResultSet rs = null;
 
   public DatabaseWriter(String host, String user, String password) {
@@ -106,6 +109,27 @@
     return conn;
   }
 
+  public ResultSet query(String query, List<Object> parameters) throws SQLException {
+    try {
+      pstmt = conn.prepareStatement(query);
+      for(int i=0;i<parameters.size();i++) {
+        int index = i+1;
+        pstmt.setObject(index,parameters.get(i));
+      }
+      rs = pstmt.executeQuery();
+    } catch (SQLException ex) {
+      // handle any errors
+      log.debug(ex, ex);
+      log.debug("SQL Statement:" + query);
+      log.debug("SQLException: " + ex.getMessage());
+      log.debug("SQLState: " + ex.getSQLState());
+      log.debug("VendorError: " + ex.getErrorCode());
+      throw ex;
+    } finally {
+    }
+    return rs;
+  }
+
   public ResultSet query(String query) throws SQLException {
     try {
       stmt = conn.createStatement();

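The existing query(String) stays for static SQL; parameterized callers use the new overload. A sketch of its use (cluster name, table, and host are illustrative; error handling omitted):

    DatabaseWriter db = new DatabaseWriter("demo");
    List<Object> parms = new ArrayList<Object>();
    parms.add("chukwa.example.org");
    ResultSet rs = db.query("select * from system_metrics where host=?", parms);
    while (rs.next()) {
      // process a row
    }
    db.close();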
Added: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java?rev=774531&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java (added)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java Wed May 13 21:04:10 2009
@@ -0,0 +1,134 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.chukwa.database;
+
+import junit.framework.TestCase;
+import java.util.Calendar;
+import org.apache.hadoop.chukwa.database.Macro;
+import org.apache.hadoop.chukwa.util.DatabaseWriter;
+import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
+import org.apache.hadoop.chukwa.database.TableCreator;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.util.ArrayList;
+import java.util.Date;
+
+public class TestDatabasePrepareStatement extends TestCase {
+
+  long[] timeWindow = {7, 30, 91, 365, 3650};
+  String cluster = "demo";
+  long current = Calendar.getInstance().getTimeInMillis();
+
+  public void setUp() {
+    System.setProperty("CLUSTER","demo");
+    DatabaseWriter db = new DatabaseWriter(cluster);
+    String buffer = "";
+    File aFile = new File(System.getenv("CHUKWA_CONF_DIR")
+                 + File.separator + "database_create_tables.sql");
+    buffer = readFile(aFile);
+    String tables[] = buffer.split(";");
+    for(String table : tables) {
+      if(table.length()>5) {
+        db.execute(table);
+      }
+    }
+    db.close();
+    for(int i=0;i<timeWindow.length;i++) {
+      TableCreator tc = new TableCreator();
+      long start = current;
+      long end = current + (timeWindow[i]*1440*60*1000);
+      tc.createTables(start, end);
+    }
+  }
+
+  public void tearDown() {
+    DatabaseWriter db = null;
+    try {
+      db = new DatabaseWriter(cluster);
+      ResultSet rs = db.query("show tables");
+      ArrayList<String> list = new ArrayList<String>();
+      while(rs.next()) {
+        String table = rs.getString(1);
+        list.add(table);
+      }
+      for(String table : list) {
+//        db.execute("drop table "+table);
+      }
+    } catch(Throwable ex) {
+    } finally {
+      if(db!=null) {
+        db.close();
+      }
+    }
+  }
+
+  public String readFile(File aFile) {
+    StringBuffer contents = new StringBuffer();
+    try {
+      BufferedReader input = new BufferedReader(new FileReader(aFile));
+      try {
+        String line = null; // not declared within while loop
+        while ((line = input.readLine()) != null) {
+          contents.append(line);
+          contents.append(System.getProperty("line.separator"));
+        }
+      } finally {
+        input.close();
+      }
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    }
+    return contents.toString();
+  }
+
+  public void testPrepareStatement() {
+    DatabaseWriter db = new DatabaseWriter(cluster);
+    Date today = new Date();
+    long current = today.getTime();
+    Timestamp timestamp = new Timestamp(current);
+    String hostname="chukwa.example.org";
+    String query = "insert into [system_metrics] set timestamp='"+timestamp.toString()+"',
host='"+hostname+"', cpu_user_pcnt=100;";
+    Macro mp = new Macro(current, current, query);
+    query = mp.toString();
+    try {
+      db.execute(query);
+      query = "select timestamp,host,cpu_user_pcnt from [system_metrics] where timestamp=?
and host=? and cpu_user_pcnt=?;";
+      mp = new Macro(current, current, query);
+      query = mp.toString();
+      ArrayList<Object> parms = new ArrayList<Object>();
+      parms.add(current);
+      parms.add(hostname);
+      parms.add(100); 
+      ResultSet rs = db.query(query, parms);
+      while(rs.next()) {
+        assertTrue(hostname.intern()==rs.getString(2).intern());
+        assertTrue(100==rs.getInt(3));
+      }
+      db.close();
+    } catch(SQLException ex) {
+      fail("Fail to run SQL statement:"+ExceptionUtil.getStackTrace(ex));
+    }
+  }
+
+}

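One detail worth noting: setObject leaves the SQL type mapping to the JDBC driver. The test above binds a long against the timestamp column, while job_viewer.jsp below binds java.sql.Timestamp; the explicit temporal type is the less driver-dependent choice. An illustrative comparison, reusing the parms list from the test:

    Timestamp ts = new Timestamp(System.currentTimeMillis());
    parms.add(ts);            // maps unambiguously to SQL TIMESTAMP
    parms.add(ts.getTime());  // a long; the mapping depends on the driver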
Modified: hadoop/chukwa/trunk/src/web/hicc/jsp/job_viewer.jsp
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/web/hicc/jsp/job_viewer.jsp?rev=774531&r1=774530&r2=774531&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/web/hicc/jsp/job_viewer.jsp (original)
+++ hadoop/chukwa/trunk/src/web/hicc/jsp/job_viewer.jsp Wed May 13 21:04:10 2009
@@ -17,7 +17,7 @@
  * limitations under the License.
  */
 %>
-<%@ page import = "java.text.DecimalFormat,java.text.NumberFormat,java.sql.*,java.io.*, org.json.*, java.util.Calendar, java.util.Date, java.text.SimpleDateFormat, java.util.*, org.apache.hadoop.chukwa.hicc.ClusterConfig, org.apache.hadoop.chukwa.hicc.TimeHandler, org.apache.hadoop.chukwa.util.DatabaseWriter, org.apache.hadoop.chukwa.database.Macro, org.apache.hadoop.chukwa.util.XssFilter, org.apache.hadoop.chukwa.database.DatabaseConfig" %> 
+<%@ page import = "java.text.DecimalFormat,java.text.NumberFormat,java.sql.*,java.io.*, org.json.*, java.util.Calendar, java.util.Date, java.text.SimpleDateFormat, java.util.*, org.apache.hadoop.chukwa.hicc.ClusterConfig, org.apache.hadoop.chukwa.hicc.TimeHandler, org.apache.hadoop.chukwa.util.DatabaseWriter, org.apache.hadoop.chukwa.database.Macro, org.apache.hadoop.chukwa.util.XssFilter, org.apache.hadoop.chukwa.database.DatabaseConfig, java.util.ArrayList"  %> 
 <%
     XssFilter xf = new XssFilter(request);
     NumberFormat nf = new DecimalFormat("###,###,###,##0.00");
@@ -69,12 +69,19 @@
     String query = "";
     queryBuilder.append("select * from [mr_job] where finish_time between '[start]' and '[end]'
");
     if(xf.getParameter("job_id")!=null) {
-      queryBuilder.append("and job_id='");
-      queryBuilder.append(xf.getParameter("job_id"));
-      queryBuilder.append("'");
-      mp = new Macro(start,end,queryBuilder.toString(), request);
+      queryBuilder = new StringBuilder();
+      mp = new Macro(start,end,"[mr_job]", request);
       query = mp.toString();
-      ResultSet rs = dbw.query(query);
+      queryBuilder.append("select * from ");
+      queryBuilder.append(query);
+      queryBuilder.append(" where finish_time between ? and ? ");
+      queryBuilder.append(" and job_id=?");
+      ArrayList<Object> parms = new ArrayList<Object>();
+      parms.add(new Timestamp(start));
+      parms.add(new Timestamp(end));
+      parms.add(xf.getParameter("job_id"));
+      query = queryBuilder.toString();
+      ResultSet rs = dbw.query(query, parms);
       ResultSetMetaData rmeta = rs.getMetaData();
       int col = rmeta.getColumnCount();
       JSONObject data = new JSONObject();

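The restructured job_viewer.jsp applies the Macro to the table token alone, so macro substitution can no longer disturb the ? placeholders in the WHERE clause. Schematically (the expanded table name is illustrative):

    // Macro expansion now covers only the table token...
    Macro mp = new Macro(start, end, "[mr_job]", request);
    String table = mp.toString();        // e.g. "mr_job_week" after expansion
    // ...while the WHERE clause keeps its JDBC placeholders intact.
    String query = "select * from " + table
                 + " where finish_time between ? and ? and job_id=?";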
Modified: hadoop/chukwa/trunk/src/web/hicc/jsp/single-series-chart-javascript.jsp
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/web/hicc/jsp/single-series-chart-javascript.jsp?rev=774531&r1=774530&r2=774531&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/web/hicc/jsp/single-series-chart-javascript.jsp (original)
+++ hadoop/chukwa/trunk/src/web/hicc/jsp/single-series-chart-javascript.jsp Wed May 13 21:04:10 2009
@@ -17,17 +17,28 @@
  * limitations under the License.
  */
 %>
-<%@ page import = "java.sql.*,java.io.*, java.util.Calendar, java.util.Date, java.text.SimpleDateFormat,
java.util.*, org.apache.hadoop.chukwa.hicc.ClusterConfig, org.apache.hadoop.chukwa.hicc.TimeHandler,
org.apache.hadoop.chukwa.hicc.Chart, org.apache.hadoop.chukwa.hicc.DatasetMapper, org.apache.hadoop.chukwa.database.DatabaseConfig,
org.apache.hadoop.chukwa.database.Macro, org.apache.hadoop.chukwa.util.XssFilter"  %> 
-<%
-   XssFilter xf = new XssFilter(request);
-   response.setHeader("boxId", xf.getParameter("boxId"));
-   response.setContentType("text/html; chartset=UTF-8//IGNORE");
-%>
+<%@ page import = "java.sql.*" %>
+<%@ page import = "java.io.*" %>
+<%@ page import = "java.util.Calendar" %>
+<%@ page import = "java.util.Date" %>
+<%@ page import = "java.text.SimpleDateFormat" %>
+<%@ page import = "java.util.*" %>
+<%@ page import = "org.apache.hadoop.chukwa.hicc.ClusterConfig" %>
+<%@ page import = "org.apache.hadoop.chukwa.hicc.TimeHandler" %>
+<%@ page import = "org.apache.hadoop.chukwa.hicc.Chart" %>
+<%@ page import = "org.apache.hadoop.chukwa.hicc.DatasetMapper" %>
+<%@ page import = "org.apache.hadoop.chukwa.database.DatabaseConfig" %>
+<%@ page import = "org.apache.hadoop.chukwa.database.Macro" %>
+<%@ page import = "org.apache.hadoop.chukwa.util.XssFilter" %> 
 <%
+    XssFilter xf = new XssFilter(request);
+    response.setHeader("boxId", xf.getParameter("boxId"));
+    response.setContentType("text/html; chartset=UTF-8//IGNORE");
     String boxId=xf.getParameter("boxId");
     String render="line";
     String cluster = (String) session.getAttribute("cluster");
     String graphType = xf.getParameter("graph_type");
+    ArrayList<Object> parms = new ArrayList<Object>();
     int width=300;
     int height=200;
     if(request.getParameter("width")!=null) {
@@ -42,16 +53,6 @@
     }
     String match=xf.getParameter("match");
     String group = xf.getParameter("group");
-    if(match!=null) {
-        String matched = (String)session.getAttribute(match);
-        if(matched==null || matched.equals("")) {
-            match="";
-        } else {
-            match=match+"="+matched;
-        }
-    } else {
-        match="";
-    }
     ClusterConfig cc = new ClusterConfig();
     String jdbc = cc.getURL(cluster);
     String path = "";
@@ -90,20 +91,10 @@
     if(start<=0 || end<=0) { %>
 No time range specified.  Select a time range through widget preference, or use Time widget.
 <%  } else {
-       String timefield = "timestamp";
-       String dateclause = timefield+" >= '"+startS+"' and "+timefield+" <= '"+endS+"'";
+       String dateclause = " timestamp between ? and ? ";
        if(request.getParameter("period")!=null && request.getParameter("period").equals("0"))
{
            dateclause = "";
        }
-       String minclause = "";
-       if(request.getParameter("minnodes")!=null) {
-           minclause="and j.NumOfMachines >= "+xf.getParameter("minnodes");
-       }
-       String whereclause = "";
-       if(request.getParameter("user")!=null && !request.getParameter("user").equals(""))
{
-           whereclause="and j.UserID = "+xf.getParameter("user");
-       }
-       String mrtimeclause = "";
        try {
            org.apache.hadoop.chukwa.util.DriverManagerUtil.loadDriver().newInstance();
        } catch (Exception ex) {
@@ -117,22 +108,22 @@
            }
            int counter = 0;
            String[] group_items = ((String)session.getAttribute(xf.getParameter("group_items"))).split(",");
-           String appendDomain = xf.getParameter("append_domain");
-           if(appendDomain==null) {
-               appendDomain="";
-           }
            if(group_items!=null) {
+               StringBuilder matchBuilder = new StringBuilder();
                for(String item : group_items) {
                    if(counter!=0) {
-                       match = match + " or ";
+                       matchBuilder.append("or");
                    } else {
-                       match = "(";
+                       matchBuilder.append("(");
                    }
-                   match = match + group + " = '"+ item+ appendDomain +"'";
+                   matchBuilder.append(group);
+                   matchBuilder.append(" = ? ");
+                   parms.add(item);
                    counter++;
                }
-               if(!match.equals("")) {
-                   match = match + ")";
+               if(counter!=0) {
+                   matchBuilder.append(")");
+                   match = matchBuilder.toString();
                }
            }
        }
@@ -150,14 +141,29 @@
       TreeMap<String, TreeMap<String, Double>> dataMap = new TreeMap<String, TreeMap<String, Double>>();
        for(String tmpTable : tables) {
            String query = null;
-           if(!dateclause.equals("") && !match.equals("")) {
-               dateclause=" and "+dateclause;
-           }
+           StringBuilder q = new StringBuilder();
+           q.append("select ");
+           q.append(timestamp);
+           q.append(",");
            if(group!=null) {
-               query = "select "+timestamp+","+group+","+metrics+" from "+tmpTable+" where
"+match+dateclause+" order by timestamp";
-           } else {
-               query = "select "+timestamp+","+metrics+" from "+tmpTable+" where "+match+dateclause+"
order by timestamp";
+             q.append(group);
+             q.append(",");
            }
+           q.append(metrics);
+           q.append(" from ");
+           q.append(tmpTable);
+           q.append(" where ");
+           if(match!=null) {
+             q.append(match);
+           }
+           if(match!=null && match.intern()!="".intern()) {
+             q.append(" and ");
+           }
+           q.append(dateclause);
+           q.append(" order by timestamp");
+           query = q.toString();
+           parms.add(startS);
+           parms.add(endS);
            DatasetMapper dataFinder = new DatasetMapper(jdbc);
            boolean groupBySecondColumn=false;
            if(group!=null) {
@@ -172,7 +178,7 @@
                Macro mp = new Macro(start,end,query, request);
                query = mp.toString();
            }
-           dataFinder.execute(query,groupBySecondColumn,odometer,graphType);
+           dataFinder.execute(query,groupBySecondColumn,odometer,graphType, parms);
            List<String> tmpLabels = dataFinder.getXAxisMap();
            TreeMap<String, TreeMap<String, Double>> tmpDataMap = dataFinder.getDataset();
            for(int t=0;t<tmpLabels.size();t++) {

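In the rewritten page, parameter order has to match placeholder order in the assembled SQL: the group items bound inside the match clause come first, then the two date bounds from dateclause. For a two-host group the query and bindings come out roughly as follows (table and host names are illustrative):

    String match = "(host = ? or host = ? )";
    String dateclause = " timestamp between ? and ? ";
    String query = "select timestamp,host,cpu_user_pcnt from system_metrics_week"
                 + " where " + match + " and " + dateclause
                 + " order by timestamp";
    List<Object> parms = new ArrayList<Object>();
    parms.add("host1.example.org");  // first ? (match clause)
    parms.add("host2.example.org");  // second ? (match clause)
    parms.add(startS);               // third ?: start of the time window
    parms.add(endS);                 // fourth ?: end of the time window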
Modified: hadoop/chukwa/trunk/src/web/hicc/jsp/util.jsp
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/web/hicc/jsp/util.jsp?rev=774531&r1=774530&r2=774531&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/web/hicc/jsp/util.jsp (original)
+++ hadoop/chukwa/trunk/src/web/hicc/jsp/util.jsp Wed May 13 21:04:10 2009
@@ -17,7 +17,20 @@
  * limitations under the License.
  */
 %>
-<%@ page import = "java.text.DecimalFormat,java.text.NumberFormat,java.sql.*,java.io.*,
org.json.*, java.util.Calendar, java.util.Date, java.text.SimpleDateFormat, java.util.*, org.apache.hadoop.chukwa.hicc.ClusterConfig,
org.apache.hadoop.chukwa.hicc.TimeHandler, org.apache.hadoop.chukwa.util.DatabaseWriter, org.apache.hadoop.chukwa.database.Macro,
org.apache.hadoop.chukwa.util.XssFilter, org.apache.hadoop.chukwa.database.DatabaseConfig"
 %> 
+<%@ page import = "java.text.DecimalFormat,java.text.NumberFormat" %>
+<%@ page import = "java.sql.*" %>
+<%@ page import = "java.io.*" %>
+<%@ page import = "org.json.*" %>
+<%@ page import = "java.util.Calendar" %>
+<%@ page import = "java.util.Date" %>
+<%@ page import = "java.text.SimpleDateFormat" %>
+<%@ page import = "java.util.*" %>
+<%@ page import = "org.apache.hadoop.chukwa.hicc.ClusterConfig" %>
+<%@ page import = "org.apache.hadoop.chukwa.hicc.TimeHandler" %>
+<%@ page import = "org.apache.hadoop.chukwa.util.DatabaseWriter" %>
+<%@ page import = "org.apache.hadoop.chukwa.database.Macro" %>
+<%@ page import = "org.apache.hadoop.chukwa.util.XssFilter" %>
+<%@ page import = "org.apache.hadoop.chukwa.database.DatabaseConfig" %>
 <%
     XssFilter xf = new XssFilter(request);
     NumberFormat nf = new DecimalFormat("###,###,###,##0.00");
@@ -87,10 +100,12 @@
         query = queryAdd.toString();
       }
     } else {
-      query = "select count(*) from [util] where timestamp between '[start]' and '[end]'
and queue='"+xf.getParameter("queue")+"' group by user;";
+      query = "select count(*) from [util] where timestamp between '[start]' and '[end]'
and queue=? group by user;";
       mp = new Macro(start,end,query, request);
       query = mp.toString();
-      ResultSet rs = dbw.query(query);
+      ArrayList<Object> parms = new ArrayList<Object>();
+      parms.add(xf.getParameter("queue"));
+      ResultSet rs = dbw.query(query, parms);
       if(rs.next()) {
         total = rs.getInt(1);
       }

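util.jsp follows the same split as job_viewer.jsp: the time bounds still go through Macro substitution as '[start]'/'[end]' literals, and only the user-controlled queue value becomes a binding. A sketch of the resulting call (expanded table name, dates, and queue value are illustrative):

    String query = "select count(*) from util_week where timestamp between"
                 + " '2009-05-13 00:00:00' and '2009-05-13 23:59:59'"
                 + " and queue=? group by user;";
    ArrayList<Object> parms = new ArrayList<Object>();
    parms.add("research");  // queue name from the request, bound safely
    ResultSet rs = dbw.query(query, parms);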

