chukwa-commits mailing list archives

From: ey...@apache.org
Subject: svn commit: r785904 - in /hadoop/chukwa: branches/chukwa-0.2/conf/ branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/database/ branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/util/ branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/databas...
Date: Thu, 18 Jun 2009 05:01:03 GMT
Author: eyang
Date: Thu Jun 18 05:01:02 2009
New Revision: 785904

URL: http://svn.apache.org/viewvc?rev=785904&view=rev
Log:
CHUKWA-304. Propagate exception upward for database test cases. (Eric Yang)
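
[Editorial note] For context, the pattern this change applies is simple: low-level database helpers rethrow SQLException instead of only logging it, and JUnit fixtures convert any setup failure into an explicit test failure. Below is a minimal sketch of that shape with illustrative class names only (ExampleWriter, ExampleSetup, ExampleDatabaseTest are not the actual Chukwa classes):

    import java.sql.Connection;
    import java.sql.SQLException;
    import java.sql.Statement;
    import junit.framework.TestCase;

    class ExampleWriter {
      private final Connection conn;
      ExampleWriter(Connection conn) { this.conn = conn; }

      // Before this commit the equivalent method only logged errors; afterwards
      // the SQLException is propagated so callers, including tests, can react.
      public void execute(String query) throws SQLException {
        Statement stmt = conn.createStatement();
        try {
          stmt.execute(query);
        } finally {
          stmt.close();
        }
      }
    }

    class ExampleSetup {
      void setUpDatabase() throws Exception {
        // create tables, load sample data, etc.; any failure is propagated upward
      }
    }

    // JUnit 3 style fixture, mirroring how the modified tests surface setup errors.
    class ExampleDatabaseTest extends TestCase {
      public void setUp() {
        try {
          new ExampleSetup().setUpDatabase();
        } catch (Exception e) {
          // fail loudly instead of silently running against a broken schema
          fail("Database setup failed: " + e.getMessage());
        }
      }
      public void testPlaceholder() {
        // placeholder so JUnit 3 finds at least one test method
      }
    }

With this shape, a broken schema or an unreachable database shows up as a failed test rather than a silently skipped setup step.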

Removed:
    hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseSetup.java
    hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/rest/TestDatabaseBaseTestCase.java
Modified:
    hadoop/chukwa/branches/chukwa-0.2/conf/aggregator.sql
    hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/database/Aggregator.java
    hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/database/TableCreator.java
    hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java
    hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/DatabaseSetup.java
    hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java
    hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java
    hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseIostat.java
    hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java
    hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java
    hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/dataloader/TestDatabaseMetricDataLoader.java
    hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/rest/DatabaseRestServerSetup.java
    hadoop/chukwa/trunk/conf/aggregator.sql
    hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Aggregator.java
    hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/TableCreator.java
    hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java
    hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/DatabaseSetup.java
    hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java
    hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java
    hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseIostat.java
    hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java
    hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java
    hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/dataloader/TestDatabaseMetricDataLoader.java
    hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/rest/DatabaseRestServerSetup.java

Modified: hadoop/chukwa/branches/chukwa-0.2/conf/aggregator.sql
URL: http://svn.apache.org/viewvc/hadoop/chukwa/branches/chukwa-0.2/conf/aggregator.sql?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/branches/chukwa-0.2/conf/aggregator.sql (original)
+++ hadoop/chukwa/branches/chukwa-0.2/conf/aggregator.sql Thu Jun 18 05:01:02 2009
@@ -118,12 +118,12 @@
 replace into [chukwahttpsender_quarter] (select chukwa_timestamp,recordname,hostname,contextname,avg(httppost_rate),avg(httpthrowable_raw),avg(httpexception_rate),avg(httpthrowable),avg(httpthrowable_rate),avg(collectorrollover_rate),avg(httppost_raw),avg(period),avg(httpexception_raw),avg(httppost),avg(httptimeoutexception),avg(httptimeoutexception_raw),avg(collectorrollover_raw),avg(collectorrollover),avg(httptimeoutexception_rate),avg(httpexception)  from [chukwahttpsender_month] where chukwa_timestamp between '[past_90_minutes]' and '[now]' group by FLOOR(UNIX_TIMESTAMP(chukwa_timestamp)/1800),recordname,hostname,contextname);
 replace into [chukwahttpsender_year] (select chukwa_timestamp,recordname,hostname,contextname,avg(httppost_rate),avg(httpthrowable_raw),avg(httpexception_rate),avg(httpthrowable),avg(httpthrowable_rate),avg(collectorrollover_rate),avg(httppost_raw),avg(period),avg(httpexception_raw),avg(httppost),avg(httptimeoutexception),avg(httptimeoutexception_raw),avg(collectorrollover_raw),avg(collectorrollover),avg(httptimeoutexception_rate),avg(httpexception)  from [chukwahttpsender_quarter] where chukwa_timestamp between '[past_540_minutes]' and '[now]' group by FLOOR(UNIX_TIMESTAMP(chukwa_timestamp)/10800),recordname,hostname,contextname);
 replace into [chukwahttpsender_decade] (select chukwa_timestamp,recordname,hostname,contextname,avg(httppost_rate),avg(httpthrowable_raw),avg(httpexception_rate),avg(httpthrowable),avg(httpthrowable_rate),avg(collectorrollover_rate),avg(httppost_raw),avg(period),avg(httpexception_raw),avg(httppost),avg(httptimeoutexception),avg(httptimeoutexception_raw),avg(collectorrollover_raw),avg(collectorrollover),avg(httptimeoutexception_rate),avg(httpexception)  from [chukwahttpsender_year] where chukwa_timestamp between '[past_2160_minutes]' and '[now]' group by FLOOR(UNIX_TIMESTAMP(chukwa_timestamp)/43200),recordname,hostname,contextname);
-
+#
 replace into [mr_job_conf_month] (select * from [mr_job_conf_week] where ts between '[past_15_minutes]' and '[now]');
 replace into [mr_job_conf_quarter] (select * from [mr_job_conf_week] where ts between '[past_15_minutes]' and '[now]');
 replace into [mr_job_conf_year] (select * from [mr_job_conf_week] where ts between '[past_15_minutes]' and '[now]');
 replace into [mr_job_conf_decade] (select * from [mr_job_conf_week] where ts between '[past_15_minutes]' and '[now]');
-
+#
 replace into [user_job_summary_month] (select FLOOR(UNIX_TIMESTAMP(timestamp)/300),userid, sum(totalJobs), sum(dataLocalMaps), sum(rackLocalMaps), sum(remoteMaps), sum(mapInputBytes), sum(reduceOutputRecords), sum(mapSlotHours), sum(reduceSlotHours), sum(totalMaps), sum(totalReduces) from [user_job_summary_week] where timestamp between '[past_15_minutes]' and '[now]' group by FLOOR(UNIX_TIMESTAMP(timestamp)/300),userid);
 replace into [user_job_summary_quarter] (select FLOOR(UNIX_TIMESTAMP(timestamp)/1800),userid, sum(totalJobs), sum(dataLocalMaps), sum(rackLocalMaps), sum(remoteMaps), sum(mapInputBytes), sum(reduceOutputRecords), sum(mapSlotHours), sum(reduceSlotHours), sum(totalMaps), sum(totalReduces) from [user_job_summary_week] where timestamp between '[past_90_minutes]' and '[now]' group by FLOOR(UNIX_TIMESTAMP(timestamp)/1800),userid);
 replace into [user_job_summary_year] (select FLOOR(UNIX_TIMESTAMP(timestamp)/10800),userid, sum(totalJobs), sum(dataLocalMaps), sum(rackLocalMaps), sum(remoteMaps), sum(mapInputBytes), sum(reduceOutputRecords), sum(mapSlotHours), sum(reduceSlotHours), sum(totalMaps), sum(totalReduces) from [user_job_summary_week] where timestamp between '[past_540_minutes]' and '[now]' group by FLOOR(UNIX_TIMESTAMP(timestamp)/10800),userid);

Modified: hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/database/Aggregator.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/database/Aggregator.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/database/Aggregator.java (original)
+++ hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/database/Aggregator.java Thu Jun 18 05:01:02 2009
@@ -74,18 +74,18 @@
     return contents.toString();
   }
 
-  public void process(long start, long end, String query) {
+  public void process(long start, long end, String query) throws Throwable {
     try {
       Macro macroProcessor = new Macro(start, end, query);
       query = macroProcessor.toString();
       db.execute(query);
-    } catch (Exception e) {
-      log.error(query);
-      log.error(ExceptionUtil.getStackTrace(e));
+    } catch(Exception e) {
+      log.error("Query: "+query);
+      throw new Exception("Aggregation failed for: "+query);
     }
   }
 
-  public void process(String query) {
+  public void process(String query) throws Throwable {
     long start = current;
     long end = current;
     process(current, current, query);
@@ -130,10 +130,14 @@
         } else {
           Aggregator dba = new Aggregator();
           long start = Calendar.getInstance().getTimeInMillis();
-          if(startTime!=0 && endTime!=0) {
-            dba.process(startTime, startTime, query[i]);
-          } else {
-            dba.process(query[i]);
+          try {
+            if(startTime!=0 && endTime!=0) {
+              dba.process(startTime, startTime, query[i]);
+            } else {
+              dba.process(query[i]);
+            }
+          } catch(Throwable e) {
+            log.error("Invalid query:"+query[i]);
           }
           long end = Calendar.getInstance().getTimeInMillis();
           long duration = end - start;

Modified: hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/database/TableCreator.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/database/TableCreator.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/database/TableCreator.java (original)
+++ hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/database/TableCreator.java Thu Jun 18 05:01:02 2009
@@ -28,6 +28,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.chukwa.util.DatabaseWriter;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 public class TableCreator {
   private static DatabaseConfig dbc = null;
@@ -39,74 +40,70 @@
     }
   }
 
-  public void createTables() {
+  public void createTables() throws Exception {
     long now = (new Date()).getTime();
     createTables(now, now);
   }
 
-  public void createTables(long start, long end) {
+  public void createTables(long start, long end) throws Exception {
     String cluster = System.getProperty("CLUSTER");
     if (cluster == null) {
       cluster = "unknown";
     }
     DatabaseWriter dbw = new DatabaseWriter(cluster);
-    try {
-      HashMap<String, String> dbNames = dbc.startWith("report.db.name.");
-      Iterator<String> ki = dbNames.keySet().iterator();
-      while (ki.hasNext()) {
-        String name = ki.next();
-        String tableName = dbNames.get(name);
-        String[] tableList = dbc.findTableName(tableName, start, end);
-        log.debug("table name: " + tableList[0]);
-        try {
-          String[] parts = tableList[0].split("_");
-          int partition = Integer.parseInt(parts[parts.length - 2]);
-          String table = "";
-          for (int i = 0; i < parts.length - 2; i++) {
-            if (i != 0) {
-              table = table + "_";
-            }
-            table = table + parts[i];
-          }
-          String query = "show create table " + table + "_template;";
-          ResultSet rs = dbw.query(query);
-          while (rs.next()) {
-            log.debug("table schema: " + rs.getString(2));
-            query = rs.getString(2);
-            log.debug("template table name:" + table + "_template");
-            log.debug("replacing with table name:" + table + "_" + partition
-                + "_" + parts[parts.length - 1]);
-            log.debug("creating table: " + query);
-            String createPartition = query.replaceFirst(table + "_template",
-                table + "_" + partition + "_" + parts[parts.length - 1]);
-            createPartition = createPartition.replaceFirst("TABLE",
-                "TABLE IF NOT EXISTS");
-            dbw.execute(createPartition);
-            partition++;
-            createPartition = query.replaceFirst(table + "_template", table
-                + "_" + partition + "_" + parts[parts.length - 1]);
-            createPartition = createPartition.replaceFirst("TABLE",
-                "TABLE IF NOT EXISTS");
-            dbw.execute(createPartition);
-            partition++;
-            createPartition = query.replaceFirst(table + "_template", table
-                + "_" + partition + "_" + parts[parts.length - 1]);
-            createPartition = createPartition.replaceFirst("TABLE",
-                "TABLE IF NOT EXISTS");
-            dbw.execute(createPartition);
+    HashMap<String, String> dbNames = dbc.startWith("report.db.name.");
+    Iterator<String> ki = dbNames.keySet().iterator();
+    while (ki.hasNext()) {
+      String name = ki.next();
+      String tableName = dbNames.get(name);
+      String[] tableList = dbc.findTableName(tableName, start, end);
+      log.debug("table name: " + tableList[0]);
+      try {
+        String[] parts = tableList[0].split("_");
+        int partition = Integer.parseInt(parts[parts.length - 2]);
+        String table = "";
+        for (int i = 0; i < parts.length - 2; i++) {
+          if (i != 0) {
+            table = table + "_";
           }
-        } catch (NumberFormatException e) {
-          log.error("Error in parsing table partition number, skipping table:"
-              + tableList[0]);
-        } catch (ArrayIndexOutOfBoundsException e) {
-          log.debug("Skipping table:" + tableList[0]
-              + ", because it has no partition configuration.");
-        } catch (SQLException e) {
-
+          table = table + parts[i];
+        }
+        String query = "show create table " + table + "_template;";
+        ResultSet rs = dbw.query(query);
+        while (rs.next()) {
+          log.debug("table schema: " + rs.getString(2));
+          query = rs.getString(2);
+          log.debug("template table name:" + table + "_template");
+          log.debug("replacing with table name:" + table + "_" + partition
+              + "_" + parts[parts.length - 1]);
+          log.debug("creating table: " + query);
+          String createPartition = query.replaceFirst(table + "_template",
+              table + "_" + partition + "_" + parts[parts.length - 1]);
+          createPartition = createPartition.replaceFirst("TABLE",
+              "TABLE IF NOT EXISTS");
+          dbw.execute(createPartition);
+          partition++;
+          createPartition = query.replaceFirst(table + "_template", table
+              + "_" + partition + "_" + parts[parts.length - 1]);
+          createPartition = createPartition.replaceFirst("TABLE",
+              "TABLE IF NOT EXISTS");
+          dbw.execute(createPartition);
+          partition++;
+          createPartition = query.replaceFirst(table + "_template", table
+              + "_" + partition + "_" + parts[parts.length - 1]);
+          createPartition = createPartition.replaceFirst("TABLE",
+              "TABLE IF NOT EXISTS");
+          dbw.execute(createPartition);
         }
+      } catch (NumberFormatException e) {
+        log.error("Error in parsing table partition number, skipping table:"
+            + tableList[0]);
+      } catch (ArrayIndexOutOfBoundsException e) {
+        log.debug("Skipping table:" + tableList[0]
+            + ", because it has no partition configuration.");
+      } catch (SQLException e) {
+        throw e;
       }
-    } catch (Exception e) {
-      e.printStackTrace();
     }
   }
 
@@ -132,7 +129,11 @@
         usage();
       }
     } else {
-      tc.createTables();
+      try {
+        tc.createTables();
+      } catch (Exception e) {
+        log.error(ExceptionUtil.getStackTrace(e));
+      }
     }
 
   }

Modified: hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java (original)
+++ hadoop/chukwa/branches/chukwa-0.2/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java Thu Jun 18 05:01:02 2009
@@ -82,7 +82,7 @@
     }
   }
 
-  public void execute(String query) {
+  public void execute(String query) throws SQLException {
     try {
       stmt = conn.createStatement();
       stmt.execute(query);
@@ -93,6 +93,7 @@
       log.error("SQLException: " + ex.getMessage());
       log.error("SQLState: " + ex.getSQLState());
       log.error("VendorError: " + ex.getErrorCode());
+      throw ex;
     } finally {
       if (stmt != null) {
         try {

Modified: hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/DatabaseSetup.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/DatabaseSetup.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/DatabaseSetup.java (original)
+++ hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/DatabaseSetup.java Thu Jun 18 05:01:02 2009
@@ -20,6 +20,7 @@
 import java.io.*;
 import java.sql.*;
 import java.util.*;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.hadoop.chukwa.util.DatabaseWriter;
 import org.apache.hadoop.chukwa.database.TableCreator;
 
@@ -29,7 +30,7 @@
   public String cluster = "demo";
   public long current = Calendar.getInstance().getTimeInMillis();
 
-  public void setUpDatabase() {
+  public void setUpDatabase() throws Exception {
     System.setProperty("CLUSTER","demo");
     DatabaseWriter db = new DatabaseWriter(cluster);
     String buffer = "";

Modified: hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java (original)
+++ hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java Thu Jun 18 05:01:02 2009
@@ -37,7 +37,11 @@
     public DatabaseSetup dbSetup = new DatabaseSetup();
 
     public void setUp() {
+      try{
 	dbSetup.setUpDatabase();
+      } catch (Exception e) {
+        fail(ExceptionUtil.getStackTrace(e));
+      }
     }
 
     public void tearDown() {

Modified: hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java (original)
+++ hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java Thu Jun 18 05:01:02 2009
@@ -40,7 +40,7 @@
   String cluster = "demo";
   long current = Calendar.getInstance().getTimeInMillis();
 
-  public void setUp() {
+  public void setUp() throws Exception {
     System.setProperty("CLUSTER","demo");
     DatabaseWriter db = new DatabaseWriter(cluster);
     String buffer = "";

Modified: hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseIostat.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseIostat.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseIostat.java (original)
+++ hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseIostat.java Thu Jun 18 05:01:02 2009
@@ -73,6 +73,7 @@
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
 import org.apache.hadoop.chukwa.util.DatabaseWriter;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.hadoop.chukwa.database.Macro;
 
 import junit.framework.TestCase;
@@ -177,7 +178,11 @@
     String tables[] = buffer.split(";");
     for(String table : tables) {
       if(table.length()>5) {
-        db.execute(table);
+        try {
+          db.execute(table);
+        } catch (Exception e) {
+          fail("Fail to retrieve meta data for database table: "+table);
+        }
       }
     }
     db.close();
@@ -185,7 +190,11 @@
       TableCreator tc = new TableCreator();
       long start = current;
       long end = current + (timeWindow[i]*1440*60*1000);
-      tc.createTables(start, end);
+      try {
+        tc.createTables(start, end);
+      } catch (Exception e) {
+        fail("Fail to create database tables."+ExceptionUtil.getStackTrace(e));
+      }
     }    
   }
 

Modified: hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java (original)
+++ hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java Thu Jun 18 05:01:02 2009
@@ -50,7 +50,11 @@
     String tables[] = buffer.split(";");
     for(String table : tables) {
       if(table.length()>5) {
-        db.execute(table);
+        try {
+          db.execute(table);
+        } catch (Exception e) {
+          fail("Fail to retrieve meta data from table:"+table);
+        }
       }
     }
     db.close();
@@ -58,7 +62,11 @@
       TableCreator tc = new TableCreator();
       long start = current;
       long end = current + (timeWindow[i]*1440*60*1000);
-      tc.createTables(start, end);
+      try {
+        tc.createTables(start, end);
+      } catch (Exception e) {
+        fail("Fail to create database tables.");
+      }
     }
   }
 
@@ -73,7 +81,7 @@
         list.add(table);
       }
       for(String table : list) {
-//        db.execute("drop table "+table);
+        db.execute("drop table "+table);
       }
     } catch(Throwable ex) {
     } finally {

Modified: hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java (original)
+++ hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java Thu Jun 18 05:01:02 2009
@@ -50,7 +50,11 @@
     String tables[] = buffer.split(";");
     for(String table : tables) {
       if(table.length()>5) {
-        db.execute(table);
+        try {
+          db.execute(table);
+        } catch (Exception e) {
+          fail("Fail to retrieve meta data for database table:"+table);
+        }
       }
     }
     db.close();
@@ -58,7 +62,12 @@
       TableCreator tc = new TableCreator();
       long start = current;
       long end = current + (timeWindow[i]*1440*60*1000);
-      tc.createTables(start, end);
+      try {
+        tc.createTables(start, end);
+      } catch (Exception e) {
+        e.printStackTrace();
+        fail("Fail to create database tables.");
+      }
     }
   }
 

Modified: hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/dataloader/TestDatabaseMetricDataLoader.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/dataloader/TestDatabaseMetricDataLoader.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/dataloader/TestDatabaseMetricDataLoader.java (original)
+++ hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/dataloader/TestDatabaseMetricDataLoader.java Thu Jun 18 05:01:02 2009
@@ -54,7 +54,11 @@
     String tables[] = buffer.split(";");
     for(String table : tables) {
       if(table.length()>5) {
-        db.execute(table);
+        try {
+          db.execute(table);
+        } catch (Exception e) {
+          fail("Fail to retrieve meta data from database table: "+table);
+        }
       }
     }
     db.close();
@@ -62,7 +66,11 @@
       TableCreator tc = new TableCreator();
       long start = current;
       long end = current + (timeWindow[i]*1440*60*1000);
-      tc.createTables(start, end);
+      try {
+        tc.createTables(start, end);
+      } catch (Exception e) {
+        fail("Fail to create database tables.");
+      }
     }
   }
 
@@ -107,7 +115,8 @@
   }
 
   public void testMetricDataLoader() {
-    String srcDir = System.getenv("CHUKWA_DATA_DIR");
+    boolean skip=false;
+    String srcDir = System.getenv("CHUKWA_DATA_DIR") + File.separator + "samples";
     try {
       ChukwaConfiguration conf = new ChukwaConfiguration();
       FileSystem fs = FileSystem.get(conf);
@@ -116,29 +125,34 @@
         MetricDataLoader mdl = new MetricDataLoader(conf, fs, sequenceFile.getPath().toUri().toString());
         mdl.call();
       }
+      if(sources.length==0) {
+        skip=true;
+      }
     } catch (Throwable ex) {
       fail("SQL Exception: "+ExceptionUtil.getStackTrace(ex));
     }
-    DatabaseWriter db = new DatabaseWriter(cluster);
-    for(int i=0;i<tables.length;i++) {
-      String query = "select [avg("+tables[i]+")] from ["+tables[i]+"]";
-      Macro mp = new Macro(current,query);
-      query = mp.toString();
-      try {
-        ResultSet rs = db.query(query);
-        ResultSetMetaData rsmd = rs.getMetaData();
-        int numberOfColumns = rsmd.getColumnCount();
-        while(rs.next()) {
-          for(int j=1;j<=numberOfColumns;j++) {
-            assertTrue("Table: "+tables[i]+", Column: "+rsmd.getColumnName(j)+", contains no data.",rs.getString(j)!=null);
+    if(!skip) {
+      DatabaseWriter db = new DatabaseWriter(cluster);
+      for(int i=0;i<tables.length;i++) {
+        String query = "select [avg("+tables[i]+")] from ["+tables[i]+"]";
+        Macro mp = new Macro(current,query);
+        query = mp.toString();
+        try {
+          ResultSet rs = db.query(query);
+          ResultSetMetaData rsmd = rs.getMetaData();
+          int numberOfColumns = rsmd.getColumnCount();
+          while(rs.next()) {
+            for(int j=1;j<=numberOfColumns;j++) {
+              assertTrue("Table: "+tables[i]+", Column: "+rsmd.getColumnName(j)+", contains no data.",rs.getString(j)!=null);
+            }
           }
+        } catch(Throwable ex) {
+          fail("MetricDataLoader failed: "+ExceptionUtil.getStackTrace(ex));
         }
-      } catch(Throwable ex) {
-        fail("MetricDataLoader failed: "+ExceptionUtil.getStackTrace(ex));
       }
+      db.close();
+      assertTrue("MetricDataLoader executed successfully.",true);
     }
-    db.close();
-    assertTrue("MetricDataLoader executed successfully.",true);
   }
 
 }

Modified: hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/rest/DatabaseRestServerSetup.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/rest/DatabaseRestServerSetup.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/rest/DatabaseRestServerSetup.java (original)
+++ hadoop/chukwa/branches/chukwa-0.2/src/test/org/apache/hadoop/chukwa/rest/DatabaseRestServerSetup.java Thu Jun 18 05:01:02 2009
@@ -84,7 +84,7 @@
 	  return cluster;
   }
 
-  protected void setUp() {
+  protected void setUp() throws Exception {
 	  dbSetup.setUpDatabase();
 	  String cluster = getCluster();
 	  DatabaseWriter dbw = new DatabaseWriter(cluster);

Modified: hadoop/chukwa/trunk/conf/aggregator.sql
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/conf/aggregator.sql?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/trunk/conf/aggregator.sql (original)
+++ hadoop/chukwa/trunk/conf/aggregator.sql Thu Jun 18 05:01:02 2009
@@ -118,12 +118,12 @@
 replace into [chukwahttpsender_quarter] (select chukwa_timestamp,recordname,hostname,contextname,avg(httppost_rate),avg(httpthrowable_raw),avg(httpexception_rate),avg(httpthrowable),avg(httpthrowable_rate),avg(collectorrollover_rate),avg(httppost_raw),avg(period),avg(httpexception_raw),avg(httppost),avg(httptimeoutexception),avg(httptimeoutexception_raw),avg(collectorrollover_raw),avg(collectorrollover),avg(httptimeoutexception_rate),avg(httpexception)  from [chukwahttpsender_month] where chukwa_timestamp between '[past_90_minutes]' and '[now]' group by FLOOR(UNIX_TIMESTAMP(chukwa_timestamp)/1800),recordname,hostname,contextname);
 replace into [chukwahttpsender_year] (select chukwa_timestamp,recordname,hostname,contextname,avg(httppost_rate),avg(httpthrowable_raw),avg(httpexception_rate),avg(httpthrowable),avg(httpthrowable_rate),avg(collectorrollover_rate),avg(httppost_raw),avg(period),avg(httpexception_raw),avg(httppost),avg(httptimeoutexception),avg(httptimeoutexception_raw),avg(collectorrollover_raw),avg(collectorrollover),avg(httptimeoutexception_rate),avg(httpexception)  from [chukwahttpsender_quarter] where chukwa_timestamp between '[past_540_minutes]' and '[now]' group by FLOOR(UNIX_TIMESTAMP(chukwa_timestamp)/10800),recordname,hostname,contextname);
 replace into [chukwahttpsender_decade] (select chukwa_timestamp,recordname,hostname,contextname,avg(httppost_rate),avg(httpthrowable_raw),avg(httpexception_rate),avg(httpthrowable),avg(httpthrowable_rate),avg(collectorrollover_rate),avg(httppost_raw),avg(period),avg(httpexception_raw),avg(httppost),avg(httptimeoutexception),avg(httptimeoutexception_raw),avg(collectorrollover_raw),avg(collectorrollover),avg(httptimeoutexception_rate),avg(httpexception)  from [chukwahttpsender_year] where chukwa_timestamp between '[past_2160_minutes]' and '[now]' group by FLOOR(UNIX_TIMESTAMP(chukwa_timestamp)/43200),recordname,hostname,contextname);
-
+#
 replace into [mr_job_conf_month] (select * from [mr_job_conf_week] where ts between '[past_15_minutes]' and '[now]');
 replace into [mr_job_conf_quarter] (select * from [mr_job_conf_week] where ts between '[past_15_minutes]' and '[now]');
 replace into [mr_job_conf_year] (select * from [mr_job_conf_week] where ts between '[past_15_minutes]' and '[now]');
 replace into [mr_job_conf_decade] (select * from [mr_job_conf_week] where ts between '[past_15_minutes]' and '[now]');
-
+#
 replace into [user_job_summary_month] (select FLOOR(UNIX_TIMESTAMP(timestamp)/300),userid, sum(totalJobs), sum(dataLocalMaps), sum(rackLocalMaps), sum(remoteMaps), sum(mapInputBytes), sum(reduceOutputRecords), sum(mapSlotHours), sum(reduceSlotHours), sum(totalMaps), sum(totalReduces) from [user_job_summary_week] where timestamp between '[past_15_minutes]' and '[now]' group by FLOOR(UNIX_TIMESTAMP(timestamp)/300),userid);
 replace into [user_job_summary_quarter] (select FLOOR(UNIX_TIMESTAMP(timestamp)/1800),userid, sum(totalJobs), sum(dataLocalMaps), sum(rackLocalMaps), sum(remoteMaps), sum(mapInputBytes), sum(reduceOutputRecords), sum(mapSlotHours), sum(reduceSlotHours), sum(totalMaps), sum(totalReduces) from [user_job_summary_week] where timestamp between '[past_90_minutes]' and '[now]' group by FLOOR(UNIX_TIMESTAMP(timestamp)/1800),userid);
 replace into [user_job_summary_year] (select FLOOR(UNIX_TIMESTAMP(timestamp)/10800),userid, sum(totalJobs), sum(dataLocalMaps), sum(rackLocalMaps), sum(remoteMaps), sum(mapInputBytes), sum(reduceOutputRecords), sum(mapSlotHours), sum(reduceSlotHours), sum(totalMaps), sum(totalReduces) from [user_job_summary_week] where timestamp between '[past_540_minutes]' and '[now]' group by FLOOR(UNIX_TIMESTAMP(timestamp)/10800),userid);

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Aggregator.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Aggregator.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Aggregator.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Aggregator.java Thu Jun 18 05:01:02 2009
@@ -74,18 +74,18 @@
     return contents.toString();
   }
 
-  public void process(long start, long end, String query) {
+  public void process(long start, long end, String query) throws Throwable {
     try {
       Macro macroProcessor = new Macro(start, end, query);
       query = macroProcessor.toString();
       db.execute(query);
-    } catch (Exception e) {
-      log.error(query);
-      log.error(ExceptionUtil.getStackTrace(e));
+    } catch(Exception e) {
+      log.error("Query: "+query);
+      throw new Exception("Aggregation failed for: "+query);
     }
   }
 
-  public void process(String query) {
+  public void process(String query) throws Throwable {
     long start = current;
     long end = current;
     process(current, current, query);
@@ -130,10 +130,14 @@
         } else {
           Aggregator dba = new Aggregator();
           long start = Calendar.getInstance().getTimeInMillis();
-          if(startTime!=0 && endTime!=0) {
-            dba.process(startTime, startTime, query[i]);
-          } else {
-            dba.process(query[i]);
+          try {
+            if(startTime!=0 && endTime!=0) {
+              dba.process(startTime, startTime, query[i]);
+            } else {
+              dba.process(query[i]);
+            }
+          } catch(Throwable e) {
+            log.error("Invalid query:"+query[i]);
           }
           long end = Calendar.getInstance().getTimeInMillis();
           long duration = end - start;

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/TableCreator.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/TableCreator.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/TableCreator.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/TableCreator.java Thu Jun 18 05:01:02 2009
@@ -28,6 +28,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.chukwa.util.DatabaseWriter;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 
 public class TableCreator {
   private static DatabaseConfig dbc = null;
@@ -39,74 +40,70 @@
     }
   }
 
-  public void createTables() {
+  public void createTables() throws Exception {
     long now = (new Date()).getTime();
     createTables(now, now);
   }
 
-  public void createTables(long start, long end) {
+  public void createTables(long start, long end) throws Exception {
     String cluster = System.getProperty("CLUSTER");
     if (cluster == null) {
       cluster = "unknown";
     }
     DatabaseWriter dbw = new DatabaseWriter(cluster);
-    try {
-      HashMap<String, String> dbNames = dbc.startWith("report.db.name.");
-      Iterator<String> ki = dbNames.keySet().iterator();
-      while (ki.hasNext()) {
-        String name = ki.next();
-        String tableName = dbNames.get(name);
-        String[] tableList = dbc.findTableName(tableName, start, end);
-        log.debug("table name: " + tableList[0]);
-        try {
-          String[] parts = tableList[0].split("_");
-          int partition = Integer.parseInt(parts[parts.length - 2]);
-          String table = "";
-          for (int i = 0; i < parts.length - 2; i++) {
-            if (i != 0) {
-              table = table + "_";
-            }
-            table = table + parts[i];
-          }
-          String query = "show create table " + table + "_template;";
-          ResultSet rs = dbw.query(query);
-          while (rs.next()) {
-            log.debug("table schema: " + rs.getString(2));
-            query = rs.getString(2);
-            log.debug("template table name:" + table + "_template");
-            log.debug("replacing with table name:" + table + "_" + partition
-                + "_" + parts[parts.length - 1]);
-            log.debug("creating table: " + query);
-            String createPartition = query.replaceFirst(table + "_template",
-                table + "_" + partition + "_" + parts[parts.length - 1]);
-            createPartition = createPartition.replaceFirst("TABLE",
-                "TABLE IF NOT EXISTS");
-            dbw.execute(createPartition);
-            partition++;
-            createPartition = query.replaceFirst(table + "_template", table
-                + "_" + partition + "_" + parts[parts.length - 1]);
-            createPartition = createPartition.replaceFirst("TABLE",
-                "TABLE IF NOT EXISTS");
-            dbw.execute(createPartition);
-            partition++;
-            createPartition = query.replaceFirst(table + "_template", table
-                + "_" + partition + "_" + parts[parts.length - 1]);
-            createPartition = createPartition.replaceFirst("TABLE",
-                "TABLE IF NOT EXISTS");
-            dbw.execute(createPartition);
+    HashMap<String, String> dbNames = dbc.startWith("report.db.name.");
+    Iterator<String> ki = dbNames.keySet().iterator();
+    while (ki.hasNext()) {
+      String name = ki.next();
+      String tableName = dbNames.get(name);
+      String[] tableList = dbc.findTableName(tableName, start, end);
+      log.debug("table name: " + tableList[0]);
+      try {
+        String[] parts = tableList[0].split("_");
+        int partition = Integer.parseInt(parts[parts.length - 2]);
+        String table = "";
+        for (int i = 0; i < parts.length - 2; i++) {
+          if (i != 0) {
+            table = table + "_";
           }
-        } catch (NumberFormatException e) {
-          log.error("Error in parsing table partition number, skipping table:"
-              + tableList[0]);
-        } catch (ArrayIndexOutOfBoundsException e) {
-          log.debug("Skipping table:" + tableList[0]
-              + ", because it has no partition configuration.");
-        } catch (SQLException e) {
-
+          table = table + parts[i];
+        }
+        String query = "show create table " + table + "_template;";
+        ResultSet rs = dbw.query(query);
+        while (rs.next()) {
+          log.debug("table schema: " + rs.getString(2));
+          query = rs.getString(2);
+          log.debug("template table name:" + table + "_template");
+          log.debug("replacing with table name:" + table + "_" + partition
+              + "_" + parts[parts.length - 1]);
+          log.debug("creating table: " + query);
+          String createPartition = query.replaceFirst(table + "_template",
+              table + "_" + partition + "_" + parts[parts.length - 1]);
+          createPartition = createPartition.replaceFirst("TABLE",
+              "TABLE IF NOT EXISTS");
+          dbw.execute(createPartition);
+          partition++;
+          createPartition = query.replaceFirst(table + "_template", table
+              + "_" + partition + "_" + parts[parts.length - 1]);
+          createPartition = createPartition.replaceFirst("TABLE",
+              "TABLE IF NOT EXISTS");
+          dbw.execute(createPartition);
+          partition++;
+          createPartition = query.replaceFirst(table + "_template", table
+              + "_" + partition + "_" + parts[parts.length - 1]);
+          createPartition = createPartition.replaceFirst("TABLE",
+              "TABLE IF NOT EXISTS");
+          dbw.execute(createPartition);
         }
+      } catch (NumberFormatException e) {
+        log.error("Error in parsing table partition number, skipping table:"
+            + tableList[0]);
+      } catch (ArrayIndexOutOfBoundsException e) {
+        log.debug("Skipping table:" + tableList[0]
+            + ", because it has no partition configuration.");
+      } catch (SQLException e) {
+        throw e;
       }
-    } catch (Exception e) {
-      e.printStackTrace();
     }
   }
 
@@ -132,7 +129,11 @@
         usage();
       }
     } else {
-      tc.createTables();
+      try {
+        tc.createTables();
+      } catch (Exception e) {
+        log.error(ExceptionUtil.getStackTrace(e));
+      }
     }
 
   }

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/util/DatabaseWriter.java Thu Jun 18 05:01:02 2009
@@ -82,7 +82,7 @@
     }
   }
 
-  public void execute(String query) {
+  public void execute(String query) throws SQLException {
     try {
       stmt = conn.createStatement();
       stmt.execute(query);
@@ -93,6 +93,7 @@
       log.error("SQLException: " + ex.getMessage());
       log.error("SQLState: " + ex.getSQLState());
       log.error("VendorError: " + ex.getErrorCode());
+      throw ex;
     } finally {
       if (stmt != null) {
         try {

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/DatabaseSetup.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/DatabaseSetup.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/DatabaseSetup.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/DatabaseSetup.java Thu Jun 18 05:01:02 2009
@@ -20,6 +20,7 @@
 import java.io.*;
 import java.sql.*;
 import java.util.*;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.hadoop.chukwa.util.DatabaseWriter;
 import org.apache.hadoop.chukwa.database.TableCreator;
 
@@ -29,7 +30,7 @@
   public String cluster = "demo";
   public long current = Calendar.getInstance().getTimeInMillis();
 
-  public void setUpDatabase() {
+  public void setUpDatabase() throws Exception {
     System.setProperty("CLUSTER","demo");
     DatabaseWriter db = new DatabaseWriter(cluster);
     String buffer = "";

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java Thu Jun 18 05:01:02 2009
@@ -37,7 +37,11 @@
     public DatabaseSetup dbSetup = new DatabaseSetup();
 
     public void setUp() {
+      try{
 	dbSetup.setUpDatabase();
+      } catch (Exception e) {
+        fail(ExceptionUtil.getStackTrace(e));
+      }
     }
 
     public void tearDown() {

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java Thu Jun 18 05:01:02 2009
@@ -40,7 +40,7 @@
   String cluster = "demo";
   long current = Calendar.getInstance().getTimeInMillis();
 
-  public void setUp() {
+  public void setUp() throws Exception {
     System.setProperty("CLUSTER","demo");
     DatabaseWriter db = new DatabaseWriter(cluster);
     String buffer = "";

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseIostat.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseIostat.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseIostat.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseIostat.java Thu Jun 18 05:01:02 2009
@@ -73,6 +73,7 @@
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
 import org.apache.hadoop.chukwa.util.DatabaseWriter;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
 import org.apache.hadoop.chukwa.database.Macro;
 
 import junit.framework.TestCase;
@@ -177,7 +178,11 @@
     String tables[] = buffer.split(";");
     for(String table : tables) {
       if(table.length()>5) {
-        db.execute(table);
+        try {
+          db.execute(table);
+        } catch (Exception e) {
+          fail("Fail to retrieve meta data for database table: "+table);
+        }
       }
     }
     db.close();
@@ -185,7 +190,11 @@
       TableCreator tc = new TableCreator();
       long start = current;
       long end = current + (timeWindow[i]*1440*60*1000);
-      tc.createTables(start, end);
+      try {
+        tc.createTables(start, end);
+      } catch (Exception e) {
+        fail("Fail to create database tables."+ExceptionUtil.getStackTrace(e));
+      }
     }    
   }
 

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabasePrepareStatement.java Thu Jun 18 05:01:02 2009
@@ -50,7 +50,11 @@
     String tables[] = buffer.split(";");
     for(String table : tables) {
       if(table.length()>5) {
-        db.execute(table);
+        try {
+          db.execute(table);
+        } catch (Exception e) {
+          fail("Fail to retrieve meta data from table:"+table);
+        }
       }
     }
     db.close();
@@ -58,7 +62,11 @@
       TableCreator tc = new TableCreator();
       long start = current;
       long end = current + (timeWindow[i]*1440*60*1000);
-      tc.createTables(start, end);
+      try {
+        tc.createTables(start, end);
+      } catch (Exception e) {
+        fail("Fail to create database tables.");
+      }
     }
   }
 
@@ -73,7 +81,7 @@
         list.add(table);
       }
       for(String table : list) {
-//        db.execute("drop table "+table);
+        db.execute("drop table "+table);
       }
     } catch(Throwable ex) {
     } finally {

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java Thu Jun 18 05:01:02 2009
@@ -50,7 +50,11 @@
     String tables[] = buffer.split(";");
     for(String table : tables) {
       if(table.length()>5) {
-        db.execute(table);
+        try {
+          db.execute(table);
+        } catch (Exception e) {
+          fail("Fail to retrieve meta data for database table:"+table);
+        }
       }
     }
     db.close();
@@ -58,7 +62,12 @@
       TableCreator tc = new TableCreator();
       long start = current;
       long end = current + (timeWindow[i]*1440*60*1000);
-      tc.createTables(start, end);
+      try {
+        tc.createTables(start, end);
+      } catch (Exception e) {
+        e.printStackTrace();
+        fail("Fail to create database tables.");
+      }
     }
   }
 

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/dataloader/TestDatabaseMetricDataLoader.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/dataloader/TestDatabaseMetricDataLoader.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/dataloader/TestDatabaseMetricDataLoader.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/dataloader/TestDatabaseMetricDataLoader.java Thu Jun 18 05:01:02 2009
@@ -54,7 +54,11 @@
     String tables[] = buffer.split(";");
     for(String table : tables) {
       if(table.length()>5) {
-        db.execute(table);
+        try {
+          db.execute(table);
+        } catch (Exception e) {
+          fail("Fail to retrieve meta data from database table: "+table);
+        }
       }
     }
     db.close();
@@ -62,7 +66,11 @@
       TableCreator tc = new TableCreator();
       long start = current;
       long end = current + (timeWindow[i]*1440*60*1000);
-      tc.createTables(start, end);
+      try {
+        tc.createTables(start, end);
+      } catch (Exception e) {
+        fail("Fail to create database tables.");
+      }
     }
   }
 
@@ -107,7 +115,8 @@
   }
 
   public void testMetricDataLoader() {
-    String srcDir = System.getenv("CHUKWA_DATA_DIR");
+    boolean skip=false;
+    String srcDir = System.getenv("CHUKWA_DATA_DIR") + File.separator + "samples";
     try {
       ChukwaConfiguration conf = new ChukwaConfiguration();
       FileSystem fs = FileSystem.get(conf);
@@ -116,29 +125,34 @@
         MetricDataLoader mdl = new MetricDataLoader(conf, fs, sequenceFile.getPath().toUri().toString());
         mdl.call();
       }
+      if(sources.length==0) {
+        skip=true;
+      }
     } catch (Throwable ex) {
       fail("SQL Exception: "+ExceptionUtil.getStackTrace(ex));
     }
-    DatabaseWriter db = new DatabaseWriter(cluster);
-    for(int i=0;i<tables.length;i++) {
-      String query = "select [avg("+tables[i]+")] from ["+tables[i]+"]";
-      Macro mp = new Macro(current,query);
-      query = mp.toString();
-      try {
-        ResultSet rs = db.query(query);
-        ResultSetMetaData rsmd = rs.getMetaData();
-        int numberOfColumns = rsmd.getColumnCount();
-        while(rs.next()) {
-          for(int j=1;j<=numberOfColumns;j++) {
-            assertTrue("Table: "+tables[i]+", Column: "+rsmd.getColumnName(j)+", contains no data.",rs.getString(j)!=null);
+    if(!skip) {
+      DatabaseWriter db = new DatabaseWriter(cluster);
+      for(int i=0;i<tables.length;i++) {
+        String query = "select [avg("+tables[i]+")] from ["+tables[i]+"]";
+        Macro mp = new Macro(current,query);
+        query = mp.toString();
+        try {
+          ResultSet rs = db.query(query);
+          ResultSetMetaData rsmd = rs.getMetaData();
+          int numberOfColumns = rsmd.getColumnCount();
+          while(rs.next()) {
+            for(int j=1;j<=numberOfColumns;j++) {
+              assertTrue("Table: "+tables[i]+", Column: "+rsmd.getColumnName(j)+", contains no data.",rs.getString(j)!=null);
+            }
           }
+        } catch(Throwable ex) {
+          fail("MetricDataLoader failed: "+ExceptionUtil.getStackTrace(ex));
         }
-      } catch(Throwable ex) {
-        fail("MetricDataLoader failed: "+ExceptionUtil.getStackTrace(ex));
       }
+      db.close();
+      assertTrue("MetricDataLoader executed successfully.",true);
     }
-    db.close();
-    assertTrue("MetricDataLoader executed successfully.",true);
   }
 
 }

Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/rest/DatabaseRestServerSetup.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/rest/DatabaseRestServerSetup.java?rev=785904&r1=785903&r2=785904&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/rest/DatabaseRestServerSetup.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/rest/DatabaseRestServerSetup.java Thu Jun 18 05:01:02 2009
@@ -84,7 +84,7 @@
 	  return cluster;
   }
 
-  protected void setUp() {
+  protected void setUp() throws Exception {
 	  dbSetup.setUpDatabase();
 	  String cluster = getCluster();
 	  DatabaseWriter dbw = new DatabaseWriter(cluster);


