chukwa-commits mailing list archives

From ey...@apache.org
Subject svn commit: r767726 - in /hadoop/chukwa/trunk: ./ conf/ src/java/org/apache/hadoop/chukwa/database/ src/test/org/apache/hadoop/chukwa/database/ src/test/org/apache/hadoop/chukwa/extraction/database/
Date Wed, 22 Apr 2009 23:51:26 GMT
Author: eyang
Date: Wed Apr 22 23:51:25 2009
New Revision: 767726

URL: http://svn.apache.org/viewvc?rev=767726&view=rev
Log:
CHUKWA-174. Added test cases for database partitioning, database aggregation, and data loading. (Eric Yang)

Added:
    hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java
    hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java
    hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java
    hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/database/
    hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/database/TestDatabaseMetricDataLoader.java
Modified:
    hadoop/chukwa/trunk/conf/jdbc.conf.template
    hadoop/chukwa/trunk/default.properties
    hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Aggregator.java
    hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Macro.java

Modified: hadoop/chukwa/trunk/conf/jdbc.conf.template
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/conf/jdbc.conf.template?rev=767726&r1=767725&r2=767726&view=diff
==============================================================================
--- hadoop/chukwa/trunk/conf/jdbc.conf.template (original)
+++ hadoop/chukwa/trunk/conf/jdbc.conf.template Wed Apr 22 23:51:25 2009
@@ -1 +1 @@
-unknown=jdbc:://localhost:3306/demo?user=example
+demo=jdbc:mysql://localhost:3306/demo?user=root
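
For context, jdbc.conf maps a cluster name to a JDBC connection string; the template previously shipped a malformed URL ("jdbc:://") under a placeholder key. Below is a minimal sketch of how such a mapping could be consumed, assuming a plain property-file lookup (an illustration only, not necessarily how DatabaseWriter resolves the key):

import java.io.FileInputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

public class JdbcConfExample {
  public static void main(String[] args) throws Exception {
    // Load the cluster-to-URL map from jdbc.conf (one key=url pair per line).
    Properties conf = new Properties();
    conf.load(new FileInputStream(
        System.getenv("CHUKWA_CONF_DIR") + "/jdbc.conf"));
    // "demo" is the cluster key introduced by this change.
    String url = conf.getProperty("demo");
    // Requires the MySQL JDBC driver on the classpath.
    Connection conn = DriverManager.getConnection(url);
    System.out.println("Connected to " + conn.getMetaData().getURL());
    conn.close();
  }
}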

Modified: hadoop/chukwa/trunk/default.properties
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/default.properties?rev=767726&r1=767725&r2=767726&view=diff
==============================================================================
--- hadoop/chukwa/trunk/default.properties (original)
+++ hadoop/chukwa/trunk/default.properties Wed Apr 22 23:51:25 2009
@@ -17,4 +17,5 @@
 hadoop.conf=/usr/local/hadoop
 rpm.uid=chukwa
 rpm.gid=users
+test.exclude=TestDatabase*
 CHUKWA_LOG_DIR=/usr/local/chukwa/var/log

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Aggregator.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Aggregator.java?rev=767726&r1=767725&r2=767726&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Aggregator.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Aggregator.java Wed Apr 22 23:51:25 2009
@@ -79,7 +79,6 @@
     try {
       Macro macroProcessor = new Macro(start, end, query);
       query = macroProcessor.toString();
-      log.info(query);
       db.execute(query);
     } catch (Exception e) {
       log.error(query);
@@ -93,6 +92,10 @@
     process(current, current, query);
   }
 
+  public void setWriter(DatabaseWriter dbw) {
+    db = dbw;
+  }
+
   public static void main(String[] args) {
     long startTime = 0;
     long endTime = 0;
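
The new setWriter hook lets a caller inject a DatabaseWriter instead of having Aggregator open its own connection, which is what makes TestDatabaseAggregator below possible. In miniature (the query string is a hypothetical stand-in for a line from aggregator.sql):

import org.apache.hadoop.chukwa.database.Aggregator;
import org.apache.hadoop.chukwa.util.DatabaseWriter;

public class AggregatorInjectionExample {
  public static void main(String[] args) {
    Aggregator dba = new Aggregator();
    // "demo" is a cluster key resolved through jdbc.conf.
    DatabaseWriter db = new DatabaseWriter("demo");
    dba.setWriter(db);  // injection point added in this revision
    // [system_metrics] is a macro that Aggregator expands to a
    // partitioned table name before execution.
    dba.process("insert into [system_metrics] select * from [system_metrics];");
    db.close();  // the caller owns the writer's lifecycle
  }
}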

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Macro.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Macro.java?rev=767726&r1=767725&r2=767726&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Macro.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/database/Macro.java Wed Apr 22 23:51:25 2009
@@ -116,6 +116,12 @@
                         first=false;
                     } else if(type==java.sql.Types.TIMESTAMP) {
                         // Skip the column
+                        if(macro.indexOf("sum(")==0) {
+                            meta=meta+"sum("+name+")";
+                        } else {
+                            meta=meta+"avg("+name+")";
+                        }
+                        first=false;
                     } else {
                         if(macro.indexOf("sum(")==0) {
                             meta=meta+"SUM("+name+")";

Added: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java?rev=767726&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java (added)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseAggregator.java Wed Apr 22 23:51:25 2009
@@ -0,0 +1,144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.chukwa.database;
+
+import junit.framework.TestCase;
+import java.util.Calendar;
+import org.apache.hadoop.chukwa.database.Macro;
+import org.apache.hadoop.chukwa.util.DatabaseWriter;
+import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
+import org.apache.hadoop.chukwa.database.Aggregator;
+import org.apache.hadoop.chukwa.database.TableCreator;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+
+public class TestDatabaseAggregator extends TestCase {
+
+  long[] timeWindow = {7, 30, 91, 365, 3650};
+  String[] tables = {"system_metrics","disk","cluster_system_metrics","cluster_disk","mr_job","mr_task","dfs_namenode","dfs_datanode","dfs_fsnamesystem","dfs_throughput","hadoop_jvm","hadoop_mapred","hdfs_usage"};
+  String cluster = "demo";
+  long current = Calendar.getInstance().getTimeInMillis();
+
+  public void setUp() {
+    System.setProperty("CLUSTER","demo");
+    DatabaseWriter db = new DatabaseWriter(cluster);
+    String buffer = "";
+    File aFile = new File(System.getenv("CHUKWA_CONF_DIR")
+                 + File.separator + "database_create_tables.sql");
+    buffer = readFile(aFile);
+    String tables[] = buffer.split(";");
+    for(String table : tables) {
+      if(table.length()>5) {
+        db.execute(table);
+      }
+    }
+    db.close();
+    for(int i=0;i<timeWindow.length;i++) {
+      TableCreator tc = new TableCreator();
+      long start = current;
+      long end = current + (timeWindow[i]*1440*60*1000);
+      tc.createTables(start, end);
+    }
+  }
+
+  public void tearDown() {
+    DatabaseWriter db = null;
+    try {
+      db = new DatabaseWriter(cluster);
+      ResultSet rs = db.query("show tables");
+      ArrayList<String> list = new ArrayList<String>();
+      while(rs.next()) {
+        String table = rs.getString(1);
+        list.add(table);
+      }
+      for(String table : list) {
+        db.execute("drop table "+table);
+      }
+    } catch(Throwable ex) {
+    } finally {
+      if(db!=null) {
+        db.close();
+      }
+    }
+  }
+
+  public void verifyTable(String table) {
+    ChukwaConfiguration cc = new ChukwaConfiguration();
+    String query = "select * from ["+table+"];";
+    Macro mp = new Macro(current,query);
+    query = mp.toString();
+    try {
+      DatabaseWriter db = new DatabaseWriter(cluster);
+      ResultSet rs = db.query(query);
+      while(rs.next()) {
+        int i = 1;
+        String value = rs.getString(i);
+      }
+      db.close();
+    } catch(SQLException ex) {
+      fail("SQL Exception: "+ExceptionUtil.getStackTrace(ex));
+    }
+  }
+
+  public String readFile(File aFile) {
+    StringBuffer contents = new StringBuffer();
+    try {
+      BufferedReader input = new BufferedReader(new FileReader(aFile));
+      try {
+        String line = null; // not declared within while loop
+        while ((line = input.readLine()) != null) {
+          contents.append(line);
+          contents.append(System.getProperty("line.separator"));
+        }
+      } finally {
+        input.close();
+      }
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    }
+    return contents.toString();
+  }
+
+  public void testAggregator() {
+    Aggregator dba = new Aggregator();
+    DatabaseWriter db = new DatabaseWriter(cluster);
+    dba.setWriter(db);
+    String queries = Aggregator.getContents(new File(System
+        .getenv("CHUKWA_CONF_DIR")
+        + File.separator + "aggregator.sql"));
+    String[] query = queries.split("\n");
+    for (int i = 0; i < query.length; i++) {
+      if(query[i].indexOf("#")==-1) {
+        try {
+          dba.process(query[i]);
+          assertTrue("Completed query: "+query[i],true);
+        } catch(Throwable ex) {
+          fail("Exception: "+ExceptionUtil.getStackTrace(ex));
+        }
+      }
+    }
+    db.close();
+  }
+
+}

Added: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java?rev=767726&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java (added)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseDataExpiration.java Wed Apr 22 23:51:25 2009
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.chukwa.database;
+
+import junit.framework.TestCase;
+import java.util.Calendar;
+import org.apache.hadoop.chukwa.database.Macro;
+import org.apache.hadoop.chukwa.util.DatabaseWriter;
+import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
+import org.apache.hadoop.chukwa.database.Aggregator;
+import org.apache.hadoop.chukwa.database.TableCreator;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+
+public class TestDatabaseDataExpiration extends TestCase {
+
+  long[] timeWindow = {7, 30, 91, 365, 3650};
+  String[] tables = {"system_metrics","disk","cluster_system_metrics","cluster_disk","mr_job","mr_task","dfs_namenode","dfs_datanode","dfs_fsnamesystem","dfs_throughput","hadoop_jvm","hadoop_mapred","hdfs_usage"};
+  String cluster = "demo";
+  long current = Calendar.getInstance().getTimeInMillis();
+
+  public void setUp() {
+    System.setProperty("CLUSTER","demo");
+    DatabaseWriter db = new DatabaseWriter(cluster);
+    String buffer = "";
+    File aFile = new File(System.getenv("CHUKWA_CONF_DIR")
+                 + File.separator + "database_create_tables.sql");
+    buffer = readFile(aFile);
+    String tables[] = buffer.split(";");
+    for(String table : tables) {
+      if(table.length()>5) {
+        db.execute(table);
+      }
+    }
+    db.close();
+    for(int i=0;i<timeWindow.length;i++) {
+      TableCreator tc = new TableCreator();
+      long start = current;
+      long end = current + (timeWindow[i]*1440*60*1000);
+      tc.createTables(start, end);
+    }
+  }
+
+  public void tearDown() {
+    DatabaseWriter db = null;
+    try {
+      db = new DatabaseWriter(cluster);
+      ResultSet rs = db.query("show tables");
+      ArrayList<String> list = new ArrayList<String>();
+      while(rs.next()) {
+        String table = rs.getString(1);
+        list.add(table);
+      }
+      for(String table : list) {
+        db.execute("drop table "+table);
+      }
+    } catch(Throwable ex) {
+    } finally {
+      if(db!=null) {
+        db.close();
+      }
+    }
+  }
+
+  public void verifyTable(String table) {
+    ChukwaConfiguration cc = new ChukwaConfiguration();
+    String query = "select * from ["+table+"];";
+    Macro mp = new Macro(current,query);
+    query = mp.toString();
+    try {
+      DatabaseWriter db = new DatabaseWriter(cluster);
+      ResultSet rs = db.query(query);
+      while(rs.next()) {
+        int i = 1;
+        String value = rs.getString(i);
+      }
+      db.close();
+    } catch(SQLException ex) {
+      fail("SQL Exception: "+ExceptionUtil.getStackTrace(ex));
+    }
+  }
+
+  public String readFile(File aFile) {
+    StringBuffer contents = new StringBuffer();
+    try {
+      BufferedReader input = new BufferedReader(new FileReader(aFile));
+      try {
+        String line = null; // not declared within while loop
+        while ((line = input.readLine()) != null) {
+          contents.append(line);
+          contents.append(System.getProperty("line.separator"));
+        }
+      } finally {
+        input.close();
+      }
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    }
+    return contents.toString();
+  }
+
+  public void testDataExpiration() {
+    for(int i=0;i<timeWindow.length;i++) {
+      long start = current + (365L*1440*60*1000); // 365L forces long arithmetic; the int expression overflows
+      long end = start + (timeWindow[i]*1440*60*1000);
+      try {
+        DataExpiration de = new DataExpiration();
+        de.dropTables(start, end);
+      } catch(Throwable ex) {
+        fail("SQL Exception: "+ExceptionUtil.getStackTrace(ex));
+      }
+      assertTrue("DataExpiration executed.", true);
+      DatabaseWriter db = null;
+      try {
+        db = new DatabaseWriter(cluster);
+        String query = "select * from [system_metrics];";
+        Macro mp = new Macro(current,query);
+        query = mp.toString();
+        ResultSet rs = db.query(query);
+      } catch(SQLException ex) {
+        assertTrue("Table is not supposed to exist.",true);
+        db.close();
+      }
+    } 
+  }
+
+}

Added: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java?rev=767726&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java (added)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/database/TestDatabaseTableCreator.java Wed Apr 22 23:51:25 2009
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.chukwa.database;
+
+import junit.framework.TestCase;
+import java.util.Calendar;
+import org.apache.hadoop.chukwa.database.Macro;
+import org.apache.hadoop.chukwa.util.DatabaseWriter;
+import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
+import org.apache.hadoop.chukwa.database.Aggregator;
+import org.apache.hadoop.chukwa.database.TableCreator;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+
+public class TestDatabaseTableCreator extends TestCase {
+
+  long[] timeWindow = {7, 30, 91, 365, 3650};
+  String[] tables = {"system_metrics","disk","cluster_system_metrics","cluster_disk","mr_job","mr_task","dfs_namenode","dfs_datanode","dfs_fsnamesystem","dfs_throughput","hadoop_jvm","hadoop_mapred","hdfs_usage"};
+  String cluster = "demo";
+  long current = Calendar.getInstance().getTimeInMillis();
+
+  public void setUp() {
+    System.setProperty("CLUSTER","demo");
+    DatabaseWriter db = new DatabaseWriter(cluster);
+    String buffer = "";
+    File aFile = new File(System.getenv("CHUKWA_CONF_DIR")
+                 + File.separator + "database_create_tables.sql");
+    buffer = readFile(aFile);
+    String tables[] = buffer.split(";");
+    for(String table : tables) {
+      if(table.length()>5) {
+        db.execute(table);
+      }
+    }
+    db.close();
+    for(int i=0;i<timeWindow.length;i++) {
+      TableCreator tc = new TableCreator();
+      long start = current;
+      long end = current + (timeWindow[i]*1440*60*1000);
+      tc.createTables(start, end);
+    }
+  }
+
+  public void tearDown() {
+    DatabaseWriter db = null;
+    try {
+      db = new DatabaseWriter(cluster);
+      ResultSet rs = db.query("show tables");
+      ArrayList<String> list = new ArrayList<String>();
+      while(rs.next()) {
+        String table = rs.getString(1);
+        list.add(table);
+      }
+      for(String table : list) {
+        db.execute("drop table "+table);
+      }
+    } catch(Throwable ex) {
+    } finally {
+      if(db!=null) {
+        db.close();
+      }
+    }
+  }
+
+  public void verifyTable(String table) {
+    ChukwaConfiguration cc = new ChukwaConfiguration();
+    String query = "select * from ["+table+"];";
+    Macro mp = new Macro(current,query);
+    query = mp.toString();
+    try {
+      DatabaseWriter db = new DatabaseWriter(cluster);
+      ResultSet rs = db.query(query);
+      while(rs.next()) {
+        int i = 1;
+        String value = rs.getString(i);
+      }
+      db.close();
+    } catch(SQLException ex) {
+      fail("SQL Exception: "+ExceptionUtil.getStackTrace(ex));
+    }
+  }
+
+  public String readFile(File aFile) {
+    StringBuffer contents = new StringBuffer();
+    try {
+      BufferedReader input = new BufferedReader(new FileReader(aFile));
+      try {
+        String line = null; // not declared within while loop
+        while ((line = input.readLine()) != null) {
+          contents.append(line);
+          contents.append(System.getProperty("line.separator"));
+        }
+      } finally {
+        input.close();
+      }
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    }
+    return contents.toString();
+  }
+
+  public void testTableCreator() {
+    for(int i=0;i<timeWindow.length;i++) {
+      try {
+        DatabaseWriter db = new DatabaseWriter(cluster);
+        for(String table : tables) {
+          String query = "select * from ["+table+"];";
+          Macro mp = new Macro(current,query);
+          query = mp.toString();
+          ResultSet rs = db.query(query);
+          rs.last();
+          int count = rs.getRow();
+          assertTrue("Table should exist and return empty result.", count==0);
+        }
+        db.close();
+      } catch(SQLException ex) {
+        fail("SQL Exception: "+ExceptionUtil.getStackTrace(ex));
+      }
+    }
+  }
+
+  public void testTables() {
+    for(String table : tables) {
+      verifyTable(table);
+      assertTrue("Table verified: " + table, true);
+    }
+  }
+
+}

Added: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/database/TestDatabaseMetricDataLoader.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/database/TestDatabaseMetricDataLoader.java?rev=767726&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/database/TestDatabaseMetricDataLoader.java (added)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/extraction/database/TestDatabaseMetricDataLoader.java Wed Apr 22 23:51:25 2009
@@ -0,0 +1,144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.chukwa.extraction.database;
+
+import junit.framework.TestCase;
+import java.util.Calendar;
+import org.apache.hadoop.chukwa.database.Macro;
+import org.apache.hadoop.chukwa.util.DatabaseWriter;
+import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
+import org.apache.hadoop.chukwa.util.ExceptionUtil;
+import org.apache.hadoop.chukwa.extraction.database.MetricDataLoader;
+import org.apache.hadoop.chukwa.database.TableCreator;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.util.ArrayList;
+
+public class TestDatabaseMetricDataLoader extends TestCase {
+
+  long[] timeWindow = {7, 30, 91, 365, 3650};
+  String[] tables = {"system_metrics","disk","mr_job","mr_task"}; //,"dfs_namenode","dfs_datanode","dfs_fsnamesystem","dfs_throughput","hadoop_jvm","hadoop_mapred","hdfs_usage"};
+  String cluster = "demo";
+  long current = Calendar.getInstance().getTimeInMillis();
+
+  public void setUp() {
+    System.setProperty("CLUSTER","demo");
+    DatabaseWriter db = new DatabaseWriter(cluster);
+    String buffer = "";
+    File aFile = new File(System.getenv("CHUKWA_CONF_DIR")
+                 + File.separator + "database_create_tables.sql");
+    buffer = readFile(aFile);
+    String tables[] = buffer.split(";");
+    for(String table : tables) {
+      if(table.length()>5) {
+        db.execute(table);
+      }
+    }
+    db.close();
+    for(int i=0;i<timeWindow.length;i++) {
+      TableCreator tc = new TableCreator();
+      long start = current;
+      long end = current + (timeWindow[i]*1440*60*1000);
+      tc.createTables(start, end);
+    }
+  }
+
+  public void tearDown() {
+    DatabaseWriter db = null;
+    try {
+      db = new DatabaseWriter(cluster);
+      ResultSet rs = db.query("show tables");
+      ArrayList<String> list = new ArrayList<String>();
+      while(rs.next()) {
+        String table = rs.getString(1);
+        list.add(table);
+      }
+      for(String table : list) {
+        db.execute("drop table "+table);
+      }
+    } catch(Throwable ex) {
+    } finally {
+      if(db!=null) {
+        db.close();
+      }
+    }
+  }
+
+  public String readFile(File aFile) {
+    StringBuffer contents = new StringBuffer();
+    try {
+      BufferedReader input = new BufferedReader(new FileReader(aFile));
+      try {
+        String line = null; // not declared within while loop
+        while ((line = input.readLine()) != null) {
+          contents.append(line);
+          contents.append(System.getProperty("line.separator"));
+        }
+      } finally {
+        input.close();
+      }
+    } catch (IOException ex) {
+      ex.printStackTrace();
+    }
+    return contents.toString();
+  }
+
+  public void testMetricDataLoader() {
+    String srcDir = System.getenv("CHUKWA_DATA_DIR");
+    try {
+      ChukwaConfiguration conf = new ChukwaConfiguration();
+      FileSystem fs = FileSystem.get(conf);
+      FileStatus[] sources = fs.listStatus(new Path(srcDir));
+      MetricDataLoader mdl = new MetricDataLoader(cluster);
+      for (FileStatus sequenceFile : sources) {
+        mdl.process(sequenceFile.getPath());
+      }
+    } catch (Throwable ex) {
+      fail("SQL Exception: "+ExceptionUtil.getStackTrace(ex));
+    }
+    DatabaseWriter db = new DatabaseWriter(cluster);
+    for(int i=0;i<tables.length;i++) {
+      String query = "select [avg("+tables[i]+")] from ["+tables[i]+"]";
+      Macro mp = new Macro(current,query);
+      query = mp.toString();
+      try {
+        ResultSet rs = db.query(query);
+        ResultSetMetaData rsmd = rs.getMetaData();
+        int numberOfColumns = rsmd.getColumnCount();
+        while(rs.next()) {
+          for(int j=1;j<=numberOfColumns;j++) {
+            assertTrue("Table: "+tables[i]+", Column: "+rsmd.getColumnName(j)+", contains no data.",rs.getString(j)!=null);
+          }
+        }
+      } catch(Throwable ex) {
+        fail("MetricDataLoader failed: "+ExceptionUtil.getStackTrace(ex));
+      }
+    }
+    db.close();
+    assertTrue("MetricDataLoader executed successfully.",true);
+  }
+
+}


