hadoop-common-commits mailing list archives

From omal...@apache.org
Subject svn commit: r685353 [11/13] - in /hadoop/core/trunk: ./ src/contrib/chukwa/ src/contrib/chukwa/bin/ src/contrib/chukwa/build/ src/contrib/chukwa/conf/ src/contrib/chukwa/dist/ src/contrib/chukwa/docs/ src/contrib/chukwa/docs/paper/ src/contrib/chukwa/h...
Date Tue, 12 Aug 2008 22:35:23 GMT
Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceException.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceException.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceException.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceException.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.extraction.engine.datasource;
+
+public class DataSourceException extends Exception
+{
+
+	/**
+	 * 
+	 */
+	private static final long serialVersionUID = -3648370237965886781L;
+
+	public DataSourceException()
+	{}
+
+	public DataSourceException(String message)
+	{
+		super(message);
+	}
+
+	public DataSourceException(Throwable cause)
+	{
+		super(cause);
+	}
+
+	public DataSourceException(String message, Throwable cause)
+	{
+		super(message, cause);
+	}
+
+}

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceFactory.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceFactory.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceFactory.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceFactory.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.extraction.engine.datasource;
+
+import java.util.HashMap;
+
+import org.apache.hadoop.chukwa.extraction.engine.datasource.database.DatabaseDS;
+import org.apache.hadoop.chukwa.extraction.engine.datasource.record.RecordDS;
+
+public class DataSourceFactory
+{
+	private static Object lock = new Object();
+	private static DataSourceFactory factory = null;
+	private HashMap<String, DataSource> dataSources = new HashMap<String, DataSource>();
+	
+	private DataSourceFactory()
+	{
+		// TODO load from config Name + class + threadSafe? 
+		
+		DataSource databaseDS = new DatabaseDS();
+		dataSources.put("MRJob", databaseDS);
+		dataSources.put("HodJob", databaseDS);
+		dataSources.put("QueueInfo", databaseDS);
+		
+		DataSource recordDS = new RecordDS();
+		dataSources.put("NameNode", recordDS);
+		dataSources.put("ChukwaLocalAgent", recordDS);
+	}
+	
+	public static DataSourceFactory getInstance()
+	{
+		synchronized(lock)
+		{
+			if ( factory == null)
+			{
+				factory = new DataSourceFactory();
+			}
+		}
+		return factory;
+	}
+	
+	public DataSource getDataSource(String datasourceName)
+	throws DataSourceException
+	{
+		if (dataSources.containsKey(datasourceName))
+		{
+			return dataSources.get(datasourceName);
+		}
+		else
+		{
+			DataSource hdfsDS = new RecordDS();
+			dataSources.put(datasourceName, hdfsDS);
+			return hdfsDS;
+			//TODO proto only!
+			// throw new DataSourceException("Unknown datasource");
+		}	
+	}
+}
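
For context, a minimal caller of the factory above might look like the following sketch. The class name FactoryUsageSketch is hypothetical, and the sketch assumes the DataSource interface declares the search()/isThreadSafe() methods that both implementations in this commit define; "MRJob" is one of the names registered in the constructor.

    import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSource;
    import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
    import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceFactory;

    public class FactoryUsageSketch {
        public static void main(String[] args) throws DataSourceException {
            // getInstance() builds the singleton lazily under a class-wide lock.
            DataSourceFactory factory = DataSourceFactory.getInstance();
            // "MRJob" maps to the shared DatabaseDS registered in the constructor;
            // unknown names currently fall back to a fresh RecordDS (see the TODO).
            DataSource ds = factory.getDataSource("MRJob");
            System.out.println("thread safe: " + ds.isThreadSafe());
        }
    }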

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DsDirectory.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DsDirectory.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DsDirectory.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DsDirectory.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,103 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.extraction.engine.datasource;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.chukwa.inputtools.mdl.DataConfig;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+public class DsDirectory
+{
+	private static Object lock = new Object();
+	private static DsDirectory dsDirectory = null;
+	private static final String[] emptyArray = new String[0];
+	
+	
+	private String rootFolder = null;
+	private DataConfig dataConfig = null;
+	
+	private static FileSystem fs = null;
+	private static Configuration conf = null;
+	
+	private DsDirectory()
+	{
+		dataConfig = new DataConfig();
+		conf = new Configuration();
+		try
+		{
+			fs = FileSystem.get(conf);
+		} catch (IOException e)
+		{
+			e.printStackTrace();
+		}
+		rootFolder = dataConfig.get("chukwa.engine.dsDirectory.rootFolder");
+	}
+	
+	public static DsDirectory getInstance()
+	{
+		synchronized(lock)
+		{
+			if (dsDirectory == null)
+			{
+				dsDirectory = new DsDirectory();
+			}
+		}
+		return dsDirectory;
+	}
+	
+	public String[] list(String cluster)
+	throws DataSourceException
+	{
+		List<String> datasources = new ArrayList<String>();
+		try
+		{
+			FileStatus[] fileStat = fs.listStatus(new Path(rootFolder+cluster));
+			
+			for (FileStatus fstat : fileStat)
+			{
+				if (fstat.isDir())
+				{
+					datasources.add(fstat.getPath().getName());
+				}
+			}
+		} 
+		catch (IOException e)
+		{
+			e.printStackTrace();
+			throw new DataSourceException(e);
+		}
+		return datasources.toArray(emptyArray);
+	}
+	
+	public static void main(String[] args) throws DataSourceException
+	{
+		DsDirectory dsd = DsDirectory.getInstance();
+		String[] dss = dsd.list("localhost");
+		for (String d : dss)
+		{
+			System.out.println(d);
+		}
+	}
+}

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/database/DatabaseDS.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/database/DatabaseDS.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/database/DatabaseDS.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/database/DatabaseDS.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,174 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Log Event Prototype 
+// From event_viewer.jsp
+package org.apache.hadoop.chukwa.extraction.engine.datasource.database;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.SimpleDateFormat;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.TreeMap;
+
+import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
+import org.apache.hadoop.chukwa.extraction.engine.Record;
+import org.apache.hadoop.chukwa.extraction.engine.SearchResult;
+import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSource;
+import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
+import org.apache.hadoop.chukwa.hicc.ClusterConfig;
+
+public class DatabaseDS implements DataSource
+{
+		
+	public SearchResult search(SearchResult result, String cluster,
+			String dataSource, long t0, long t1, String filter)
+			throws DataSourceException
+	{
+		SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd kk:mm:ss");
+		String timeField = null;
+		TreeMap<Long, List<Record>> records = result.getRecords();
+		
+		if (cluster == null)
+		{
+			cluster = "mithrilgold";
+		}
+		
+		if (dataSource.equalsIgnoreCase("MRJob"))
+		{
+			timeField = "LAUNCH_TIME";
+		}
+		else  if (dataSource.equalsIgnoreCase("HodJob"))
+		{
+			timeField = "StartTime";
+		}
+		else if (dataSource.equalsIgnoreCase("QueueInfo"))
+		{
+			timeField = "timestamp";
+		}
+		else
+		{
+			timeField = "timestamp";
+		}
+		String startS = formatter.format(t0);
+	    String endS = formatter.format(t1);
+	    Connection conn = null;
+	    Statement stmt = null;
+	    ResultSet rs = null;
+	    try
+	    {
+	    	String dateclause = timeField + " >= '" + startS 
+	    		+ "' and " + timeField + " <= '" + endS + "'";
+	    	
+		       ClusterConfig cc = new ClusterConfig();
+		       String jdbc = cc.getURL(cluster);
+		       
+			   conn = DriverManager.getConnection(jdbc);
+			   
+			   stmt = conn.createStatement();
+			   String query = "select * from "+dataSource+" where "+dateclause+";";
+			   // execute once; the ResultSet is fetched below
+			   if (stmt.execute(query)) 
+			   {
+			       rs = stmt.getResultSet();
+			       ResultSetMetaData rmeta = rs.getMetaData();
+			       int col = rmeta.getColumnCount();
+			       while (rs.next()) 
+			       {
+			    	   ChukwaRecord event = new ChukwaRecord();
+					   String cell="";
+					   long timestamp = 0;
+					   
+					   for(int i=1;i<=col;i++) // JDBC columns are numbered 1..col inclusive
+					   {
+					       String value = rs.getString(i);
+					       if(value!=null) 
+					       {
+						   cell=cell+" "+rmeta.getColumnName(i)+":"+value;
+					       }
+					       if(rmeta.getColumnName(i).equals(timeField)) 
+					       {
+					    	   timestamp = rs.getLong(i);
+					    	   event.setTime(timestamp);
+					       }
+					   }
+					   boolean isValid = false;
+					   if(filter == null || filter.equals("")) 
+					   {
+						   isValid = true;
+					   }
+					   else if (cell.indexOf(filter) > 0)
+					   {
+						   isValid = true;
+					   }
+					   if (!isValid)
+					   { continue; }
+					   
+					   event.add(Record.bodyField, cell);
+					   event.add(Record.sourceField, cluster + "." + dataSource );
+					   if (records.containsKey(timestamp))
+					   {
+						   records.get(timestamp).add(event);
+					   }
+					   else
+					   {
+						   List<Record> list = new LinkedList<Record>();
+						   list.add(event);
+						   records.put(event.getTime(), list);
+					   }     
+			       }
+			   }
+	    }
+	    catch (SQLException e)
+	    {
+	    	e.printStackTrace();
+	    	throw new DataSourceException(e);
+	    }
+	    finally 
+	    {
+	    	  if (rs != null) {
+			       try {
+				   rs.close();
+			       } catch (SQLException sqlEx) {
+				   // ignore
+			       }
+			       rs = null;
+			   }
+			   if (stmt != null) {
+			       try {
+				   stmt.close();
+			       } catch (SQLException sqlEx) {
+				   // ignore
+			       }
+			       stmt = null;
+			   }
+			   if (conn != null) {
+			       try {
+				   conn.close();
+			       } catch (SQLException sqlEx) {
+				   // ignore
+			       }
+			       conn = null;
+			   }
+	    }
+		return result;
+	}
+
+	public boolean isThreadSafe()
+	{
+		return true;
+	}
+
+}
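
search() above splices the formatted timestamps straight into the SQL string. As a sketch only (not part of this commit), the same date-range clause with bound parameters would look like the method below; table and column names cannot be bound, so dataSource and timeField would still need validation against a known list.

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.text.SimpleDateFormat;

    public class ParameterizedSearchSketch {
        // Hypothetical helper mirroring DatabaseDS.search()'s WHERE clause.
        static ResultSet query(Connection conn, String dataSource, String timeField,
                               long t0, long t1) throws SQLException {
            SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd kk:mm:ss");
            String sql = "select * from " + dataSource
                + " where " + timeField + " >= ? and " + timeField + " <= ?";
            PreparedStatement ps = conn.prepareStatement(sql);
            ps.setString(1, formatter.format(t0));
            ps.setString(2, formatter.format(t1));
            return ps.executeQuery();
        }
    }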

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaFileParser.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaFileParser.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaFileParser.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaFileParser.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.extraction.engine.datasource.record;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
+import org.apache.hadoop.chukwa.extraction.engine.Record;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+
+
+public class ChukwaFileParser
+{
+	static final int timestampField = 0;
+		
+	
+	@SuppressWarnings("deprecation")
+	public static  List<Record> readData(String cluster,String dataSource,int maxRows,long t1, long t0,
+			long maxOffset,String filter,String fileName,FileSystem fs ) throws
+			MalformedFileFormat
+	{
+	
+		//String source = "NameNode." + fileName;
+		List<Record> records = new LinkedList<Record>();
+		FSDataInputStream dataIS = null;
+		int lineCount = 0;
+		
+		try
+		{
+			
+			if (!fs.exists(new Path(fileName)))
+			{
+				System.out.println("fileName not there!");
+				return records;
+			}
+			System.out.println("NameNodeParser Open [" +fileName + "]");
+			
+			dataIS = fs.open(new Path(fileName));
+			System.out.println("NameNodeParser Open2 [" +fileName + "]");
+			
+			long timestamp = 0;
+			int listSize = 0;
+			String line = null;
+			String[] data = null;
+			long offset = 0;
+			
+			
+			do
+			{
+				offset = dataIS.getPos();
+				
+				// TODO: stop early once offset passes maxOffset
+//				if (offset > maxOffset)
+//				{
+//					break;
+//				}
+				
+				line = dataIS.readLine();
+				lineCount ++;
+//				System.out.println("NameNodeParser Line [" +line + "]");	
+				if (line != null)
+				{
+					
+					//empty lines
+					if (line.length() < 14)
+					{
+//						System.out.println("NameNodeParser Line < 14! [" +line + "]");
+						continue;
+					}
+//					System.out.println("Line [" +line + "]");
+					data = line.split("\t");// Default separator for TextOutputFormat!
+					
+					try
+					{
+						timestamp = Long.parseLong(data[timestampField]);
+						
+					} catch (Exception e)
+					{
+						e.printStackTrace();
+						//throw new MalformedFileFormat(e);
+						continue; // skip lines whose timestamp cannot be parsed
+					}
+					if (timestamp < t0) 
+					{
+//						 System.out.println("Line not in range. Skipping: " +line);
+//						 System.out.println("Search for: " + new Date(t0) + " is :" + new Date(timestamp));
+						 continue;
+					} 
+					else if ((timestamp < t1) && (offset < maxOffset )) //JB (epochTS < maxDate)
+					{
+						
+//						System.out.println("In Range: " + line);
+						boolean valid = false;
+						
+						 if ( (filter == null || filter.equals("") ))
+						 {
+							 valid = true;
+						 }
+						 else if (line.indexOf(filter) > 0)
+						   {
+							   valid = true;
+						   }
+						 
+						 if (valid)
+						 {
+//							System.out.println("In Range In Filter: " + line);
+							ChukwaRecord record = new ChukwaRecord();
+							record.setTime(timestamp);
+							record.add("offset", ""+offset);
+							record.add(Record.bodyField, data[1]);
+							record.add(Record.sourceField, dataSource);
+							
+							records.add(record);
+							listSize = records.size();
+							if (listSize > maxRows)
+							{
+								records.remove(0);
+//								System.out.println("==========>>>>>REMOVING: " + e);
+							}
+						 }
+						else 
+						{
+//							System.out.println("In Range ==================>>>>>>>>> OUT Regex: " + line);
+						}
+
+					}
+					else
+					{
+//						 System.out.println("Line out of range. Stopping now: " +line);
+						break;
+					}
+				}
+
+			} while (line != null);			
+		}
+		catch(Exception e)
+		{
+			e.printStackTrace();
+		}
+		finally
+		{
+			System.out.println("File: " +fileName +" Line count: " + lineCount);
+			if (dataIS != null)
+			{
+				try
+				{dataIS.close();} 
+				catch (IOException e)
+				{}
+			}
+		}
+		return records;
+	}
+}
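
A hedged usage sketch: readData() expects each line as "<epoch-millis><TAB><body>" (the TextOutputFormat default) and returns at most maxRows records between t0 and t1. The class name and argument values below are invented for illustration.

    import java.util.Calendar;
    import java.util.List;

    import org.apache.hadoop.chukwa.extraction.engine.Record;
    import org.apache.hadoop.chukwa.extraction.engine.datasource.record.ChukwaFileParser;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class FileParserUsageSketch {
        public static void main(String[] args) throws Exception {
            FileSystem fs = FileSystem.get(new Configuration());
            Calendar c = Calendar.getInstance();
            c.add(Calendar.HOUR, -1);
            // cluster, dataSource, maxRows, t1, t0, maxOffset, filter, fileName, fs
            List<Record> records = ChukwaFileParser.readData(
                "localhost", "NameNode", 200,
                System.currentTimeMillis(), c.getTimeInMillis(),
                Long.MAX_VALUE, null, args[0], fs);
            System.out.println(records.size() + " records");
        }
    }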

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaSequenceFileParser.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaSequenceFileParser.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaSequenceFileParser.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaSequenceFileParser.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,171 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.extraction.engine.datasource.record;
+
+import java.io.IOException;
+import java.util.Calendar;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Date;
+
+import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
+import org.apache.hadoop.chukwa.extraction.engine.Record;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+
+public class ChukwaSequenceFileParser
+{
+
+	public static  List<Record> readData(String cluster,String dataSource,int maxRows,long t1, long t0,
+			long maxOffset,String filter,String fileName,FileSystem fs,Configuration conf  ) throws
+			MalformedFileFormat
+	{
+	
+		//String source = "NameNode." + fileName;
+		List<Record> records = new LinkedList<Record>();
+		SequenceFile.Reader r= null;
+		
+		int lineCount = 0;
+		if (filter != null)
+			{ filter = filter.toLowerCase();}
+		
+		try
+		{
+			
+			if (!fs.exists(new Path(fileName)))
+			{
+				System.out.println("fileName not there!");
+				return records;
+			}
+			System.out.println("NameNodeParser Open [" +fileName + "]");
+			
+			r= new SequenceFile.Reader(fs, new Path(fileName), conf);
+			System.out.println("NameNodeParser Open2 [" +fileName + "]");
+			
+			long timestamp = 0;
+			int listSize = 0;
+
+			long offset = 0;
+			
+//			HdfsWriter.HdfsWriterKey key = new HdfsWriter.HdfsWriterKey();
+			Text key = new Text();
+
+		    ChukwaRecord evt = new ChukwaRecord();
+			while(r.next(key, evt))
+			{	
+				lineCount ++;
+				
+				System.out.println("NameNodeParser Line [" +evt.getValue(Record.bodyField) + "]");	
+				
+				if (evt != null)
+				{
+					timestamp = evt.getTime();
+					if (timestamp < t0) 
+					{
+						 System.out.println("Line not in range. Skipping: " +evt.getValue(Record.bodyField));
+						 System.out.println("Search for: " + new Date(t0) + " is :" + new Date(timestamp));
+						 continue;
+					} 
+					else if ((timestamp < t1) && (offset < maxOffset )) //JB (epochTS < maxDate)
+					{
+						
+						System.out.println("In Range: " + evt.getValue(Record.bodyField));
+						boolean valid = false;
+						
+						 if ( (filter == null || filter.equals("") ))
+						 {
+							 valid = true;
+						 }
+						 else if (evt.getValue(Record.rawField).toLowerCase().indexOf(filter) > 0)
+						   {
+						   System.out.println("MATCH " +  filter + "===========================>>>>>>>" + evt.getValue(Record.rawField));
+							   valid = true;
+						   }
+						 
+						 if (valid)
+						 {
+							records.add(evt);
+							evt = new ChukwaRecord(); // the stored record must not be reused; next() fills its argument in place
+							listSize = records.size();
+							if (listSize > maxRows)
+							{
+								Record removed = records.remove(0);
+								System.out.println("==========>>>>>REMOVING: " + removed.getValue(Record.bodyField));
+							}
+						 }
+						else 
+						{
+							System.out.println("In Range ==================>>>>>>>>> OUT Regex: " + evt.getValue(Record.bodyField));
+						}
+
+					}
+					else
+					{
+						 System.out.println("Line out of range. Stopping now: " +evt.getValue(Record.bodyField));
+						break;
+					}
+				}
+
+			}
+		}
+		catch(Exception e)
+		{
+			e.printStackTrace();
+		}
+		finally
+		{
+			System.out.println("File: " +fileName +" Line count: " + lineCount);
+			if (r != null)
+			{
+				try
+				{r.close();} 
+				catch (IOException e)
+				{}	
+			}
+			
+		}
+		return records;
+	}
+	
+	public static void main(String[] args) throws Throwable
+	{
+		Configuration conf = new Configuration();
+
+	    FileSystem fs = FileSystem.get(conf);//FileSystem.get(new URI(fsURL), conf);
+	    Calendar c = Calendar.getInstance();
+	    c.add(Calendar.MONTH, -2);
+	    
+	    ChukwaSequenceFileParser.readData(	"/tmp/t1", "NameNode",
+	    									200, new java.util.Date().getTime(), 
+	    									c.getTimeInMillis(), Long.MAX_VALUE, null, 
+	    									args[0], fs, conf);
+	    
+	    SequenceFile.Reader r= new SequenceFile.Reader(fs, new Path(args[0]), conf);
+	    Text key = new Text();
+	    
+	    ChukwaRecord evt = new ChukwaRecord();
+	    while(r.next(key, evt))
+	    {
+	      System.out.println( evt);
+	    }
+	}
+}

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/MalformedFileFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/MalformedFileFormat.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/MalformedFileFormat.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/MalformedFileFormat.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.extraction.engine.datasource.record;
+
+import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
+
+public class MalformedFileFormat extends DataSourceException
+{
+
+	/**
+	 * 
+	 */
+	private static final long serialVersionUID = 2180898410952691571L;
+
+	public MalformedFileFormat()
+	{
+		super();
+	}
+
+	public MalformedFileFormat(String message, Throwable cause)
+	{
+		super(message, cause);
+	}
+
+	public MalformedFileFormat(String message)
+	{
+		super(message);
+	}
+
+	public MalformedFileFormat(Throwable cause)
+	{
+		super(cause);
+	}
+
+}

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/RecordDS.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/RecordDS.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/RecordDS.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/RecordDS.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.extraction.engine.datasource.record;
+
+import java.io.IOException;
+import java.text.ParseException;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.TreeMap;
+
+import org.apache.hadoop.chukwa.extraction.engine.ChukwaSearchResult;
+import org.apache.hadoop.chukwa.extraction.engine.Record;
+import org.apache.hadoop.chukwa.extraction.engine.SearchResult;
+import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSource;
+import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
+import org.apache.hadoop.chukwa.inputtools.mdl.DataConfig;
+import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
+import org.apache.hadoop.fs.FileSystem;
+
+public class RecordDS implements DataSource
+{
+	
+	private static FileSystem fs = null;
+	private static ChukwaConfiguration conf = null;
+	
+	private static String rootFolder = null;
+	private static DataConfig dataConfig = null;
+	
+	static
+	{
+		dataConfig = new DataConfig();
+		rootFolder = dataConfig.get("chukwa.engine.dsDirectory.rootFolder");
+		conf = new ChukwaConfiguration();
+		try
+		{
+			fs = FileSystem.get(conf);
+		} catch (IOException e)
+		{
+			e.printStackTrace();
+		}
+	}
+	
+	public SearchResult search(
+									SearchResult result, 
+									String cluster,
+									String dataSource, 
+									long t0, 
+									long t1, 
+									String filter)
+			throws DataSourceException
+	{
+		
+		String filePath = rootFolder + "/" +  
+				cluster + "/" + dataSource;
+		
+		System.out.println("filePath [" + filePath + "]");	
+		Calendar calendar = Calendar.getInstance();
+		calendar.setTimeInMillis(t1);
+		
+		TreeMap<Long, List<Record>> records = result.getRecords();
+		int maxCount = 200;
+		
+		do
+		{
+			System.out.println("start Date [" + calendar.getTime() + "]");
+			String fileName = new java.text.SimpleDateFormat("_yyyy_MM_dd_HH").format(calendar.getTime());
+			int minutes = calendar.get(Calendar.MINUTE);
+			int dec = minutes/10;
+			fileName += "_" + dec ;
+			
+			int m = minutes - (dec*10);
+			if (m < 5)
+			{ fileName += "0.evt";}
+			else
+			{ fileName += "5.evt";}
+			
+			fileName = filePath + "/" + dataSource + fileName;
+			
+			//System.out.println("JB fileName  [" +fileName + "]");
+			
+			
+			try
+			{
+				System.out.println("BEFORE fileName  [" +fileName + "]");
+				
+//				List<Record> evts = ChukwaFileParser.readData(cluster,dataSource,maxCount, t1, t0, Long.MAX_VALUE, filter, fileName, fs);
+				List<Record> evts = ChukwaSequenceFileParser.readData(cluster,dataSource,maxCount, t1, t0, Long.MAX_VALUE, filter, fileName, fs,conf);
+				
+				maxCount = maxCount - evts.size();
+				System.out.println("AFTER fileName  [" +fileName + "] count=" + evts.size() + " maxCount=" + maxCount);
+				for (Record evt : evts)
+				{
+					System.out.println("AFTER Loop  [" +evt.toString() + "]");
+					long timestamp = evt.getTime();
+					if (records.containsKey(timestamp))
+					   {
+						records.get(timestamp).add(evt);
+					   }
+					   else
+					   {
+						   List<Record> list = new LinkedList<Record>();
+						   list.add(evt);
+						   records.put(timestamp, list);
+					   }   
+				}
+			}
+			catch (Exception e)
+			{
+				e.printStackTrace();
+			}
+			
+			if (maxCount <= 0)
+			{
+				System.out.println("BREAKING LOOP AFTER [" +fileName + "] maxCount=" + maxCount);
+				break;
+			}
+			
+			calendar.add(Calendar.MINUTE, -5);
+			
+			System.out.println("calendar  [" +calendar.getTimeInMillis() + "] ");
+			System.out.println("end       [" +(t0-1000*60*5 ) + "] ");
+		} while (calendar.getTimeInMillis() > (t0-1000*60*5 )); // <= need some code here
+		// Need more than this to compute the end
+
+		return result;
+	}
+
+	
+	public static void main(String[] args) throws DataSourceException
+	{
+		long t1 = 0;
+		long t0 = 0;
+		System.out.println("Hello");
+		Calendar calendar = Calendar.getInstance();
+		Date d1;
+		try
+		{
+			d1 = new java.text.SimpleDateFormat ("dd/MM/yyyy HH:mm:ss").parse("05/06/2008 19:31:05");
+			calendar.setTime(d1);
+			t1 = calendar.getTimeInMillis();
+			d1 = new java.text.SimpleDateFormat ("dd/MM/yyyy HH:mm:ss").parse("05/06/2008 19:26:05");
+			calendar.setTime(d1);
+			t0 = calendar.getTimeInMillis();
+			
+		} catch (ParseException e)
+		{
+			e.printStackTrace();
+			throw new RuntimeException(e);
+		}
+		
+		String filter = null;
+		RecordDS dao = new RecordDS();
+		SearchResult result = new ChukwaSearchResult();
+		
+		TreeMap<Long, List<Record>> records = new TreeMap<Long,List<Record>> ();
+		result.setRecords(records);
+		
+		dao.search(result,"output2","NameNode",t0,t1,filter);
+	}
+	
+	public boolean isThreadSafe()
+	{
+		return true;
+	}
+}
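
The loop above derives one ".evt" file per 5-minute bucket: the minute is rounded down to a 0 or 5 boundary, so 19:31 maps to ..._19_30.evt and 19:36 to ..._19_35.evt. A small self-contained restatement of that rule (class and method names hypothetical):

    import java.util.Calendar;

    public class EvtFileNameSketch {
        // Mirrors the bucketing in RecordDS.search(); returns the file basename.
        static String fileNameFor(String dataSource, Calendar c) {
            String name = new java.text.SimpleDateFormat("_yyyy_MM_dd_HH").format(c.getTime());
            int minutes = c.get(Calendar.MINUTE);
            int dec = minutes / 10;
            name += "_" + dec + ((minutes - dec * 10 < 5) ? "0" : "5") + ".evt";
            return dataSource + name;
        }
    }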

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/ClusterConfig.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/ClusterConfig.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/ClusterConfig.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/ClusterConfig.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.hicc;
+
+import java.io.*;
+import java.util.*;
+
+public class ClusterConfig {
+    public static HashMap<String, String> clusterMap = new HashMap<String, String>();
+    private String path=System.getenv("CHUKWA_HOME")+File.separator+"conf"+File.separator;
+    static public String getContents(File aFile) {
+        //...checks on aFile are elided
+        StringBuffer contents = new StringBuffer();
+   
+        try {
+          //use buffering, reading one line at a time
+          //FileReader always assumes default encoding is OK!
+          BufferedReader input =  new BufferedReader(new FileReader(aFile));
+          try {
+             String line = null; //not declared within while loop
+             /*
+              * readLine is a bit quirky :
+              * it returns the content of a line MINUS the newline.
+              * it returns null only for the END of the stream.
+              * it returns an empty String if two newlines appear in a row.
+              */
+             while (( line = input.readLine()) != null){
+                contents.append(line);
+                contents.append(System.getProperty("line.separator"));
+             }
+          } finally {
+             input.close();
+          }
+        }
+          catch (IOException ex){
+          ex.printStackTrace();
+        }
+
+        return contents.toString();
+    }
+
+    public ClusterConfig() {
+        File cc = new File(path+"jdbc.conf");
+        String buffer = getContents(cc);
+        String[] lines = buffer.split("\n");
+        for(String line: lines) {
+            String[] data = line.split("=",2);
+            if(data.length == 2) { // ignore blank or malformed lines
+                clusterMap.put(data[0],data[1]);
+            }
+        }
+    }
+
+    public String getURL(String cluster) {
+        String url = clusterMap.get(cluster);
+        return url; 
+    }
+
+    public Iterator<String> getClusters() {
+        Set<String> keys = clusterMap.keySet();
+        Iterator<String> i = keys.iterator();
+        return i;
+    }    
+}
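
ClusterConfig reads $CHUKWA_HOME/conf/jdbc.conf and expects one cluster=JDBC-URL pair per line. A hypothetical example (hosts and credentials invented):

    localhost=jdbc:mysql://localhost:3306/chukwa?user=chukwa
    demo=jdbc:mysql://db.example.com:3306/chukwa?user=reader&password=secret

getURL("demo") would then return the second URL, and getClusters() iterates over the configured names.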

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/ColorPicker.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/ColorPicker.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/ColorPicker.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/ColorPicker.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.hicc;
+
+public class ColorPicker {
+	private String color = "#ff5757";
+
+	public ColorPicker() {
+    	color = "#ff5757";
+    }
+	
+    public String get(int counter) {
+        // Clamp to 0..255 and zero-pad so each component is always a valid
+        // two-digit hex value (Integer.toHexString could yield "100" or "f").
+        String shade = String.format("%02x", Math.max(0, Math.min(255, 256-(counter % 255))));
+        if((counter % 6)==0) {
+            color = "#57"+shade+shade;
+        } else if((counter % 5)==0) {
+            color = "#"+shade+"57"+shade;
+        } else if((counter % 4)==0) {
+            String yellow = String.format("%02x", Math.max(0, Math.min(255, 256-(counter % 255 * 20))));
+            color = "#FF"+yellow+"00";
+        } else if((counter % 3)==0) {
+            color = "#57"+shade+"57";
+        } else if((counter % 2)==0) {
+            color = "#5757"+shade;
+        } else {
+            color = "#ff5757";
+        }
+        return this.color;
+    }
+}
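
A short usage sketch, one color per chart series (class name invented):

    import org.apache.hadoop.chukwa.hicc.ColorPicker;

    public class ColorPickerSketch {
        public static void main(String[] args) {
            ColorPicker picker = new ColorPicker();
            // counter selects both the hue family (mod 2..6) and the shade (mod 255)
            for (int series = 1; series <= 6; series++) {
                System.out.println(series + " -> " + picker.get(series));
            }
        }
    }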

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/Config.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/Config.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/Config.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/Config.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.hicc;
+
+public class Config {
+
+}

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/DatasetMapper.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.hicc;
+
+import java.util.HashMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.sql.*;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class DatasetMapper {
+    private String jdbc;
+    private static Log log = LogFactory.getLog(DatasetMapper.class);
+    private HashMap<String, ArrayList<Double>> dataset;
+    private List<String> labels;
+	public DatasetMapper(String jdbc) {
+	    this.jdbc=jdbc;
+	    this.dataset = new HashMap<String, ArrayList<Double>>();
+	    this.labels = new ArrayList<String>();
+	}
+	public void execute(String query, boolean groupBySecondColumn) {
+		dataset.clear();
+	    try {
+	        // The newInstance() call is a work around for some
+	        // broken Java implementations
+	        Class.forName("com.mysql.jdbc.Driver").newInstance();
+	    } catch (Exception ex) {
+	        // driver missing from the classpath; getConnection() below will throw
+	    }
+	    Connection conn = null;
+	    Statement stmt = null;
+	    ResultSet rs = null;
+	    int counter = 0;
+	    int size = 0;
+	    labels.clear();
+	    double max=0.0;
+	    int labelsCount=0;
+	    try {
+	        conn = DriverManager.getConnection(jdbc);
+	        stmt = conn.createStatement();
+	        //rs = stmt.executeQuery(query);
+	        if (stmt.execute(query)) {
+	            rs = stmt.getResultSet();
+	            ResultSetMetaData rmeta = rs.getMetaData();
+	            int col=rmeta.getColumnCount();
+	            int i=0;
+	            java.util.ArrayList<Double> data = null;
+	            HashMap<String, Integer> xAxisMap = new HashMap<String, Integer>();
+	            while (rs.next()) {
+	                String label = rs.getString(1);
+	                if(!xAxisMap.containsKey(label)) {
+	                    xAxisMap.put(label, i);
+	                    labels.add(label);
+	                    i++;
+	                }
+	                if(groupBySecondColumn) {
+	                    String item = rs.getString(2);
+	                    // Get the data from the row using the series column
+	                    double current = rs.getDouble(3);
+	                    if(current>max) {
+	                        max=current;
+	                    }
+	                    data = dataset.get(item);
+	                    if(data == null) {
+	                        data = new java.util.ArrayList<Double>();
+	                    }
+	                    data.add(rs.getDouble(3));
+	                    dataset.put(item,data);
+	                } else {
+	                    for(int j=2;j<=col;j++) {
+	                        String item = rmeta.getColumnName(j);
+	                        // Get the data from the row using the column name
+	                        double current = rs.getDouble(j);
+	                        if(current>max) {
+	                            max=current;
+	                        }
+	                        data = dataset.get(item);
+	                        if(data == null) {
+	                            data = new java.util.ArrayList<Double>();
+	                        }
+	                        data.add(rs.getDouble(j));
+	                        dataset.put(item,data);
+	                    }
+	                }
+	            }
+	            labelsCount=i;
+	        } else {
+	                log.error("query is not executed.");
+	        }
+	        // Now do something with the ResultSet ....
+	    } catch (SQLException ex) {
+	        // handle any errors
+	        log.error("SQLException: " + ex.getMessage());
+	        log.error("SQLState: " + ex.getSQLState());
+	        log.error("VendorError: " + ex.getErrorCode());
+	    } catch (Exception ex) {
+	        // non-SQL failures are ignored; dataset keeps whatever was parsed
+	    } finally {
+	        // it is a good idea to release
+	        // resources in a finally{} block
+	        // in reverse-order of their creation
+	        // if they are no-longer needed
+	        if (rs != null) {
+	            try {
+	                rs.close();
+	            } catch (SQLException sqlEx) {
+	                // ignore
+	            }
+	            rs = null;
+	        }
+	        if (stmt != null) {
+	            try {
+	                stmt.close();
+	            } catch (SQLException sqlEx) {
+	                // ignore
+	            }
+	            stmt = null;
+	        }
+	        if (conn != null) {
+	            try {
+	                conn.close();
+	            } catch (SQLException sqlEx) {
+	                // ignore
+	            }
+	            conn = null;
+	        }
+	    }
+	}
+	public List<String> getXAxisMap() {
+		return labels;
+	}
+	public HashMap<String, ArrayList<Double>> getDataset() {
+		return dataset;
+	}
+}
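
A hedged usage sketch: the JDBC URL and query below are invented. The first selected column supplies the x-axis labels; with groupBySecondColumn set, the second column names the series and the third carries the value, otherwise every remaining column becomes a series named after its column.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;

    import org.apache.hadoop.chukwa.hicc.DatasetMapper;

    public class DatasetMapperSketch {
        public static void main(String[] args) {
            DatasetMapper mapper = new DatasetMapper(
                "jdbc:mysql://localhost:3306/chukwa?user=chukwa");
            mapper.execute("select timestamp, host, load_avg from system_metrics", true);
            List<String> xAxis = mapper.getXAxisMap();
            HashMap<String, ArrayList<Double>> series = mapper.getDataset();
            System.out.println(xAxis.size() + " labels, " + series.size() + " series");
        }
    }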

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/JSONLoader.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/JSONLoader.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/JSONLoader.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/JSONLoader.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.hicc;
+
+import java.net.*;
+import java.io.*;
+import org.json.*;
+
+public class JSONLoader {
+    public JSONArray jsonData;
+    static public String getContents(String source) {
+        //...checks on aFile are elided
+        StringBuffer contents = new StringBuffer();
+
+        try {
+          //use buffering, reading one line at a time
+          //FileReader always assumes default encoding is OK!
+	  URL yahoo = new URL(source);
+	  BufferedReader in = new BufferedReader(
+				new InputStreamReader(
+				yahoo.openStream()));
+
+	  String inputLine;
+
+	  while ((inputLine = in.readLine()) != null) {
+                contents.append(inputLine);
+                contents.append(System.getProperty("line.separator"));
+          }
+          in.close();
+        } catch (IOException ex){
+          ex.printStackTrace();
+        }
+
+        return contents.toString();
+    }
+
+    public JSONLoader(String source) {
+        String buffer = getContents(source);
+        try {
+            JSONObject rows = new JSONObject(buffer);
+            jsonData = rows.getJSONArray("rows");
+        } catch (JSONException e) {
+            // jsonData stays null if the payload is not the expected {"rows": [...]} shape
+        }
+    }
+
+    public String getTS(int i) {
+        String ts = null;
+        try {
+            ts = jsonData.getJSONObject(i).get("ts").toString();
+        } catch (JSONException e) {
+        }
+        return ts; 
+    }
+    
+    public String getTags(int i) {
+        String tags = null;
+        try {
+            tags = jsonData.getJSONObject(i).get("tags").toString();
+        } catch (JSONException e) {
+        }
+        return tags;
+    }
+
+    public String getValue(int i) {
+        String value = null;
+        try {
+            value = jsonData.getJSONObject(i).get("value").toString();
+        } catch (JSONException e) {
+        }
+        return value;
+    }
+
+    public int length() {
+        return jsonData.length();
+    }
+}
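
JSONLoader assumes the fetched document is an object with a "rows" array whose elements carry "ts", "tags", and "value" fields. A hypothetical payload:

    {"rows": [
      {"ts": "1218578400", "tags": "cluster=demo host=node1", "value": "0.75"},
      {"ts": "1218578460", "tags": "cluster=demo host=node1", "value": "0.80"}
    ]}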

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/TimeHandler.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/TimeHandler.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/TimeHandler.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/TimeHandler.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.hicc;
+
+import javax.servlet.http.*;
+import java.util.Calendar;
+import java.util.TimeZone;
+import java.text.SimpleDateFormat;
+
+public class TimeHandler {
+	private HttpSession session = null;
+	private HttpServletRequest request = null;
+	private TimeZone tz = null;
+    private long start = 0;
+    private long end = 0;
+    private String startDate = null;
+    private String startHour = null;
+    private String startMin = null;
+    private String endDate = null;
+    private String endHour = null;
+    private String endMin = null;
+    private String startS = null;
+    private String endS = null;
+    
+    public TimeHandler(HttpServletRequest request) {
+    	this.tz=TimeZone.getTimeZone("UTC");
+    	init(request);
+    }
+    
+    public TimeHandler(HttpServletRequest request, String tz) {
+    	this.tz=TimeZone.getTimeZone(tz);
+    	init(request);
+    }
+    
+    public void init(HttpServletRequest request) {
+    	this.session = request.getSession();
+    	this.request = request;
+    	String timeType = (String)session.getAttribute("time_type");
+    	if((request.getParameter("period")!=null && !request.getParameter("period").equals("")) || (timeType!=null && timeType.equals("last"))) {
+            String period = request.getParameter("period");
+        	if(period == null) {
+                period = (String) session.getAttribute("period");
+                if(period == null) {
+                    period = "last1hr";
+                    session.setAttribute("period",period);
+                }
+            }
+            Calendar now = Calendar.getInstance();
+            this.start = now.getTimeInMillis();
+            this.end = now.getTimeInMillis();            
+        	if(period.equals("last1hr")) {
+                start = end - (60*60*1000);
+            } else if(period.equals("last2hr")) {
+                start = end - (2*60*60*1000);
+            } else if(period.equals("last3hr")) {
+                start = end - (3*60*60*1000);
+            } else if(period.equals("last6hr")) {
+                start = end - (6*60*60*1000);
+            } else if(period.equals("last12hr")) {
+                start = end - (12*60*60*1000);
+            } else if(period.equals("last24hr")) {
+                start = end - (24*60*60*1000);
+            } else if(period.equals("last7d")) {
+                start = end - (7L*24*60*60*1000);
+            } else if(period.equals("last30d")) {
+                start = end - (30L*24*60*60*1000); // long arithmetic: 30 days in ms overflows int
+            }
+        	if(request.getParameter("time_type")!=null && request.getParameter("time_type").equals("range")) {
+                session.setAttribute("start", ""+start);
+                session.setAttribute("end", ""+end);
+        	}
+        } else {
+            start = Long.parseLong((String) session.getAttribute("start"));
+            end = Long.parseLong((String) session.getAttribute("end"));        	
+        }
+        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm");
+        SimpleDateFormat formatDate = new SimpleDateFormat("yyyy-MM-dd");
+        SimpleDateFormat formatHour = new SimpleDateFormat("HH");
+        SimpleDateFormat formatMin = new SimpleDateFormat("mm");
+        
+        formatter.setTimeZone(this.tz);
+        formatDate.setTimeZone(this.tz);
+        formatHour.setTimeZone(this.tz);
+        formatMin.setTimeZone(this.tz);
+        
+        startS = formatter.format(start);
+        this.startDate = formatDate.format(start);
+        this.startHour = formatHour.format(start);
+        this.startMin = formatMin.format(start);
+        endS = formatter.format(end);
+        this.endDate = formatDate.format(end);
+        this.endHour = formatHour.format(end);
+        this.endMin = formatMin.format(end);
+    }
+
+    public String getStartDate() {
+        return this.startDate;
+    }
+
+    public String getStartHour() {
+        return this.startHour;
+    }
+
+    public String getStartMinute() {
+        return this.startMin;
+    }
+
+    public String getStartTimeText() {
+        return this.startS;
+    }
+
+    public long getStartTime() {
+        return start;
+    }
+
+    public String getEndDate() {
+        return this.endDate;
+    }
+
+    public String getEndHour() {
+        return this.endHour;
+    }
+
+    public String getEndMinute() {
+        return this.endMin;
+    }
+
+    public String getEndTimeText() {
+        return this.endS;
+    }
+
+    public long getEndTime() {
+        return end;
+    }
+
+}
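
The period strings handled in init() map onto fixed-length millisecond
windows ending at "now". For reference, a minimal table-driven sketch of the
same lookup (a subset of the period names TimeHandler accepts; the class and
everything else here is invented for illustration):

import java.util.LinkedHashMap;
import java.util.Map;

public class PeriodWindow {
    // Window length in milliseconds for each named period; the long
    // literals matter, since 30*24*60*60*1000 overflows int.
    private static final Map<String, Long> PERIODS = new LinkedHashMap<String, Long>();
    static {
        PERIODS.put("last1hr", 60L * 60 * 1000);
        PERIODS.put("last24hr", 24L * 60 * 60 * 1000);
        PERIODS.put("last7d", 7L * 24 * 60 * 60 * 1000);
        PERIODS.put("last30d", 30L * 24 * 60 * 60 * 1000);
    }

    // Returns {start, end}; unknown periods fall back to the last hour,
    // matching TimeHandler's default.
    public static long[] window(String period) {
        long end = System.currentTimeMillis();
        Long len = PERIODS.get(period);
        if (len == null) len = PERIODS.get("last1hr");
        return new long[] { end - len, end };
    }

    public static void main(String[] args) {
        long[] w = window("last30d");
        System.out.println(w[1] - w[0]);  // 2592000000, positive, no overflow
    }
}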

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/Views.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/Views.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/Views.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/Views.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,125 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.hicc;
+
+import java.io.*;
+import java.util.*;
+import org.json.*;
+
+public class Views {
+    public JSONArray viewsData;
+    private String path = System.getProperty("catalina.home")+"/webapps/hicc/views/workspace_view_list.cache";
+    static public String getContents(File aFile) {
+        //...checks on aFile are elided
+        StringBuffer contents = new StringBuffer();
+
+        try {
+          //use buffering, reading one line at a time
+          //FileReader always assumes default encoding is OK!
+          BufferedReader input =  new BufferedReader(new FileReader(aFile));
+          try {
+             String line = null; //not declared within while loop
+             /*
+              * readLine is a bit quirky :
+              * it returns the content of a line MINUS the newline.
+              * it returns null only for the END of the stream.
+              * it returns an empty String if two newlines appear in a row.
+              */
+             while (( line = input.readLine()) != null){
+                contents.append(line);
+                contents.append(System.getProperty("line.separator"));
+             }
+          } finally {
+             input.close();
+          }
+        }
+          catch (IOException ex){
+          ex.printStackTrace();
+        }
+
+        return contents.toString();
+    }
+    
+    public Views() {
+        File aFile = new File(path);
+        String buffer = getContents(aFile);
+        try {
+            viewsData = new JSONArray(buffer);
+        } catch (JSONException e) {
+            // cache file missing or malformed; viewsData stays null
+        }
+    }
+
+    public String getOwner(int i) {
+        String owner = null;
+        try {
+            owner = viewsData.getJSONObject(i).get("owner").toString();
+        } catch (JSONException e) {
+            // missing field: fall through and return null
+        }
+        return owner;
+    }
+
+    public Iterator getPermission(int i) {
+        Iterator permission = null;
+        try {
+            permission = viewsData.getJSONObject(i).getJSONObject("permission").keys();
+        } catch (JSONException e) {
+            // missing field: fall through and return null
+        }
+        return permission;
+    }
+
+    public String getReadPermission(int i, String who) {
+        String read = null;
+        try {
+            read = viewsData.getJSONObject(i).getJSONObject("permission")
+                            .getJSONObject(who).get("read").toString();
+        } catch (JSONException e) {
+            // missing field: fall through and return null
+        }
+        return read;
+    }
+
+    public String getWritePermission(int i, String who) {
+        String write = null;
+        try {
+            write = viewsData.getJSONObject(i).getJSONObject("permission")
+                             .getJSONObject(who).get("write").toString();
+        } catch (JSONException e) {
+            // missing field: fall through and return null
+        }
+        return write;
+    }
+
+    public String getDescription(int i) {
+        String description = null;
+        try {
+            description = viewsData.getJSONObject(i).get("description").toString();
+        } catch (JSONException e) {
+            // missing field: fall through and return null
+        }
+        return description;
+    }
+
+    public String getKey(int i) {
+        String key = null;
+        try {
+            key = viewsData.getJSONObject(i).get("key").toString();
+        } catch (JSONException e) {
+            // missing field: fall through and return null
+        }
+        return key;
+    }
+
+    public int length() {
+        return viewsData.length();
+    }
+}
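
For reference, the accessor chains above imply the on-disk shape of
workspace_view_list.cache: a JSON array of view records, each with key,
description, owner, and a permission object keyed by user. A minimal sketch
of walking one record with org.json (the sample values are invented, not
taken from the commit; "all" is the wildcard user that
Workspace.convertObjectsToViewList() writes):

import java.util.Iterator;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

public class ViewsCacheDemo {
    public static void main(String[] args) throws JSONException {
        // One cache entry in the shape Views navigates.
        String cache = "[{\"key\":\"default\",\"description\":\"Default view\","
            + "\"owner\":\"\",\"permission\":{\"all\":{\"read\":1,\"modify\":1}}}]";
        JSONArray views = new JSONArray(cache);
        JSONObject view = views.getJSONObject(0);
        System.out.println(view.get("key"));   // default
        JSONObject perm = view.getJSONObject("permission");
        for (Iterator users = perm.keys(); users.hasNext(); ) {
            String who = (String) users.next();
            System.out.println(who + " read=" + perm.getJSONObject(who).get("read"));
        }
    }
}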

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/ViewsTag.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/ViewsTag.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/ViewsTag.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/ViewsTag.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.hicc;
+
+import javax.servlet.jsp.JspException;
+import javax.servlet.jsp.tagext.SimpleTagSupport;
+import java.io.IOException;
+import java.util.*;
+import org.apache.hadoop.chukwa.hicc.Views;
+
+public class ViewsTag extends SimpleTagSupport {
+    private String key = null;
+    private String owner = null;
+    private String description = null;
+    Views views = new Views();
+    public void doTag() throws JspException, IOException {
+        for(int i = 0; i < views.length(); i++) {
+            getJspContext().setAttribute( "key", views.getKey(i) );
+            // Collect the users named in the permission block; a growable
+            // list has no fixed 100-entry cap to overflow.
+            List<String> authUsers = new ArrayList<String>();
+            Iterator permission = views.getPermission(i);
+            if(permission != null) {
+                while(permission.hasNext()) {
+                    String who = permission.next().toString();
+                    authUsers.add(who);
+//                    getJspContext().setAttribute( "permission."+who+".read", views.getReadPermission(i,who) );
+//                    getJspContext().setAttribute( "permission."+who+".write", views.getWritePermission(i,who) );
+                }
+            }
+//            getJspContext().setAttribute( "permission", authUsers );
+            getJspContext().setAttribute( "owner", views.getOwner(i) );
+            getJspContext().setAttribute( "description", views.getDescription(i) );
+            getJspBody().invoke(null);
+        }
+    }
+
+}
+

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/Workspace.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/Workspace.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/Workspace.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/hicc/Workspace.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,353 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.hicc;
+
+import java.io.*;
+import java.util.*;
+import javax.servlet.*;
+import javax.servlet.http.*;
+import java.sql.*;
+import org.json.*;
+
+public class Workspace extends HttpServlet {
+
+    private String path=System.getProperty("catalina.home")+"/webapps/hicc";
+    private JSONObject hash=new JSONObject();
+    private String user="admin";
+
+    public void doGet(HttpServletRequest request,
+                      HttpServletResponse response)
+        throws IOException, ServletException
+    {
+        response.setContentType("text/html");
+        String method = request.getParameter("method");
+        if(method == null) {
+            return; // no action requested
+        }
+        if(method.equals("get_views_list")) {
+            getViewsList(request, response);
+        } else if(method.equals("get_view")) {
+            getView(request, response);
+        } else if(method.equals("save_view")) {
+            saveView(request, response);
+        } else if(method.equals("change_view_info")) {
+            changeViewInfo(request, response);
+        } else if(method.equals("get_widget_list")) {
+            getWidgetList(request, response);
+        } else if(method.equals("clone_view")) {
+            cloneView(request, response);
+        } else if(method.equals("delete_view")) {
+            deleteView(request, response);
+        }
+    }
+
+    public void doPost(HttpServletRequest request,
+                      HttpServletResponse response)
+        throws IOException, ServletException
+    {
+        doGet(request, response);
+    }
+
+    static public String getContents(File aFile) {
+        //...checks on aFile are elided
+        StringBuffer contents = new StringBuffer();
+    
+        try {
+          //use buffering, reading one line at a time
+          //FileReader always assumes default encoding is OK!
+          BufferedReader input =  new BufferedReader(new FileReader(aFile));
+          try {
+             String line = null; //not declared within while loop
+             /*
+              * readLine is a bit quirky :
+              * it returns the content of a line MINUS the newline.
+              * it returns null only for the END of the stream.
+              * it returns an empty String if two newlines appear in a row.
+              */
+             while (( line = input.readLine()) != null){
+                contents.append(line);
+                contents.append(System.getProperty("line.separator"));
+             }
+          } finally {
+             input.close();
+          }
+        }
+          catch (IOException ex){
+          ex.printStackTrace();
+        }
+    
+        return contents.toString();
+    }
+
+    public void setContents(String fName, String buffer) {
+        try {
+            FileWriter fstream = new FileWriter(fName);
+            BufferedWriter out = new BufferedWriter(fstream);
+            out.write(buffer);
+            out.close();
+        } catch (Exception e) {
+            System.err.println("Error: "+e.getMessage());
+        }
+    }
+
+    public void cloneView(HttpServletRequest request,
+                      HttpServletResponse response)
+        throws IOException, ServletException
+    {
+        PrintWriter out = response.getWriter();
+        String name = request.getParameter("name");
+        String template = request.getParameter("clone_name");
+        File aFile = new File(path+"/views/"+template);
+        String config = getContents(aFile);
+        int i=0;
+        boolean check=true;
+        while(check) {
+            String tmpName = name;
+            if(i>0) {
+                tmpName = name + i;
+            }
+            File checkFile = new File(path+"/views/"+tmpName+".view");
+            check = checkFile.exists();
+            if(!check) {
+                name =tmpName;
+            }
+            i=i+1;
+        }
+        setContents(path+"/views/"+name+".view",config);
+        File deleteCache = new File(path+"/views/workspace_view_list.cache");
+        deleteCache.delete();
+        genViewCache(path+"/views");
+        aFile = new File(path+"/views/workspace_view_list.cache");
+        String viewsCache = getContents(aFile);
+        out.println(viewsCache);
+    }
+    public void deleteView(HttpServletRequest request,
+                      HttpServletResponse response)
+        throws IOException, ServletException
+    {
+        PrintWriter out = response.getWriter();
+        String name = request.getParameter("name");
+        File aFile = new File(path+"/views/"+name+".view");
+        aFile.delete();
+        File deleteCache = new File(path+"/views/workspace_view_list.cache");
+        deleteCache.delete();
+        genViewCache(path+"/views");
+    }
+    public void getViewsList(HttpServletRequest request,
+                      HttpServletResponse response)
+        throws IOException, ServletException
+    {
+        PrintWriter out = response.getWriter();
+        String format = request.getParameter("format");
+        File aFile = new File(path+"/views/workspace_view_list.cache");
+        String viewsCache = getContents(aFile);
+        out.println(viewsCache);
+    }
+    public void getView(HttpServletRequest request,
+                      HttpServletResponse response)
+        throws IOException, ServletException
+    {
+        PrintWriter out = response.getWriter();
+        String id = request.getParameter("id");
+        genViewCache(path+"/views");
+        File aFile = new File(path+"/views/"+id+".view");
+        String view = getContents(aFile);
+        out.println(view);
+    }
+    public void changeViewInfo(HttpServletRequest request,
+                      HttpServletResponse response)
+        throws IOException, ServletException
+    {
+        PrintWriter out = response.getWriter();
+        String id = request.getParameter("name");
+        String config = request.getParameter("config");
+        try {
+            JSONObject jt = new JSONObject(config);
+            File aFile = new File(path+"/views/"+id+".view");
+            String original = getContents(aFile);
+            JSONObject updateObject = new JSONObject(original);
+            updateObject.put("description",jt.get("description"));
+            setContents(path+"/views/"+id+".view",updateObject.toString());
+            File deleteCache = new File(path+"/views/workspace_view_list.cache");
+            deleteCache.delete();
+            genViewCache(path+"/views");
+            out.println("Workspace is stored successfully.");
+        } catch(JSONException e) {
+            out.println("Workspace store failed.");
+        }
+    }
+    public void saveView(HttpServletRequest request,
+                      HttpServletResponse response)
+        throws IOException, ServletException
+    {
+        PrintWriter out = response.getWriter();
+        String id = request.getParameter("name");
+        String config = request.getParameter("config");
+        setContents(path+"/views/"+id+".view",config);
+        out.println("Workspace is stored successfully.");
+    }
+    public void getWidgetList(HttpServletRequest request,
+                             HttpServletResponse response)
+        throws IOException, ServletException
+    {
+        PrintWriter out = response.getWriter();
+        String format = request.getParameter("format");
+        genWidgetCache(path+"/descriptors");
+        File aFile = new File(path+"/descriptors/workspace_plugin.cache");
+        String viewsCache = getContents(aFile);
+        out.println(viewsCache);
+    }
+    private void genViewCache(String source) {
+        File cacheFile = new File(source+"/workspace_view_list.cache");
+        if(! cacheFile.exists()) {
+            File dir = new File(source);
+            File[] filesWanted = dir.listFiles(
+                new FilenameFilter() {
+                    public boolean accept(File dir, String name) {
+                    return name.endsWith(".view");
+               }
+            });
+            JSONObject[] cacheGroup = new JSONObject[filesWanted.length];
+            for(int i=0; i< filesWanted.length; i++) {
+               String buffer = getContents(filesWanted[i]);
+               try {
+                   JSONObject jt = new JSONObject(buffer);
+                   String fn = filesWanted[i].getName();
+                   jt.put("key", fn.substring(0,(fn.length()-5)));
+                   cacheGroup[i] = jt;
+               } catch (Exception e) {
+                   // skip view files that fail to parse as JSON
+               }
+            }
+            String viewList = convertObjectsToViewList(cacheGroup);
+            setContents(source+"/workspace_view_list.cache", viewList);
+        }
+    }
+    public String convertObjectsToViewList(JSONObject[] objArray) {
+        JSONArray jsonArr = new JSONArray();
+        JSONObject permission = new JSONObject();
+        JSONObject user = new JSONObject();
+        try {
+            permission.put("read",1);
+            permission.put("modify",1);
+            user.put("all",permission);
+        } catch (Exception e) {
+               System.err.println("JSON Exception: "+e.getMessage());
+        }
+        for(int i=0;i<objArray.length;i++) {
+            try {
+               JSONObject jsonObj = new JSONObject();
+               jsonObj.put("key",objArray[i].get("key"));
+               jsonObj.put("description",objArray[i].get("description"));
+               jsonObj.put("owner","");
+               jsonObj.put("permission",user);
+               jsonArr.put(jsonObj);
+           } catch (Exception e) {
+               System.err.println("JSON Exception: "+e.getMessage());
+           }
+        }
+        return jsonArr.toString();
+    }
+    private void genWidgetCache(String source) {
+        File cacheFile = new File(source+"/workspace_plugin.cache");
+        File cacheDir = new File(source);
+        if(! cacheFile.exists() || cacheFile.lastModified()<cacheDir.lastModified()) {
+            File dir = new File(source);
+            File[] filesWanted = dir.listFiles(
+                new FilenameFilter() {
+                    public boolean accept(File dir, String name) {
+                    return name.endsWith(".descriptor");
+               }
+            });
+            JSONObject[] cacheGroup = new JSONObject[filesWanted.length];
+            for(int i=0; i< filesWanted.length; i++) {
+               String buffer = getContents(filesWanted[i]);
+               try {
+                   JSONObject jt = new JSONObject(buffer);
+                   cacheGroup[i] = jt;
+               } catch (Exception e) {
+                   // skip descriptor files that fail to parse as JSON
+               }
+            }
+            String widgetList = convertObjectsToWidgetList(cacheGroup);
+            setContents(source+"/workspace_plugin.cache", widgetList);
+        }
+    }
+    public String convertObjectsToWidgetList(JSONObject[] objArray) {
+        JSONObject jsonObj = new JSONObject();
+        JSONArray jsonArr = new JSONArray();
+        for(int i=0;i<objArray.length;i++) {
+            jsonArr.put(objArray[i]);
+        }
+        try {
+            jsonObj.put("detail", jsonArr);
+        } catch (Exception e) {
+            System.err.println("JSON Exception: "+e.getMessage());
+        }
+        for(int i=0;i<objArray.length;i++) {
+            try {
+                String[] categoriesArray = objArray[i].get("categories").toString().split(",");
+                hash = addToHash(hash,categoriesArray,objArray[i]); 
+            } catch (JSONException e) {
+                System.err.println("JSON Exception: "+e.getMessage());
+            }
+        }
+        try {
+            jsonObj.put("children",hash);
+        } catch (Exception e) {
+            System.err.println("JSON Exception: "+e.getMessage());
+        }
+        return jsonObj.toString();
+    }
+    public JSONObject addToHash(JSONObject hash, String[] categoriesArray, JSONObject obj) {
+        JSONObject subHash=hash;
+        for(int i=0;i<categoriesArray.length;i++) {
+            String id = categoriesArray[i];
+            if(i>=categoriesArray.length-1) {
+                try {
+                    subHash.put("leaf:"+obj.get("title"),obj.get("id"));
+                } catch (Exception e) {
+                    System.err.println("JSON Exception: "+e.getMessage());
+                }
+            } else {
+                try {
+                    subHash=subHash.getJSONObject("node:"+id);
+                } catch (JSONException e) {
+                    try {
+                        JSONObject tmpHash = new JSONObject();
+                        subHash.put("node:"+id, tmpHash);
+                        subHash=tmpHash;
+                    } catch (JSONException ex) {
+                        // a non-null key cannot make put() fail; ignore
+                    }
+                }
+            }
+        }
+        return hash;
+    }
+    private JSONObject filterViewsByPermission(String userid, JSONObject viewArray) {
+        // placeholder: per-user permission filtering is not implemented yet
+        return viewArray;
+    }
+}
+
+
+
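
The addToHash() helper above files each widget under its comma-separated
category path, creating "node:" entries for interior levels and a
"leaf:<title>" entry at the last level. A small sketch of the resulting
shape, built directly with org.json (the category and widget names are
invented for illustration):

import org.json.JSONException;
import org.json.JSONObject;

public class CategoryTreeDemo {
    public static void main(String[] args) throws JSONException {
        // Branches are "node:" objects; terminal entries map
        // "leaf:" + title to the widget id.
        JSONObject root = new JSONObject();
        JSONObject hadoop = new JSONObject();
        root.put("node:hadoop", hadoop);
        hadoop.put("leaf:DFS Health", "dfs_health");
        JSONObject mapred = new JSONObject();
        hadoop.put("node:mapred", mapred);
        mapred.put("leaf:Task Trackers", "task_trackers");
        // Pretty-print the tree the widget browser would consume.
        System.out.println(root.toString(2));
    }
}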

Added: hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/inputtools/ChukwaTTInstru.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/inputtools/ChukwaTTInstru.java?rev=685353&view=auto
==============================================================================
--- hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/inputtools/ChukwaTTInstru.java (added)
+++ hadoop/core/trunk/src/contrib/chukwa/src/java/org/apache/hadoop/chukwa/inputtools/ChukwaTTInstru.java Tue Aug 12 15:35:16 2008
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.chukwa.inputtools;
+
+import java.io.File;
+import java.util.Map;
+import java.util.HashMap;
+
+import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
+import org.apache.hadoop.mapred.*;
+
+/**
+ * An instrumentation plugin for Hadoop that triggers Chukwa-based task
+ * logfile collection.
+ *
+ * WARNING: this code depends on Hadoop features that have not yet been
+ * committed. To allow it to compile, the key lines have been commented
+ * out and marked with 'PENDING'.
+ */
+public class ChukwaTTInstru 
+extends TaskTrackerMetricsInst  //PENDING on getting new metrics code into Hadoop
+{
+
+  private Map<TaskAttemptID, Long> stdOutAdaptors;
+  private Map<TaskAttemptID, Long> stdErrAdaptors;
+  private ChukwaAgentController chukwa;
+  
+  public ChukwaTTInstru(TaskTracker t) {
+    super(t);  //PENDING
+    stdOutAdaptors = new HashMap<TaskAttemptID, Long>();
+    stdErrAdaptors = new HashMap<TaskAttemptID, Long>();
+    chukwa = new ChukwaAgentController();
+  }
+  
+  public void reportTaskLaunch(TaskAttemptID taskid, File stdout, File stderr)  {
+    long stdoutID = chukwa.addFile("unknown-userdata", stdout.getAbsolutePath());
+    long stderrID = chukwa.addFile("unknown-userdata", stderr.getAbsolutePath());
+    stdOutAdaptors.put(taskid, stdoutID);
+    stdErrAdaptors.put(taskid, stderrID);
+  }
+  
+  public void reportTaskEnd(TaskAttemptID taskid) {
+    try {
+      Long id = stdOutAdaptors.remove(taskid);
+      if(id != null)
+        chukwa.remove(id);
+      
+      id = stdErrAdaptors.remove(taskid);
+      if(id != null)
+        chukwa.remove(id);
+    } catch(java.io.IOException e) {
+      //failed to talk to chukwa.  Not much to be done.
+    }
+  }
+}
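
For reference, a minimal sketch of the launch/end bookkeeping pattern
ChukwaTTInstru uses, with a plain String key standing in for TaskAttemptID
(the class and method names here are illustrative; addFile() and remove()
are the ChukwaAgentController calls shown above):

import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;

public class LogTailBookkeeping {
    // One adaptor id per tracked stream, keyed by task.
    private final Map<String, Long> adaptors = new HashMap<String, Long>();
    private final ChukwaAgentController chukwa = new ChukwaAgentController();

    public void onTaskLaunch(String taskKey, File stdout) {
        // addFile() registers a tail adaptor and returns its id.
        adaptors.put(taskKey, chukwa.addFile("unknown-userdata",
                                             stdout.getAbsolutePath()));
    }

    public void onTaskEnd(String taskKey) {
        Long id = adaptors.remove(taskKey);
        if (id == null) return;        // launch was never reported
        try {
            chukwa.remove(id);         // stop tailing the finished task's log
        } catch (IOException e) {
            // failed to talk to the agent; nothing more to be done
        }
    }
}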


