chukwa-commits mailing list archives

From: asrab...@apache.org
Subject: svn commit: r752666 [10/16] - in /hadoop/chukwa/trunk: ./ src/java/org/apache/hadoop/chukwa/ src/java/org/apache/hadoop/chukwa/conf/ src/java/org/apache/hadoop/chukwa/database/ src/java/org/apache/hadoop/chukwa/datacollection/ src/java/org/apache/hadoo...
Date: Wed, 11 Mar 2009 22:39:32 GMT
Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java Wed Mar 11 22:39:26 2009
@@ -19,49 +19,67 @@
 // File generated by hadoop record compiler. Do not edit.
 package org.apache.hadoop.chukwa.extraction.engine;
 
+
 public class ChukwaRecordJT extends org.apache.hadoop.record.Record {
   private static final org.apache.hadoop.record.meta.RecordTypeInfo _rio_recTypeInfo;
   private static org.apache.hadoop.record.meta.RecordTypeInfo _rio_rtiFilter;
   private static int[] _rio_rtiFilterFields;
   static {
-    _rio_recTypeInfo = new org.apache.hadoop.record.meta.RecordTypeInfo("ChukwaRecordJT");
-    _rio_recTypeInfo.addField("time", org.apache.hadoop.record.meta.TypeID.LongTypeID);
-    _rio_recTypeInfo.addField("mapFields", new org.apache.hadoop.record.meta.MapTypeID(org.apache.hadoop.record.meta.TypeID.StringTypeID, org.apache.hadoop.record.meta.TypeID.BufferTypeID));
+    _rio_recTypeInfo = new org.apache.hadoop.record.meta.RecordTypeInfo(
+        "ChukwaRecordJT");
+    _rio_recTypeInfo.addField("time",
+        org.apache.hadoop.record.meta.TypeID.LongTypeID);
+    _rio_recTypeInfo.addField("mapFields",
+        new org.apache.hadoop.record.meta.MapTypeID(
+            org.apache.hadoop.record.meta.TypeID.StringTypeID,
+            org.apache.hadoop.record.meta.TypeID.BufferTypeID));
   }
-  
+
   protected long time;
-  protected java.util.TreeMap<String,org.apache.hadoop.record.Buffer> mapFields;
-  public ChukwaRecordJT() { }
+  protected java.util.TreeMap<String, org.apache.hadoop.record.Buffer> mapFields;
+
+  public ChukwaRecordJT() {
+  }
+
   public ChukwaRecordJT(
-    final long time,
-    final java.util.TreeMap<String,org.apache.hadoop.record.Buffer> mapFields) {
+                        final long time,
+                        final java.util.TreeMap<String, org.apache.hadoop.record.Buffer> mapFields) {
     this.time = time;
     this.mapFields = mapFields;
   }
+
   public static org.apache.hadoop.record.meta.RecordTypeInfo getTypeInfo() {
     return _rio_recTypeInfo;
   }
-  public static void setTypeFilter(org.apache.hadoop.record.meta.RecordTypeInfo rti) {
-    if (null == rti) return;
+
+  public static void setTypeFilter(
+      org.apache.hadoop.record.meta.RecordTypeInfo rti) {
+    if (null == rti)
+      return;
     _rio_rtiFilter = rti;
     _rio_rtiFilterFields = null;
   }
-  private static void setupRtiFields()
-  {
-    if (null == _rio_rtiFilter) return;
+
+  private static void setupRtiFields() {
+    if (null == _rio_rtiFilter)
+      return;
     // we may already have done this
-    if (null != _rio_rtiFilterFields) return;
+    if (null != _rio_rtiFilterFields)
+      return;
     int _rio_i, _rio_j;
-    _rio_rtiFilterFields = new int [_rio_rtiFilter.getFieldTypeInfos().size()];
-    for (_rio_i=0; _rio_i<_rio_rtiFilterFields.length; _rio_i++) {
+    _rio_rtiFilterFields = new int[_rio_rtiFilter.getFieldTypeInfos().size()];
+    for (_rio_i = 0; _rio_i < _rio_rtiFilterFields.length; _rio_i++) {
       _rio_rtiFilterFields[_rio_i] = 0;
     }
-    java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_itFilter = _rio_rtiFilter.getFieldTypeInfos().iterator();
-    _rio_i=0;
+    java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_itFilter = _rio_rtiFilter
+        .getFieldTypeInfos().iterator();
+    _rio_i = 0;
     while (_rio_itFilter.hasNext()) {
-      org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfoFilter = _rio_itFilter.next();
-      java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_it = _rio_recTypeInfo.getFieldTypeInfos().iterator();
-      _rio_j=1;
+      org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfoFilter = _rio_itFilter
+          .next();
+      java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_it = _rio_recTypeInfo
+          .getFieldTypeInfos().iterator();
+      _rio_j = 1;
       while (_rio_it.hasNext()) {
         org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfo = _rio_it.next();
         if (_rio_tInfo.equals(_rio_tInfoFilter)) {
@@ -73,56 +91,68 @@
       _rio_i++;
     }
   }
+
   public long getTime() {
     return time;
   }
+
   public void setTime(final long time) {
-    this.time=time;
+    this.time = time;
   }
-  public java.util.TreeMap<String,org.apache.hadoop.record.Buffer> getMapFields() {
+
+  public java.util.TreeMap<String, org.apache.hadoop.record.Buffer> getMapFields() {
     return mapFields;
   }
-  public void setMapFields(final java.util.TreeMap<String,org.apache.hadoop.record.Buffer> mapFields) {
-    this.mapFields=mapFields;
+
+  public void setMapFields(
+      final java.util.TreeMap<String, org.apache.hadoop.record.Buffer> mapFields) {
+    this.mapFields = mapFields;
   }
-  public void serialize(final org.apache.hadoop.record.RecordOutput _rio_a, final String _rio_tag)
-  throws java.io.IOException {
-    _rio_a.startRecord(this,_rio_tag);
-    _rio_a.writeLong(time,"time");
+
+  public void serialize(final org.apache.hadoop.record.RecordOutput _rio_a,
+      final String _rio_tag) throws java.io.IOException {
+    _rio_a.startRecord(this, _rio_tag);
+    _rio_a.writeLong(time, "time");
     {
-      _rio_a.startMap(mapFields,"mapFields");
-      java.util.Set<java.util.Map.Entry<String,org.apache.hadoop.record.Buffer>> _rio_es1 = mapFields.entrySet();
-      for(java.util.Iterator<java.util.Map.Entry<String,org.apache.hadoop.record.Buffer>> _rio_midx1 = _rio_es1.iterator(); _rio_midx1.hasNext();) {
-        java.util.Map.Entry<String,org.apache.hadoop.record.Buffer> _rio_me1 = _rio_midx1.next();
+      _rio_a.startMap(mapFields, "mapFields");
+      java.util.Set<java.util.Map.Entry<String, org.apache.hadoop.record.Buffer>> _rio_es1 = mapFields
+          .entrySet();
+      for (java.util.Iterator<java.util.Map.Entry<String, org.apache.hadoop.record.Buffer>> _rio_midx1 = _rio_es1
+          .iterator(); _rio_midx1.hasNext();) {
+        java.util.Map.Entry<String, org.apache.hadoop.record.Buffer> _rio_me1 = _rio_midx1
+            .next();
         String _rio_k1 = _rio_me1.getKey();
         org.apache.hadoop.record.Buffer _rio_v1 = _rio_me1.getValue();
-        _rio_a.writeString(_rio_k1,"_rio_k1");
-        _rio_a.writeBuffer(_rio_v1,"_rio_v1");
+        _rio_a.writeString(_rio_k1, "_rio_k1");
+        _rio_a.writeBuffer(_rio_v1, "_rio_v1");
       }
-      _rio_a.endMap(mapFields,"mapFields");
+      _rio_a.endMap(mapFields, "mapFields");
     }
-    _rio_a.endRecord(this,_rio_tag);
+    _rio_a.endRecord(this, _rio_tag);
   }
-  private void deserializeWithoutFilter(final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag)
-  throws java.io.IOException {
+
+  private void deserializeWithoutFilter(
+      final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag)
+      throws java.io.IOException {
     _rio_a.startRecord(_rio_tag);
-    time=_rio_a.readLong("time");
+    time = _rio_a.readLong("time");
     {
       org.apache.hadoop.record.Index _rio_midx1 = _rio_a.startMap("mapFields");
-      mapFields=new java.util.TreeMap<String,org.apache.hadoop.record.Buffer>();
+      mapFields = new java.util.TreeMap<String, org.apache.hadoop.record.Buffer>();
       for (; !_rio_midx1.done(); _rio_midx1.incr()) {
         String _rio_k1;
-        _rio_k1=_rio_a.readString("_rio_k1");
+        _rio_k1 = _rio_a.readString("_rio_k1");
         org.apache.hadoop.record.Buffer _rio_v1;
-        _rio_v1=_rio_a.readBuffer("_rio_v1");
-        mapFields.put(_rio_k1,_rio_v1);
+        _rio_v1 = _rio_a.readBuffer("_rio_v1");
+        mapFields.put(_rio_k1, _rio_v1);
       }
       _rio_a.endMap("mapFields");
     }
     _rio_a.endRecord(_rio_tag);
   }
-  public void deserialize(final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag)
-  throws java.io.IOException {
+
+  public void deserialize(final org.apache.hadoop.record.RecordInput _rio_a,
+      final String _rio_tag) throws java.io.IOException {
     if (null == _rio_rtiFilter) {
       deserializeWithoutFilter(_rio_a, _rio_tag);
       return;
@@ -130,55 +160,63 @@
     // if we're here, we need to read based on version info
     _rio_a.startRecord(_rio_tag);
     setupRtiFields();
-    for (int _rio_i=0; _rio_i<_rio_rtiFilter.getFieldTypeInfos().size(); _rio_i++) {
+    for (int _rio_i = 0; _rio_i < _rio_rtiFilter.getFieldTypeInfos().size(); _rio_i++) {
       if (1 == _rio_rtiFilterFields[_rio_i]) {
-        time=_rio_a.readLong("time");
-      }
-      else if (2 == _rio_rtiFilterFields[_rio_i]) {
+        time = _rio_a.readLong("time");
+      } else if (2 == _rio_rtiFilterFields[_rio_i]) {
         {
-          org.apache.hadoop.record.Index _rio_midx1 = _rio_a.startMap("mapFields");
-          mapFields=new java.util.TreeMap<String,org.apache.hadoop.record.Buffer>();
+          org.apache.hadoop.record.Index _rio_midx1 = _rio_a
+              .startMap("mapFields");
+          mapFields = new java.util.TreeMap<String, org.apache.hadoop.record.Buffer>();
           for (; !_rio_midx1.done(); _rio_midx1.incr()) {
             String _rio_k1;
-            _rio_k1=_rio_a.readString("_rio_k1");
+            _rio_k1 = _rio_a.readString("_rio_k1");
             org.apache.hadoop.record.Buffer _rio_v1;
-            _rio_v1=_rio_a.readBuffer("_rio_v1");
-            mapFields.put(_rio_k1,_rio_v1);
+            _rio_v1 = _rio_a.readBuffer("_rio_v1");
+            mapFields.put(_rio_k1, _rio_v1);
           }
           _rio_a.endMap("mapFields");
         }
-      }
-      else {
-        java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = (java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo>)(_rio_rtiFilter.getFieldTypeInfos());
-        org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i).getFieldID(), typeInfos.get(_rio_i).getTypeID());
+      } else {
+        java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = (java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo>) (_rio_rtiFilter
+            .getFieldTypeInfos());
+        org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i)
+            .getFieldID(), typeInfos.get(_rio_i).getTypeID());
       }
     }
     _rio_a.endRecord(_rio_tag);
   }
-  public int compareTo (final Object _rio_peer_) throws ClassCastException {
+
+  public int compareTo(final Object _rio_peer_) throws ClassCastException {
     if (!(_rio_peer_ instanceof ChukwaRecordJT)) {
       throw new ClassCastException("Comparing different types of records.");
     }
     ChukwaRecordJT _rio_peer = (ChukwaRecordJT) _rio_peer_;
     int _rio_ret = 0;
-    _rio_ret = (time == _rio_peer.time)? 0 :((time<_rio_peer.time)?-1:1);
-    if (_rio_ret != 0) return _rio_ret;
+    _rio_ret = (time == _rio_peer.time) ? 0
+        : ((time < _rio_peer.time) ? -1 : 1);
+    if (_rio_ret != 0)
+      return _rio_ret;
     {
       java.util.Set<String> _rio_set10 = mapFields.keySet();
       java.util.Set<String> _rio_set20 = _rio_peer.mapFields.keySet();
       java.util.Iterator<String> _rio_miter10 = _rio_set10.iterator();
       java.util.Iterator<String> _rio_miter20 = _rio_set20.iterator();
-      for(; _rio_miter10.hasNext() && _rio_miter20.hasNext();) {
+      for (; _rio_miter10.hasNext() && _rio_miter20.hasNext();) {
         String _rio_k10 = _rio_miter10.next();
         String _rio_k20 = _rio_miter20.next();
         _rio_ret = _rio_k10.compareTo(_rio_k20);
-        if (_rio_ret != 0) { return _rio_ret; }
+        if (_rio_ret != 0) {
+          return _rio_ret;
+        }
       }
       _rio_ret = (_rio_set10.size() - _rio_set20.size());
     }
-    if (_rio_ret != 0) return _rio_ret;
+    if (_rio_ret != 0)
+      return _rio_ret;
     return _rio_ret;
   }
+
   public boolean equals(final Object _rio_peer_) {
     if (!(_rio_peer_ instanceof ChukwaRecordJT)) {
       return false;
@@ -188,118 +226,155 @@
     }
     ChukwaRecordJT _rio_peer = (ChukwaRecordJT) _rio_peer_;
     boolean _rio_ret = false;
-    _rio_ret = (time==_rio_peer.time);
-    if (!_rio_ret) return _rio_ret;
+    _rio_ret = (time == _rio_peer.time);
+    if (!_rio_ret)
+      return _rio_ret;
     _rio_ret = mapFields.equals(_rio_peer.mapFields);
-    if (!_rio_ret) return _rio_ret;
+    if (!_rio_ret)
+      return _rio_ret;
     return _rio_ret;
   }
+
   public Object clone() throws CloneNotSupportedException {
     ChukwaRecordJT _rio_other = new ChukwaRecordJT();
     _rio_other.time = this.time;
-    _rio_other.mapFields = (java.util.TreeMap<String,org.apache.hadoop.record.Buffer>) this.mapFields.clone();
+    _rio_other.mapFields = (java.util.TreeMap<String, org.apache.hadoop.record.Buffer>) this.mapFields
+        .clone();
     return _rio_other;
   }
+
   public int hashCode() {
     int _rio_result = 17;
     int _rio_ret;
-    _rio_ret = (int) (time^(time>>>32));
-    _rio_result = 37*_rio_result + _rio_ret;
+    _rio_ret = (int) (time ^ (time >>> 32));
+    _rio_result = 37 * _rio_result + _rio_ret;
     _rio_ret = mapFields.hashCode();
-    _rio_result = 37*_rio_result + _rio_ret;
+    _rio_result = 37 * _rio_result + _rio_ret;
     return _rio_result;
   }
+
   public static String signature() {
     return "LChukwaRecordJT(l{sB})";
   }
-  public static class Comparator extends org.apache.hadoop.record.RecordComparator {
+
+  public static class Comparator extends
+      org.apache.hadoop.record.RecordComparator {
     public Comparator() {
       super(ChukwaRecordJT.class);
     }
+
     static public int slurpRaw(byte[] b, int s, int l) {
       try {
         int os = s;
         {
           long i = org.apache.hadoop.record.Utils.readVLong(b, s);
           int z = org.apache.hadoop.record.Utils.getVIntSize(i);
-          s+=z; l-=z;
+          s += z;
+          l -= z;
         }
         {
           int mi1 = org.apache.hadoop.record.Utils.readVInt(b, s);
           int mz1 = org.apache.hadoop.record.Utils.getVIntSize(mi1);
-          s+=mz1; l-=mz1;
-          for (int midx1 = 0; midx1 < mi1; midx1++) {{
+          s += mz1;
+          l -= mz1;
+          for (int midx1 = 0; midx1 < mi1; midx1++) {
+            {
               int i = org.apache.hadoop.record.Utils.readVInt(b, s);
               int z = org.apache.hadoop.record.Utils.getVIntSize(i);
-              s+=(z+i); l-= (z+i);
+              s += (z + i);
+              l -= (z + i);
             }
             {
               int i = org.apache.hadoop.record.Utils.readVInt(b, s);
               int z = org.apache.hadoop.record.Utils.getVIntSize(i);
-              s += z+i; l -= (z+i);
+              s += z + i;
+              l -= (z + i);
             }
           }
         }
         return (os - s);
-      } catch(java.io.IOException e) {
+      } catch (java.io.IOException e) {
         throw new RuntimeException(e);
       }
     }
-    static public int compareRaw(byte[] b1, int s1, int l1,
-                                   byte[] b2, int s2, int l2) {
+
+    static public int compareRaw(byte[] b1, int s1, int l1, byte[] b2, int s2,
+        int l2) {
       try {
         int os1 = s1;
         {
           long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);
           long i2 = org.apache.hadoop.record.Utils.readVLong(b2, s2);
           if (i1 != i2) {
-            return ((i1-i2) < 0) ? -1 : 0;
+            return ((i1 - i2) < 0) ? -1 : 0;
           }
           int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
           int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
-          s1+=z1; s2+=z2; l1-=z1; l2-=z2;
+          s1 += z1;
+          s2 += z2;
+          l1 -= z1;
+          l2 -= z2;
         }
         {
           int mi11 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
           int mi21 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
           int mz11 = org.apache.hadoop.record.Utils.getVIntSize(mi11);
           int mz21 = org.apache.hadoop.record.Utils.getVIntSize(mi21);
-          s1+=mz11; s2+=mz21; l1-=mz11; l2-=mz21;
-          for (int midx1 = 0; midx1 < mi11 && midx1 < mi21; midx1++) {{
+          s1 += mz11;
+          s2 += mz21;
+          l1 -= mz11;
+          l2 -= mz21;
+          for (int midx1 = 0; midx1 < mi11 && midx1 < mi21; midx1++) {
+            {
               int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
               int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
               int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
               int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
-              s1+=z1; s2+=z2; l1-=z1; l2-=z2;
-              int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,i1,b2,s2,i2);
-              if (r1 != 0) { return (r1<0)?-1:0; }
-              s1+=i1; s2+=i2; l1-=i1; l1-=i2;
+              s1 += z1;
+              s2 += z2;
+              l1 -= z1;
+              l2 -= z2;
+              int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1,
+                  b2, s2, i2);
+              if (r1 != 0) {
+                return (r1 < 0) ? -1 : 0;
+              }
+              s1 += i1;
+              s2 += i2;
+              l1 -= i1;
+              l1 -= i2;
             }
             {
               int i = org.apache.hadoop.record.Utils.readVInt(b1, s1);
               int z = org.apache.hadoop.record.Utils.getVIntSize(i);
-              s1 += z+i; l1 -= (z+i);
+              s1 += z + i;
+              l1 -= (z + i);
             }
             {
               int i = org.apache.hadoop.record.Utils.readVInt(b2, s2);
               int z = org.apache.hadoop.record.Utils.getVIntSize(i);
-              s2 += z+i; l2 -= (z+i);
+              s2 += z + i;
+              l2 -= (z + i);
             }
           }
-          if (mi11 != mi21) { return (mi11<mi21)?-1:0; }
+          if (mi11 != mi21) {
+            return (mi11 < mi21) ? -1 : 0;
+          }
         }
         return (os1 - s1);
-      } catch(java.io.IOException e) {
+      } catch (java.io.IOException e) {
         throw new RuntimeException(e);
       }
     }
-    public int compare(byte[] b1, int s1, int l1,
-                         byte[] b2, int s2, int l2) {
-      int ret = compareRaw(b1,s1,l1,b2,s2,l2);
-      return (ret == -1)? -1 : ((ret==0)? 1 : 0);}
+
+    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+      int ret = compareRaw(b1, s1, l1, b2, s2, l2);
+      return (ret == -1) ? -1 : ((ret == 0) ? 1 : 0);
+    }
   }
-  
+
   static {
-    org.apache.hadoop.record.RecordComparator.define(ChukwaRecordJT.class, new Comparator());
+    org.apache.hadoop.record.RecordComparator.define(ChukwaRecordJT.class,
+        new Comparator());
   }
 }
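
For reference, a minimal sketch (not part of this commit) of how the reformatted ChukwaRecordJT above can be round-tripped through Hadoop's record I/O. The Buffer, BinaryRecordOutput and BinaryRecordInput classes are assumed to come from org.apache.hadoop.record with stream-based constructors; the "csource" field value is purely illustrative.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.TreeMap;
import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordJT;
import org.apache.hadoop.record.BinaryRecordInput;
import org.apache.hadoop.record.BinaryRecordOutput;
import org.apache.hadoop.record.Buffer;

public class ChukwaRecordJTRoundTrip {
  public static void main(String[] args) throws Exception {
    // build a record with one map field
    TreeMap<String, Buffer> fields = new TreeMap<String, Buffer>();
    fields.put("csource", new Buffer("host1".getBytes("UTF-8")));
    ChukwaRecordJT original = new ChukwaRecordJT(System.currentTimeMillis(), fields);

    // serialize to a byte array using the generated serialize() method
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    original.serialize(new BinaryRecordOutput(bos), "rec");

    // deserialize into a fresh instance and compare
    ChukwaRecordJT copy = new ChukwaRecordJT();
    copy.deserialize(
        new BinaryRecordInput(new ByteArrayInputStream(bos.toByteArray())), "rec");
    System.out.println(original.equals(copy)); // expected: true
  }
}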

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java Wed Mar 11 22:39:26 2009
@@ -1,49 +1,63 @@
 // File generated by hadoop record compiler. Do not edit.
 package org.apache.hadoop.chukwa.extraction.engine;
 
+
 public class ChukwaRecordKey extends org.apache.hadoop.record.Record {
   private static final org.apache.hadoop.record.meta.RecordTypeInfo _rio_recTypeInfo;
   private static org.apache.hadoop.record.meta.RecordTypeInfo _rio_rtiFilter;
   private static int[] _rio_rtiFilterFields;
   static {
-    _rio_recTypeInfo = new org.apache.hadoop.record.meta.RecordTypeInfo("ChukwaRecordKey");
-    _rio_recTypeInfo.addField("reduceType", org.apache.hadoop.record.meta.TypeID.StringTypeID);
-    _rio_recTypeInfo.addField("key", org.apache.hadoop.record.meta.TypeID.StringTypeID);
+    _rio_recTypeInfo = new org.apache.hadoop.record.meta.RecordTypeInfo(
+        "ChukwaRecordKey");
+    _rio_recTypeInfo.addField("reduceType",
+        org.apache.hadoop.record.meta.TypeID.StringTypeID);
+    _rio_recTypeInfo.addField("key",
+        org.apache.hadoop.record.meta.TypeID.StringTypeID);
   }
-  
+
   private String reduceType;
   private String key;
-  public ChukwaRecordKey() { }
-  public ChukwaRecordKey(
-    final String reduceType,
-    final String key) {
+
+  public ChukwaRecordKey() {
+  }
+
+  public ChukwaRecordKey(final String reduceType, final String key) {
     this.reduceType = reduceType;
     this.key = key;
   }
+
   public static org.apache.hadoop.record.meta.RecordTypeInfo getTypeInfo() {
     return _rio_recTypeInfo;
   }
-  public static void setTypeFilter(org.apache.hadoop.record.meta.RecordTypeInfo rti) {
-    if (null == rti) return;
+
+  public static void setTypeFilter(
+      org.apache.hadoop.record.meta.RecordTypeInfo rti) {
+    if (null == rti)
+      return;
     _rio_rtiFilter = rti;
     _rio_rtiFilterFields = null;
   }
-  private static void setupRtiFields()
-  {
-    if (null == _rio_rtiFilter) return;
+
+  private static void setupRtiFields() {
+    if (null == _rio_rtiFilter)
+      return;
     // we may already have done this
-    if (null != _rio_rtiFilterFields) return;
+    if (null != _rio_rtiFilterFields)
+      return;
     int _rio_i, _rio_j;
-    _rio_rtiFilterFields = new int [_rio_rtiFilter.getFieldTypeInfos().size()];
-    for (_rio_i=0; _rio_i<_rio_rtiFilterFields.length; _rio_i++) {
+    _rio_rtiFilterFields = new int[_rio_rtiFilter.getFieldTypeInfos().size()];
+    for (_rio_i = 0; _rio_i < _rio_rtiFilterFields.length; _rio_i++) {
       _rio_rtiFilterFields[_rio_i] = 0;
     }
-    java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_itFilter = _rio_rtiFilter.getFieldTypeInfos().iterator();
-    _rio_i=0;
+    java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_itFilter = _rio_rtiFilter
+        .getFieldTypeInfos().iterator();
+    _rio_i = 0;
     while (_rio_itFilter.hasNext()) {
-      org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfoFilter = _rio_itFilter.next();
-      java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_it = _rio_recTypeInfo.getFieldTypeInfos().iterator();
-      _rio_j=1;
+      org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfoFilter = _rio_itFilter
+          .next();
+      java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_it = _rio_recTypeInfo
+          .getFieldTypeInfos().iterator();
+      _rio_j = 1;
       while (_rio_it.hasNext()) {
         org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfo = _rio_it.next();
         if (_rio_tInfo.equals(_rio_tInfoFilter)) {
@@ -55,34 +69,42 @@
       _rio_i++;
     }
   }
+
   public String getReduceType() {
     return reduceType;
   }
+
   public void setReduceType(final String reduceType) {
-    this.reduceType=reduceType;
+    this.reduceType = reduceType;
   }
+
   public String getKey() {
     return key;
   }
+
   public void setKey(final String key) {
-    this.key=key;
+    this.key = key;
   }
-  public void serialize(final org.apache.hadoop.record.RecordOutput _rio_a, final String _rio_tag)
-  throws java.io.IOException {
-    _rio_a.startRecord(this,_rio_tag);
-    _rio_a.writeString(reduceType,"reduceType");
-    _rio_a.writeString(key,"key");
-    _rio_a.endRecord(this,_rio_tag);
+
+  public void serialize(final org.apache.hadoop.record.RecordOutput _rio_a,
+      final String _rio_tag) throws java.io.IOException {
+    _rio_a.startRecord(this, _rio_tag);
+    _rio_a.writeString(reduceType, "reduceType");
+    _rio_a.writeString(key, "key");
+    _rio_a.endRecord(this, _rio_tag);
   }
-  private void deserializeWithoutFilter(final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag)
-  throws java.io.IOException {
+
+  private void deserializeWithoutFilter(
+      final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag)
+      throws java.io.IOException {
     _rio_a.startRecord(_rio_tag);
-    reduceType=_rio_a.readString("reduceType");
-    key=_rio_a.readString("key");
+    reduceType = _rio_a.readString("reduceType");
+    key = _rio_a.readString("key");
     _rio_a.endRecord(_rio_tag);
   }
-  public void deserialize(final org.apache.hadoop.record.RecordInput _rio_a, final String _rio_tag)
-  throws java.io.IOException {
+
+  public void deserialize(final org.apache.hadoop.record.RecordInput _rio_a,
+      final String _rio_tag) throws java.io.IOException {
     if (null == _rio_rtiFilter) {
       deserializeWithoutFilter(_rio_a, _rio_tag);
       return;
@@ -90,32 +112,36 @@
     // if we're here, we need to read based on version info
     _rio_a.startRecord(_rio_tag);
     setupRtiFields();
-    for (int _rio_i=0; _rio_i<_rio_rtiFilter.getFieldTypeInfos().size(); _rio_i++) {
+    for (int _rio_i = 0; _rio_i < _rio_rtiFilter.getFieldTypeInfos().size(); _rio_i++) {
       if (1 == _rio_rtiFilterFields[_rio_i]) {
-        reduceType=_rio_a.readString("reduceType");
-      }
-      else if (2 == _rio_rtiFilterFields[_rio_i]) {
-        key=_rio_a.readString("key");
-      }
-      else {
-        java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = (java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo>)(_rio_rtiFilter.getFieldTypeInfos());
-        org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i).getFieldID(), typeInfos.get(_rio_i).getTypeID());
+        reduceType = _rio_a.readString("reduceType");
+      } else if (2 == _rio_rtiFilterFields[_rio_i]) {
+        key = _rio_a.readString("key");
+      } else {
+        java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = (java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo>) (_rio_rtiFilter
+            .getFieldTypeInfos());
+        org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i)
+            .getFieldID(), typeInfos.get(_rio_i).getTypeID());
       }
     }
     _rio_a.endRecord(_rio_tag);
   }
-  public int compareTo (final Object _rio_peer_) throws ClassCastException {
+
+  public int compareTo(final Object _rio_peer_) throws ClassCastException {
     if (!(_rio_peer_ instanceof ChukwaRecordKey)) {
       throw new ClassCastException("Comparing different types of records.");
     }
     ChukwaRecordKey _rio_peer = (ChukwaRecordKey) _rio_peer_;
     int _rio_ret = 0;
     _rio_ret = reduceType.compareTo(_rio_peer.reduceType);
-    if (_rio_ret != 0) return _rio_ret;
+    if (_rio_ret != 0)
+      return _rio_ret;
     _rio_ret = key.compareTo(_rio_peer.key);
-    if (_rio_ret != 0) return _rio_ret;
+    if (_rio_ret != 0)
+      return _rio_ret;
     return _rio_ret;
   }
+
   public boolean equals(final Object _rio_peer_) {
     if (!(_rio_peer_ instanceof ChukwaRecordKey)) {
       return false;
@@ -126,53 +152,64 @@
     ChukwaRecordKey _rio_peer = (ChukwaRecordKey) _rio_peer_;
     boolean _rio_ret = false;
     _rio_ret = reduceType.equals(_rio_peer.reduceType);
-    if (!_rio_ret) return _rio_ret;
+    if (!_rio_ret)
+      return _rio_ret;
     _rio_ret = key.equals(_rio_peer.key);
-    if (!_rio_ret) return _rio_ret;
+    if (!_rio_ret)
+      return _rio_ret;
     return _rio_ret;
   }
+
   public Object clone() throws CloneNotSupportedException {
     ChukwaRecordKey _rio_other = new ChukwaRecordKey();
     _rio_other.reduceType = this.reduceType;
     _rio_other.key = this.key;
     return _rio_other;
   }
+
   public int hashCode() {
     int _rio_result = 17;
     int _rio_ret;
     _rio_ret = reduceType.hashCode();
-    _rio_result = 37*_rio_result + _rio_ret;
+    _rio_result = 37 * _rio_result + _rio_ret;
     _rio_ret = key.hashCode();
-    _rio_result = 37*_rio_result + _rio_ret;
+    _rio_result = 37 * _rio_result + _rio_ret;
     return _rio_result;
   }
+
   public static String signature() {
     return "LChukwaRecordKey(ss)";
   }
-  public static class Comparator extends org.apache.hadoop.record.RecordComparator {
+
+  public static class Comparator extends
+      org.apache.hadoop.record.RecordComparator {
     public Comparator() {
       super(ChukwaRecordKey.class);
     }
+
     static public int slurpRaw(byte[] b, int s, int l) {
       try {
         int os = s;
         {
           int i = org.apache.hadoop.record.Utils.readVInt(b, s);
           int z = org.apache.hadoop.record.Utils.getVIntSize(i);
-          s+=(z+i); l-= (z+i);
+          s += (z + i);
+          l -= (z + i);
         }
         {
           int i = org.apache.hadoop.record.Utils.readVInt(b, s);
           int z = org.apache.hadoop.record.Utils.getVIntSize(i);
-          s+=(z+i); l-= (z+i);
+          s += (z + i);
+          l -= (z + i);
         }
         return (os - s);
-      } catch(java.io.IOException e) {
+      } catch (java.io.IOException e) {
         throw new RuntimeException(e);
       }
     }
-    static public int compareRaw(byte[] b1, int s1, int l1,
-                                   byte[] b2, int s2, int l2) {
+
+    static public int compareRaw(byte[] b1, int s1, int l1, byte[] b2, int s2,
+        int l2) {
       try {
         int os1 = s1;
         {
@@ -180,33 +217,53 @@
           int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
           int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
           int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
-          s1+=z1; s2+=z2; l1-=z1; l2-=z2;
-          int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,i1,b2,s2,i2);
-          if (r1 != 0) { return (r1<0)?-1:0; }
-          s1+=i1; s2+=i2; l1-=i1; l1-=i2;
+          s1 += z1;
+          s2 += z2;
+          l1 -= z1;
+          l2 -= z2;
+          int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2,
+              s2, i2);
+          if (r1 != 0) {
+            return (r1 < 0) ? -1 : 0;
+          }
+          s1 += i1;
+          s2 += i2;
+          l1 -= i1;
+          l1 -= i2;
         }
         {
           int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
           int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
           int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
           int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
-          s1+=z1; s2+=z2; l1-=z1; l2-=z2;
-          int r1 = org.apache.hadoop.record.Utils.compareBytes(b1,s1,i1,b2,s2,i2);
-          if (r1 != 0) { return (r1<0)?-1:0; }
-          s1+=i1; s2+=i2; l1-=i1; l1-=i2;
+          s1 += z1;
+          s2 += z2;
+          l1 -= z1;
+          l2 -= z2;
+          int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2,
+              s2, i2);
+          if (r1 != 0) {
+            return (r1 < 0) ? -1 : 0;
+          }
+          s1 += i1;
+          s2 += i2;
+          l1 -= i1;
+          l1 -= i2;
         }
         return (os1 - s1);
-      } catch(java.io.IOException e) {
+      } catch (java.io.IOException e) {
         throw new RuntimeException(e);
       }
     }
-    public int compare(byte[] b1, int s1, int l1,
-                         byte[] b2, int s2, int l2) {
-      int ret = compareRaw(b1,s1,l1,b2,s2,l2);
-      return (ret == -1)? -1 : ((ret==0)? 1 : 0);}
+
+    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+      int ret = compareRaw(b1, s1, l1, b2, s2, l2);
+      return (ret == -1) ? -1 : ((ret == 0) ? 1 : 0);
+    }
   }
-  
+
   static {
-    org.apache.hadoop.record.RecordComparator.define(ChukwaRecordKey.class, new Comparator());
+    org.apache.hadoop.record.RecordComparator.define(ChukwaRecordKey.class,
+        new Comparator());
   }
 }
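
For reference, a minimal sketch (not part of this commit) of the ordering the generated compareTo() above implies: keys sort by reduceType first and by key second. The reduce-type and key strings below are purely illustrative.

import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;

public class ChukwaRecordKeyOrderDemo {
  public static void main(String[] args) {
    ChukwaRecordKey a = new ChukwaRecordKey("SystemMetrics", "host1/100");
    ChukwaRecordKey b = new ChukwaRecordKey("SystemMetrics", "host1/200");
    ChukwaRecordKey c = new ChukwaRecordKey("MRJob", "host1/100");

    System.out.println(a.compareTo(b) < 0); // true: same reduceType, so keys are compared
    System.out.println(a.compareTo(c) > 0); // true: "SystemMetrics" sorts after "MRJob"
    System.out.println(a.equals(new ChukwaRecordKey("SystemMetrics", "host1/100"))); // true
  }
}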

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaSearchResult.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaSearchResult.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaSearchResult.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaSearchResult.java Wed Mar 11 22:39:26 2009
@@ -18,34 +18,28 @@
 
 package org.apache.hadoop.chukwa.extraction.engine;
 
+
 import java.util.List;
 import java.util.TreeMap;
 
+public class ChukwaSearchResult implements SearchResult {
+  private TreeMap<Long, List<Record>> records;
+  private Token token = null;
+
+  public TreeMap<Long, List<Record>> getRecords() {
+    return records;
+  }
+
+  public void setRecords(TreeMap<Long, List<Record>> records) {
+    this.records = records;
+  }
+
+  public Token getToken() {
+    return token;
+  }
+
+  public void setToken(Token token) {
+    this.token = token;
+  }
 
-
-public class ChukwaSearchResult implements SearchResult
-{
-	private TreeMap<Long, List<Record>> records;
-	private Token token = null;
-	
-	public TreeMap<Long, List<Record>> getRecords()
-	{
-		return records;
-	}
-
-	public void setRecords(TreeMap<Long, List<Record>> records)
-	{
-		this.records = records;
-	}
-
-	public Token getToken()
-	{
-		return token;
-	}
-
-	public void setToken(Token token)
-	{
-		this.token = token;
-	}
-	
 }

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaSearchService.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaSearchService.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaSearchService.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/ChukwaSearchService.java Wed Mar 11 22:39:26 2009
@@ -18,30 +18,27 @@
 
 package org.apache.hadoop.chukwa.extraction.engine;
 
+
 import java.util.List;
 import java.util.TreeMap;
-
 import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSource;
 import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
 import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceFactory;
 
-public class ChukwaSearchService implements SearchService
-{
-	private DataSourceFactory dataSourceFactory = DataSourceFactory.getInstance();
-	
-	public SearchResult  search(String cluster,String[] dataSources,long t0,long t1,String filter,Token token)
-	throws DataSourceException
-	{
-		SearchResult result = new ChukwaSearchResult();
-		
-		TreeMap<Long, List<Record>> records = new TreeMap<Long,List<Record>> ();
-		result.setRecords(records);
-		
-		for(int i=0;i<dataSources.length;i++)
-		{
-			DataSource ds = dataSourceFactory.getDataSource(dataSources[i]);
-			ds.search(result, cluster, dataSources[i], t0, t1, filter,token);
-		}
-		return result;
-	}
+public class ChukwaSearchService implements SearchService {
+  private DataSourceFactory dataSourceFactory = DataSourceFactory.getInstance();
+
+  public SearchResult search(String cluster, String[] dataSources, long t0,
+      long t1, String filter, Token token) throws DataSourceException {
+    SearchResult result = new ChukwaSearchResult();
+
+    TreeMap<Long, List<Record>> records = new TreeMap<Long, List<Record>>();
+    result.setRecords(records);
+
+    for (int i = 0; i < dataSources.length; i++) {
+      DataSource ds = dataSourceFactory.getDataSource(dataSources[i]);
+      ds.search(result, cluster, dataSources[i], t0, t1, filter, token);
+    }
+    return result;
+  }
 }

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/Record.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/Record.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/Record.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/Record.java Wed Mar 11 22:39:26 2009
@@ -18,28 +18,32 @@
 
 package org.apache.hadoop.chukwa.extraction.engine;
 
-public interface Record
-{
-	public static final String bodyField = "body";
-	public static final String sourceField = "csource";
-	public static final String applicationField = "capp";
-	public static final String tagsField = "ctags";
+
+public interface Record {
+  public static final String bodyField = "body";
+  public static final String sourceField = "csource";
+  public static final String applicationField = "capp";
+  public static final String tagsField = "ctags";
   public static final String chunkDataField = "cchunkData";
   public static final String chunkExceptionField = "cchunkException";
-  
-	public static final String classField = "class";
-	public static final String logLevelField = "logLevel";
-	
-//	public static final String streamNameField = "sname";
-//	public static final String typeField = "type";
-	
-//	public static final String rawField = "raw";
-	
-//	public static final String fieldSeparator = ":";
-	
-	public long getTime();
-	public void add(String key, String value);
-	public String[] getFields();
-	public String getValue(String field);
-	public String toString();
+
+  public static final String classField = "class";
+  public static final String logLevelField = "logLevel";
+
+  // public static final String streamNameField = "sname";
+  // public static final String typeField = "type";
+
+  // public static final String rawField = "raw";
+
+  // public static final String fieldSeparator = ":";
+
+  public long getTime();
+
+  public void add(String key, String value);
+
+  public String[] getFields();
+
+  public String getValue(String field);
+
+  public String toString();
 }

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/RecordUtil.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/RecordUtil.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/RecordUtil.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/RecordUtil.java Wed Mar 11 22:39:26 2009
@@ -1,23 +1,22 @@
 package org.apache.hadoop.chukwa.extraction.engine;
 
+
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-public class RecordUtil
-{
-	static  Pattern clusterPattern = Pattern.compile("(.*)?cluster=\"(.*?)\"(.*)?");
-	public static String getClusterName(Record record)
-	{
-		String tags = record.getValue(Record.tagsField);
-		if (tags!= null)
-		{
-			Matcher matcher = clusterPattern.matcher(tags);
-			if (matcher.matches())
-			{
-				return matcher.group(2);
-			}
-		}
-			
-		return "undefined";
-	}
+public class RecordUtil {
+  static Pattern clusterPattern = Pattern
+      .compile("(.*)?cluster=\"(.*?)\"(.*)?");
+
+  public static String getClusterName(Record record) {
+    String tags = record.getValue(Record.tagsField);
+    if (tags != null) {
+      Matcher matcher = clusterPattern.matcher(tags);
+      if (matcher.matches()) {
+        return matcher.group(2);
+      }
+    }
+
+    return "undefined";
+  }
 }
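
For reference, a minimal sketch (not part of this commit) of what RecordUtil.getClusterName() above does with a record's "ctags" value: it applies clusterPattern and returns capture group 2, or "undefined" when the tags do not match. The tag strings below are purely illustrative.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ClusterTagDemo {
  public static void main(String[] args) {
    // same pattern as RecordUtil.clusterPattern
    Pattern clusterPattern = Pattern.compile("(.*)?cluster=\"(.*?)\"(.*)?");

    String tags = "record=\"raw\" cluster=\"demo\" host=\"h1\""; // hypothetical ctags value
    Matcher matcher = clusterPattern.matcher(tags);
    System.out.println(matcher.matches() ? matcher.group(2) : "undefined"); // prints: demo

    String noCluster = "record=\"raw\""; // no cluster tag present
    System.out.println(clusterPattern.matcher(noCluster).matches()
        ? "match" : "undefined"); // prints: undefined
  }
}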

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/SearchResult.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/SearchResult.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/SearchResult.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/SearchResult.java Wed Mar 11 22:39:26 2009
@@ -18,16 +18,16 @@
 
 package org.apache.hadoop.chukwa.extraction.engine;
 
+
 import java.util.List;
 import java.util.TreeMap;
 
+public interface SearchResult {
+  public void setToken(Token token);
 
+  public Token getToken();
 
+  public TreeMap<Long, List<Record>> getRecords();
 
-public interface SearchResult
-{
-	public void setToken(Token token);
-	public Token getToken();
-	public TreeMap<Long, List<Record>> getRecords();
-	public void setRecords(TreeMap<Long, List<Record>> records);
+  public void setRecords(TreeMap<Long, List<Record>> records);
 }

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/SearchService.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/SearchService.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/SearchService.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/SearchService.java Wed Mar 11 22:39:26 2009
@@ -18,11 +18,10 @@
 
 package org.apache.hadoop.chukwa.extraction.engine;
 
-import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
 
+import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
 
-public interface SearchService
-{
-	public SearchResult search(String cluster,String[] dataSources,long t0,long t1,String filter,Token token)
-	throws DataSourceException;
+public interface SearchService {
+  public SearchResult search(String cluster, String[] dataSources, long t0,
+      long t1, String filter, Token token) throws DataSourceException;
 }

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/Token.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/Token.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/Token.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/Token.java Wed Mar 11 22:39:26 2009
@@ -1,7 +1,7 @@
 package org.apache.hadoop.chukwa.extraction.engine;
 
-public class Token
-{
-	public String key = null;
-	public boolean hasMore = false;
+
+public class Token {
+  public String key = null;
+  public boolean hasMore = false;
 }

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSource.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSource.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSource.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSource.java Wed Mar 11 22:39:26 2009
@@ -18,20 +18,16 @@
 
 package org.apache.hadoop.chukwa.extraction.engine.datasource;
 
+
 import org.apache.hadoop.chukwa.extraction.engine.SearchResult;
 import org.apache.hadoop.chukwa.extraction.engine.Token;
 
+public interface DataSource {
+
+  public SearchResult search(SearchResult result, String cluster,
+      String dataSource, long t0, long t1, String filter, Token token)
+      throws DataSourceException;
 
+  public boolean isThreadSafe();
 
-public interface DataSource
-{
-	 
-	public SearchResult
-		search(	SearchResult result,String cluster,String dataSource,
-				long t0,long t1,
-				String filter,
-				Token token)
-		throws DataSourceException;
-	public boolean isThreadSafe();
-	
 }

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceException.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceException.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceException.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceException.java Wed Mar 11 22:39:26 2009
@@ -18,30 +18,27 @@
 
 package org.apache.hadoop.chukwa.extraction.engine.datasource;
 
-public class DataSourceException extends Exception
-{
 
-	/**
+public class DataSourceException extends Exception {
+
+  /**
 	 * 
 	 */
-	private static final long serialVersionUID = -3648370237965886781L;
+  private static final long serialVersionUID = -3648370237965886781L;
 
-	public DataSourceException()
-	{}
+  public DataSourceException() {
+  }
 
-	public DataSourceException(String message)
-	{
-		super(message);
-	}
-
-	public DataSourceException(Throwable cause)
-	{
-		super(cause);
-	}
-
-	public DataSourceException(String message, Throwable cause)
-	{
-		super(message, cause);
-	}
+  public DataSourceException(String message) {
+    super(message);
+  }
+
+  public DataSourceException(Throwable cause) {
+    super(cause);
+  }
+
+  public DataSourceException(String message, Throwable cause) {
+    super(message, cause);
+  }
 
 }

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceFactory.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceFactory.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceFactory.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DataSourceFactory.java Wed Mar 11 22:39:26 2009
@@ -18,54 +18,45 @@
 
 package org.apache.hadoop.chukwa.extraction.engine.datasource;
 
-import java.util.HashMap;
 
+import java.util.HashMap;
 import org.apache.hadoop.chukwa.extraction.engine.datasource.database.DatabaseDS;
 import org.apache.hadoop.chukwa.extraction.engine.datasource.record.ChukwaRecordDataSource;
 
-public class DataSourceFactory
-{
-	private static Object lock = new Object();
-	private static DataSourceFactory factory = null;
-	private HashMap<String, DataSource> dataSources = new HashMap<String, DataSource>();
-	
-	private DataSourceFactory()
-	{
-		// TODO load from config Name + class + threadSafe? 
-		
-		DataSource databaseDS = new DatabaseDS();
-		dataSources.put("MRJob", databaseDS);
-		dataSources.put("HodJob", databaseDS);
-		dataSources.put("QueueInfo", databaseDS);
-	
-	}
-	
-	public static DataSourceFactory getInstance()
-	{
-		synchronized(lock)
-		{
-			if ( factory == null)
-			{
-				factory = new DataSourceFactory();
-			}
-		}
-		return factory;
-	}
-	
-	public DataSource getDataSource(String datasourceName)
-	throws DataSourceException
-	{
-		if (dataSources.containsKey(datasourceName))
-		{
-			return dataSources.get(datasourceName);
-		}
-		else
-		{
-			DataSource hsdfsDS = new ChukwaRecordDataSource();
-			dataSources.put(datasourceName, hsdfsDS);
-			return hsdfsDS;
-			//TODO proto only!
-			// throw new DataSourceException("Unknown datasource");
-		}	
-	}
+public class DataSourceFactory {
+  private static Object lock = new Object();
+  private static DataSourceFactory factory = null;
+  private HashMap<String, DataSource> dataSources = new HashMap<String, DataSource>();
+
+  private DataSourceFactory() {
+    // TODO load from config Name + class + threadSafe?
+
+    DataSource databaseDS = new DatabaseDS();
+    dataSources.put("MRJob", databaseDS);
+    dataSources.put("HodJob", databaseDS);
+    dataSources.put("QueueInfo", databaseDS);
+
+  }
+
+  public static DataSourceFactory getInstance() {
+    synchronized (lock) {
+      if (factory == null) {
+        factory = new DataSourceFactory();
+      }
+    }
+    return factory;
+  }
+
+  public DataSource getDataSource(String datasourceName)
+      throws DataSourceException {
+    if (dataSources.containsKey(datasourceName)) {
+      return dataSources.get(datasourceName);
+    } else {
+      DataSource hsdfsDS = new ChukwaRecordDataSource();
+      dataSources.put(datasourceName, hsdfsDS);
+      return hsdfsDS;
+      // TODO proto only!
+      // throw new DataSourceException("Unknown datasource");
+    }
+  }
 }

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DsDirectory.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DsDirectory.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DsDirectory.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/DsDirectory.java Wed Mar 11 22:39:26 2009
@@ -18,10 +18,10 @@
 
 package org.apache.hadoop.chukwa.extraction.engine.datasource;
 
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-
 import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
 import org.apache.hadoop.chukwa.inputtools.mdl.DataConfig;
 import org.apache.hadoop.conf.Configuration;
@@ -29,81 +29,62 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
-public class DsDirectory
-{
-	private static Object lock = new Object();
-	private static DsDirectory dsDirectory = null;
-	private static final String[] emptyArray = new String[0];
-	
-	
-	private String rootFolder = null;
-	private DataConfig dataConfig = null;
-	
-	private static FileSystem fs = null;
-	private static Configuration conf = null;
-	
-	private DsDirectory()
-	{
-		dataConfig = new DataConfig();
-		conf = new ChukwaConfiguration();
-		try
-		{
-			fs = FileSystem.get(conf);
-		} 
-		catch (IOException e)
-		{
-			e.printStackTrace();
-		}
-		rootFolder = dataConfig.get("chukwa.engine.dsDirectory.rootFolder");
-		if (!rootFolder.endsWith("/"))
-		{
-			rootFolder +="/";
-		}
-	}
-	
-	public static DsDirectory getInstance()
-	{
-		synchronized(lock)
-		{
-			if (dsDirectory == null)
-			{
-				dsDirectory = new DsDirectory();
-			}
-		}
-		return dsDirectory;
-	}
-	
-	public String[] list(String cluster)
-	throws DataSourceException
-	{
-		List<String> datasources = new ArrayList<String>();
-		try
-		{
-			FileStatus[] fileStat = fs.listStatus(new Path(rootFolder+cluster));
-			
-			for (FileStatus fstat : fileStat)
-			{
-				if (fstat.isDir())
-				{
-					datasources.add(fstat.getPath().getName());
-				}
-			}
-		} 
-		catch (IOException e)
-		{
-			e.printStackTrace();
-			throw new DataSourceException(e);
-		}
-		return datasources.toArray(emptyArray);
-	}
-	
-	public static void main(String[] args) throws DataSourceException
-	{
-		DsDirectory dsd = DsDirectory.getInstance();
-		String[] dss = dsd.list("unknown");
-		for (String d : dss)
-		{
-			System.out.println(d);
-		}
-	}
+public class DsDirectory {
+  private static Object lock = new Object();
+  private static DsDirectory dsDirectory = null;
+  private static final String[] emptyArray = new String[0];
+
+  private String rootFolder = null;
+  private DataConfig dataConfig = null;
+
+  private static FileSystem fs = null;
+  private static Configuration conf = null;
+
+  private DsDirectory() {
+    dataConfig = new DataConfig();
+    conf = new ChukwaConfiguration();
+    try {
+      fs = FileSystem.get(conf);
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    rootFolder = dataConfig.get("chukwa.engine.dsDirectory.rootFolder");
+    if (!rootFolder.endsWith("/")) {
+      rootFolder += "/";
+    }
+  }
+
+  public static DsDirectory getInstance() {
+    synchronized (lock) {
+      if (dsDirectory == null) {
+        dsDirectory = new DsDirectory();
+      }
+    }
+    return dsDirectory;
+  }
+
+  public String[] list(String cluster) throws DataSourceException {
+    List<String> datasources = new ArrayList<String>();
+    try {
+      FileStatus[] fileStat = fs.listStatus(new Path(rootFolder + cluster));
+
+      for (FileStatus fstat : fileStat) {
+        if (fstat.isDir()) {
+          datasources.add(fstat.getPath().getName());
+        }
+      }
+    } catch (IOException e) {
+      e.printStackTrace();
+      throw new DataSourceException(e);
+    }
+    return datasources.toArray(emptyArray);
+  }
+
+  public static void main(String[] args) throws DataSourceException {
+    DsDirectory dsd = DsDirectory.getInstance();
+    String[] dss = dsd.list("unknown");
+    for (String d : dss) {
+      System.out.println(d);
+    }
+  }
 }

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/database/DatabaseDS.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/database/DatabaseDS.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/database/DatabaseDS.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/database/DatabaseDS.java Wed Mar 11 22:39:26 2009
@@ -20,6 +20,7 @@
 // From event_viewer.jsp
 package org.apache.hadoop.chukwa.extraction.engine.datasource.database;
 
+
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
@@ -30,146 +31,120 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.TreeMap;
-
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
 import org.apache.hadoop.chukwa.extraction.engine.Record;
 import org.apache.hadoop.chukwa.extraction.engine.SearchResult;
 import org.apache.hadoop.chukwa.extraction.engine.Token;
 import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSource;
 import org.apache.hadoop.chukwa.extraction.engine.datasource.DataSourceException;
+
 //import org.apache.hadoop.chukwa.hicc.ClusterConfig;
 
-public class DatabaseDS implements DataSource
-{
-		
-	public SearchResult search(SearchResult result, String cluster,
-			String dataSource, long t0, long t1, String filter,Token token)
-			throws DataSourceException
-	{
-		SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd kk:mm:ss");
-		String timeField = null;
-		TreeMap<Long, List<Record>> records = result.getRecords();
-		
-		 if(cluster==null)
-		 {
-			   cluster="demo";
-		  }
-		
-		if (dataSource.equalsIgnoreCase("MRJob"))
-		{
-			timeField = "LAUNCH_TIME";
-		}
-		else  if (dataSource.equalsIgnoreCase("HodJob"))
-		{
-			timeField = "StartTime";
-		}
-		else if (dataSource.equalsIgnoreCase("QueueInfo"))
-		{
-			timeField = "timestamp";
-		}
-		else
-		{
-			timeField = "timestamp";
-		}
-		String startS = formatter.format(t0);
-	    String endS = formatter.format(t1);
-	    Statement stmt = null;
-	    ResultSet rs = null;
-	    try
-	    {
-	    	String dateclause = timeField + " >= '" + startS 
-	    		+ "' and " + timeField + " <= '" + endS + "'";
-	    	
-		       //ClusterConfig cc = new ClusterConfig();
-		       String jdbc = ""; //cc.getURL(cluster);
-		       
-			   Connection conn = DriverManager.getConnection(jdbc);
-			   
-			   stmt = conn.createStatement();
-			   String query = "";
-			   query = "select * from "+dataSource+" where "+dateclause+";";
-			   rs = stmt.executeQuery(query);
-			   if (stmt.execute(query)) 
-			   {
-			       rs = stmt.getResultSet();
-			       ResultSetMetaData rmeta = rs.getMetaData();
-			       int col = rmeta.getColumnCount();
-			       while (rs.next()) 
-			       {
-			    	   ChukwaRecord event = new ChukwaRecord();
-					   String cell="";
-					   long timestamp = 0;
-					   
-					   for(int i=1;i<col;i++)
-					   {
-					       String value = rs.getString(i);
-					       if(value!=null) 
-					       {
-						   cell=cell+" "+rmeta.getColumnName(i)+":"+value;
-					       }
-					       if(rmeta.getColumnName(i).equals(timeField)) 
-					       {
-					    	   timestamp = rs.getLong(i);
-					    	   event.setTime(timestamp);
-					       }
-					   }
-					   boolean isValid = false;
-					   if(filter == null || filter.equals("")) 
-					   {
-						   isValid = true;
-					   }
-					   else if (cell.indexOf(filter) > 0)
-					   {
-						   isValid = true;
-					   }
-					   if (!isValid)
-					   { continue; }
-					   
-					   event.add(Record.bodyField, cell);
-					   event.add(Record.sourceField, cluster + "." + dataSource );
-					   if (records.containsKey(timestamp))
-					   {
-						   records.get(timestamp).add(event);
-					   }
-					   else
-					   {
-						   List<Record> list = new LinkedList<Record>();
-						   list.add(event);
-						   records.put(event.getTime(), list);
-					   }     
-			       }
-			   }
-	    }
-	    catch (SQLException e)
-	    {
-	    	e.printStackTrace();
-	    	throw new DataSourceException(e);
-	    }
-	    finally 
-	    {
-	    	  if (rs != null) {
-			       try {
-				   rs.close();
-			       } catch (SQLException sqlEx) {
-				   // ignore
-			       }
-			       rs = null;
-			   }
-			   if (stmt != null) {
-			       try {
-				   stmt.close();
-			       } catch (SQLException sqlEx) {
-				   // ignore
-			       }
-			       stmt = null;
-			   }
-	    }
-		return result;
-	}
-
-	public boolean isThreadSafe()
-	{
-		return true;
-	}
+public class DatabaseDS implements DataSource {
+
+  public SearchResult search(SearchResult result, String cluster,
+      String dataSource, long t0, long t1, String filter, Token token)
+      throws DataSourceException {
+    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd kk:mm:ss");
+    String timeField = null;
+    TreeMap<Long, List<Record>> records = result.getRecords();
+
+    if (cluster == null) {
+      cluster = "demo";
+    }
+
+    if (dataSource.equalsIgnoreCase("MRJob")) {
+      timeField = "LAUNCH_TIME";
+    } else if (dataSource.equalsIgnoreCase("HodJob")) {
+      timeField = "StartTime";
+    } else if (dataSource.equalsIgnoreCase("QueueInfo")) {
+      timeField = "timestamp";
+    } else {
+      timeField = "timestamp";
+    }
+    String startS = formatter.format(t0);
+    String endS = formatter.format(t1);
+    Statement stmt = null;
+    ResultSet rs = null;
+    try {
+      String dateclause = timeField + " >= '" + startS + "' and " + timeField
+          + " <= '" + endS + "'";
+
+      // ClusterConfig cc = new ClusterConfig();
+      String jdbc = ""; // cc.getURL(cluster);
+
+      Connection conn = DriverManager.getConnection(jdbc);
+
+      stmt = conn.createStatement();
+      String query = "";
+      query = "select * from " + dataSource + " where " + dateclause + ";";
+      rs = stmt.executeQuery(query);
+      if (stmt.execute(query)) {
+        rs = stmt.getResultSet();
+        ResultSetMetaData rmeta = rs.getMetaData();
+        int col = rmeta.getColumnCount();
+        while (rs.next()) {
+          ChukwaRecord event = new ChukwaRecord();
+          String cell = "";
+          long timestamp = 0;
+
+          for (int i = 1; i < col; i++) {
+            String value = rs.getString(i);
+            if (value != null) {
+              cell = cell + " " + rmeta.getColumnName(i) + ":" + value;
+            }
+            if (rmeta.getColumnName(i).equals(timeField)) {
+              timestamp = rs.getLong(i);
+              event.setTime(timestamp);
+            }
+          }
+          boolean isValid = false;
+          if (filter == null || filter.equals("")) {
+            isValid = true;
+          } else if (cell.indexOf(filter) > 0) {
+            isValid = true;
+          }
+          if (!isValid) {
+            continue;
+          }
+
+          event.add(Record.bodyField, cell);
+          event.add(Record.sourceField, cluster + "." + dataSource);
+          if (records.containsKey(timestamp)) {
+            records.get(timestamp).add(event);
+          } else {
+            List<Record> list = new LinkedList<Record>();
+            list.add(event);
+            records.put(event.getTime(), list);
+          }
+        }
+      }
+    } catch (SQLException e) {
+      e.printStackTrace();
+      throw new DataSourceException(e);
+    } finally {
+      if (rs != null) {
+        try {
+          rs.close();
+        } catch (SQLException sqlEx) {
+          // ignore
+        }
+        rs = null;
+      }
+      if (stmt != null) {
+        try {
+          stmt.close();
+        } catch (SQLException sqlEx) {
+          // ignore
+        }
+        stmt = null;
+      }
+    }
+    return result;
+  }
+
+  public boolean isThreadSafe() {
+    return true;
+  }
 
 }
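
Two things worth flagging in the reformatted search(): the query is executed twice (executeQuery() immediately followed by execute() on the same statement), and the column loop runs i from 1 while i < col, so the last column never reaches the record body. The sketch below is illustrative only, not the committed code; it assumes the java.sql imports already present in this file plus java.sql.PreparedStatement, and reuses the dataSource, timeField, startS and endS values computed in search(). Binding the date bounds as parameters avoids hand-quoting them, but table and column names cannot be bound, so dataSource and timeField would still need to be checked against a known list before concatenation.

// Illustrative only; the caller closes the result set's statement when done
// (rs.getStatement().close() also closes rs).
private static ResultSet runRangeQuery(Connection conn, String dataSource,
    String timeField, String startS, String endS) throws SQLException {
  String sql = "select * from " + dataSource + " where " + timeField
      + " >= ? and " + timeField + " <= ?";
  PreparedStatement ps = conn.prepareStatement(sql);
  ps.setString(1, startS); // the same "yyyy-MM-dd kk:mm:ss" strings formatted above
  ps.setString(2, endS);
  return ps.executeQuery(); // one round trip instead of executeQuery() plus execute()
}

With this in place, the ResultSet loop in search() stays the same apart from iterating i from 1 to col inclusive, so the final column is also appended to the cell text.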

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaDSInternalResult.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaDSInternalResult.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaDSInternalResult.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaDSInternalResult.java Wed Mar 11 22:39:26 2009
@@ -1,21 +1,20 @@
 package org.apache.hadoop.chukwa.extraction.engine.datasource.record;
 
-import java.util.List;
 
+import java.util.List;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
 import org.apache.hadoop.chukwa.extraction.engine.Record;
 
-public class ChukwaDSInternalResult
-{
-	List<Record> records = null;
-	String day = null;
-	int hour = 0;
-	int rawIndex = 0;
-	int spill = 1;
-	long position = -1;
-	long currentTs = -1;
-	
-	String fileName = null;
-	
-	ChukwaRecordKey key = null;
+public class ChukwaDSInternalResult {
+  List<Record> records = null;
+  String day = null;
+  int hour = 0;
+  int rawIndex = 0;
+  int spill = 1;
+  long position = -1;
+  long currentTs = -1;
+
+  String fileName = null;
+
+  ChukwaRecordKey key = null;
 }

Modified: hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaFileParser.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaFileParser.java?rev=752666&r1=752665&r2=752666&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaFileParser.java (original)
+++ hadoop/chukwa/trunk/src/java/org/apache/hadoop/chukwa/extraction/engine/datasource/record/ChukwaFileParser.java Wed Mar 11 22:39:26 2009
@@ -18,153 +18,128 @@
 
 package org.apache.hadoop.chukwa.extraction.engine.datasource.record;
 
+
 import java.io.IOException;
 import java.util.LinkedList;
 import java.util.List;
-
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
 import org.apache.hadoop.chukwa.extraction.engine.Record;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
+public class ChukwaFileParser {
+  static final int timestampField = 0;
 
-
-public class ChukwaFileParser
-{
-	static final int timestampField = 0;
-		
-	
-	@SuppressWarnings("deprecation")
-	public static  List<Record> readData(String cluster,String dataSource,int maxRows,long t1, long t0,
-			long maxOffset,String filter,String fileName,FileSystem fs ) throws
-			MalformedFileFormat
-	{
-	
-		//String source = "NameNode." + fileName;
-		List<Record> records = new LinkedList<Record>();
-		FSDataInputStream dataIS = null;
-		int lineCount = 0;
-		
-		try
-		{
-			
-			if (!fs.exists(new Path(fileName)))
-			{
-				System.out.println("fileName not there!");
-				return records;
-			}
-			System.out.println("NameNodeParser Open [" +fileName + "]");
-			
-			dataIS = fs.open(new Path(fileName));
-			System.out.println("NameNodeParser Open2 [" +fileName + "]");
-			
-			long timestamp = 0;
-			int listSize = 0;
-			String line = null;
-			String[] data = null;
-			long offset = 0;
-			
-			
-			do
-			{
-				offset = dataIS.getPos();
-				
-				// Need TODO something here
-//				if (offset > maxOffset)
-//				{
-//					break;
-//				}
-				
-				line = dataIS.readLine();
-				lineCount ++;
-//				System.out.println("NameNodeParser Line [" +line + "]");	
-				if (line != null)
-				{
-					
-					//empty lines
-					if (line.length() < 14)
-					{
-//						System.out.println("NameNodeParser Line < 14! [" +line + "]");
-						continue;
-					}
-//					System.out.println("Line [" +line + "]");
-					data = line.split("\t");// Default separator for TextOutputFormat!
-					
-					try
-					{
-						timestamp = Long.parseLong(data[timestampField]);
-						
-					} catch (Exception e)
-					{
-						e.printStackTrace();
-						//throw new MalformedFileFormat(e);
-					}
-					if (timestamp < t0) 
-					{
-//						 System.out.println("Line not in range. Skipping: " +line);
-//						 System.out.println("Search for: " + new Date(t0) + " is :" + new Date(timestamp));
-						 continue;
-					} 
-					else if ((timestamp < t1) && (offset < maxOffset )) //JB (epochTS < maxDate)
-					{
-						
-//						System.out.println("In Range: " + line);
-						boolean valid = false;
-						
-						 if ( (filter == null || filter.equals("") ))
-						 {
-							 valid = true;
-						 }
-						 else if (line.indexOf(filter) > 0)
-						   {
-							   valid = true;
-						   }
-						 
-						 if (valid)
-						 {
-//							System.out.println("In Range In Filter: " + line);
-							ChukwaRecord record = new ChukwaRecord();
-							record.setTime(timestamp);
-							record.add("offset", ""+offset);
-							record.add(Record.bodyField, data[1]);
-							record.add(Record.sourceField, dataSource);
-							
-							records.add(record);
-							listSize = records.size();
-							if (listSize > maxRows)
-							{
-								records.remove(0);
-//								System.out.println("==========>>>>>REMOVING: " + e);
-							}
-						 }
-						else 
-						{
-//							System.out.println("In Range ==================>>>>>>>>> OUT Regex: " + line);
-						}
-
-					}
-					else
-					{
-//						 System.out.println("Line out of range. Stopping now: " +line);
-						break;
-					}
-				}
-
-			} while (line != null);			
-		}
-		catch(Exception e)
-		{
-			e.printStackTrace();
-		}
-		finally
-		{
-			System.out.println("File: " +fileName +" Line count: " + lineCount);
-			try
-			{dataIS.close();} 
-			catch (IOException e)
-			{}
-		}
-		return records;
-	}
+  @SuppressWarnings("deprecation")
+  public static List<Record> readData(String cluster, String dataSource,
+      int maxRows, long t1, long t0, long maxOffset, String filter,
+      String fileName, FileSystem fs) throws MalformedFileFormat {
+
+    // String source = "NameNode." + fileName;
+    List<Record> records = new LinkedList<Record>();
+    FSDataInputStream dataIS = null;
+    int lineCount = 0;
+
+    try {
+
+      if (!fs.exists(new Path(fileName))) {
+        System.out.println("fileName not there!");
+        return records;
+      }
+      System.out.println("NameNodeParser Open [" + fileName + "]");
+
+      dataIS = fs.open(new Path(fileName));
+      System.out.println("NameNodeParser Open2 [" + fileName + "]");
+
+      long timestamp = 0;
+      int listSize = 0;
+      String line = null;
+      String[] data = null;
+      long offset = 0;
+
+      do {
+        offset = dataIS.getPos();
+
+        // Need TODO something here
+        // if (offset > maxOffset)
+        // {
+        // break;
+        // }
+
+        line = dataIS.readLine();
+        lineCount++;
+        // System.out.println("NameNodeParser Line [" +line + "]");
+        if (line != null) {
+
+          // empty lines
+          if (line.length() < 14) {
+            // System.out.println("NameNodeParser Line < 14! [" +line + "]");
+            continue;
+          }
+          // System.out.println("Line [" +line + "]");
+          data = line.split("\t");// Default separator for TextOutputFormat!
+
+          try {
+            timestamp = Long.parseLong(data[timestampField]);
+
+          } catch (Exception e) {
+            e.printStackTrace();
+            // throw new MalformedFileFormat(e);
+          }
+          if (timestamp < t0) {
+            // System.out.println("Line not in range. Skipping: " +line);
+            // System.out.println("Search for: " + new Date(t0) + " is :" + new
+            // Date(timestamp));
+            continue;
+          } else if ((timestamp < t1) && (offset < maxOffset)) // JB (epochTS <
+                                                               // maxDate)
+          {
+
+            // System.out.println("In Range: " + line);
+            boolean valid = false;
+
+            if ((filter == null || filter.equals(""))) {
+              valid = true;
+            } else if (line.indexOf(filter) > 0) {
+              valid = true;
+            }
+
+            if (valid) {
+              // System.out.println("In Range In Filter: " + line);
+              ChukwaRecord record = new ChukwaRecord();
+              record.setTime(timestamp);
+              record.add("offset", "" + offset);
+              record.add(Record.bodyField, data[1]);
+              record.add(Record.sourceField, dataSource);
+
+              records.add(record);
+              listSize = records.size();
+              if (listSize > maxRows) {
+                records.remove(0);
+                // System.out.println("==========>>>>>REMOVING: " + e);
+              }
+            } else {
+              // System.out.println(
+              // "In Range ==================>>>>>>>>> OUT Regex: " + line);
+            }
+
+          } else {
+            // System.out.println("Line out of range. Stopping now: " +line);
+            break;
+          }
+        }
+
+      } while (line != null);
+    } catch (Exception e) {
+      e.printStackTrace();
+    } finally {
+      System.out.println("File: " + fileName + " Line count: " + lineCount);
+      try {
+        dataIS.close();
+      } catch (IOException e) {
+      }
+    }
+    return records;
+  }
 }
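
One fragile spot in readData() above: the finally block calls dataIS.close() unconditionally, so when fs.open() throws, or when the file is missing and the method returns early, dataIS is still null and the cleanup itself throws a NullPointerException that masks the original outcome. A null-safe close helper is sketched below for illustration only (IoUtil and closeQuietly are hypothetical names, not part of this commit); the finally block would then call IoUtil.closeQuietly(dataIS) instead of dataIS.close().

import java.io.Closeable;
import java.io.IOException;

public final class IoUtil { // hypothetical helper, not part of the commit
  private IoUtil() {
  }

  // Close a stream only if it was actually opened; swallow close() failures,
  // matching the empty catch block already used in readData().
  public static void closeQuietly(Closeable c) {
    if (c == null) {
      return; // fs.open() never succeeded, nothing to release
    }
    try {
      c.close();
    } catch (IOException e) {
      // best effort only
    }
  }
}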


