chukwa-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ey...@apache.org
Subject svn commit: r1612608 - in /chukwa/trunk: ./ src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/sigar/ src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/ src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor...
Date Tue, 22 Jul 2014 16:50:12 GMT
Author: eyang
Date: Tue Jul 22 16:50:11 2014
New Revision: 1612608

URL: http://svn.apache.org/r1612608
Log:
CHUKWA-713. Improved error handling and test case for System Metrics.  (Shreyas Subramanya via Eric Yang)

Modified:
    chukwa/trunk/CHANGES.txt
    chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/sigar/SigarRunner.java
    chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SystemMetrics.java
    chukwa/trunk/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestJsonProcessors.java

Modified: chukwa/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/chukwa/trunk/CHANGES.txt?rev=1612608&r1=1612607&r2=1612608&view=diff
==============================================================================
--- chukwa/trunk/CHANGES.txt (original)
+++ chukwa/trunk/CHANGES.txt Tue Jul 22 16:50:11 2014
@@ -36,6 +36,8 @@ Release 0.6 - Unreleased
 
   IMPROVEMENTS
 
+    CHUKWA-713. Improved error handling and test case for System Metrics.  (Shreyas Subramanya via Eric Yang)
+
     CHUKWA-721. Updated Chukwa document to reflect changes in Chukwa 0.6.  (Eric Yang)
 
     CHUKWA-718. Updated Chukwa Agent REST API document and generation method.  (Eric Yang)

Modified: chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/sigar/SigarRunner.java
URL: http://svn.apache.org/viewvc/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/sigar/SigarRunner.java?rev=1612608&r1=1612607&r2=1612608&view=diff
==============================================================================
--- chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/sigar/SigarRunner.java (original)
+++ chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/datacollection/adaptor/sigar/SigarRunner.java Tue Jul 22 16:50:11 2014
@@ -21,6 +21,7 @@ package org.apache.hadoop.chukwa.datacol
 import java.util.HashMap;
 import java.util.TimerTask;
 
+import org.apache.commons.lang.exception.ExceptionUtils;
 import org.apache.hadoop.chukwa.ChunkImpl;
 import org.apache.hadoop.chukwa.datacollection.ChunkReceiver;
 import org.apache.hadoop.chukwa.util.ExceptionUtil;
@@ -30,6 +31,7 @@ import org.hyperic.sigar.CpuPerc;
 import org.hyperic.sigar.FileSystem;
 import org.hyperic.sigar.FileSystemUsage;
 import org.hyperic.sigar.Mem;
+import org.hyperic.sigar.SigarException;
 import org.hyperic.sigar.Swap;
 import org.hyperic.sigar.NetInterfaceStat;
 import org.hyperic.sigar.Sigar;
@@ -70,103 +72,136 @@ public class SigarRunner extends TimerTa
     JSONObject json = new JSONObject();
     try {
       // CPU utilization
-      cpuinfo = sigar.getCpuInfoList();
-      cpuPerc = sigar.getCpuPercList();
-      JSONArray cpuList = new JSONArray();
-      for (int i = 0; i < cpuinfo.length; i++) {
-        JSONObject cpuMap = new JSONObject();
-        cpuMap.putAll(cpuinfo[i].toMap());
-        cpuMap.put("combined", cpuPerc[i].getCombined());
-        cpuMap.put("user", cpuPerc[i].getUser());
-        cpuMap.put("sys", cpuPerc[i].getSys());
-        cpuMap.put("idle", cpuPerc[i].getIdle());
-        cpuMap.put("wait", cpuPerc[i].getWait());
-        cpuMap.put("nice", cpuPerc[i].getNice());
-        cpuMap.put("irq", cpuPerc[i].getIrq());
-        cpuList.add(cpuMap);
+      JSONArray load = new JSONArray();
+      try {
+        cpuinfo = sigar.getCpuInfoList();
+        cpuPerc = sigar.getCpuPercList();
+        JSONArray cpuList = new JSONArray();
+        for (int i = 0; i < cpuinfo.length; i++) {
+          JSONObject cpuMap = new JSONObject();
+          cpuMap.putAll(cpuinfo[i].toMap());
+          cpuMap.put("combined", cpuPerc[i].getCombined());
+          cpuMap.put("user", cpuPerc[i].getUser());
+          cpuMap.put("sys", cpuPerc[i].getSys());
+          cpuMap.put("idle", cpuPerc[i].getIdle());
+          cpuMap.put("wait", cpuPerc[i].getWait());
+          cpuMap.put("nice", cpuPerc[i].getNice());
+          cpuMap.put("irq", cpuPerc[i].getIrq());
+          cpuList.add(cpuMap);
+        }
+        sigar.getCpuPerc();
+        json.put("cpu", cpuList);
+	      
+        // Uptime
+        uptime = sigar.getUptime();
+        json.put("uptime", uptime.getUptime());
+	      
+        // Load Average
+        loadavg = sigar.getLoadAverage();	      
+        load.add(loadavg[0]);
+        load.add(loadavg[1]);
+        load.add(loadavg[2]);
+      } catch(SigarException se) {
+        log.error("SigarException caused during collection of CPU utilization");
+        log.error(ExceptionUtils.getStackTrace(se));
+      } finally {
+        json.put("loadavg", load);
       }
-      sigar.getCpuPerc();
-      json.put("cpu", cpuList);
-      
-      // Uptime
-      uptime = sigar.getUptime();
-      json.put("uptime", uptime.getUptime());
       
-      // Load Average
-      loadavg = sigar.getLoadAverage();
-      JSONArray load = new JSONArray();
-      load.add(loadavg[0]);
-      load.add(loadavg[1]);
-      load.add(loadavg[2]);
-      json.put("loadavg", load);
 
       // Memory Utilization
-      mem = sigar.getMem();
       JSONObject memMap = new JSONObject();
-      memMap.putAll(mem.toMap());
-      json.put("memory", memMap);
-
-      // Swap Utilization
-      swap = sigar.getSwap();
       JSONObject swapMap = new JSONObject();
-      swapMap.putAll(swap.toMap());
-      json.put("swap", swapMap);
+      try {
+        mem = sigar.getMem();
+        memMap.putAll(mem.toMap());	      
+	
+        // Swap Utilization
+        swap = sigar.getSwap();	      
+        swapMap.putAll(swap.toMap());	      
+      } catch(SigarException se){
+        log.error("SigarException caused during collection of Memory utilization");
+        log.error(ExceptionUtils.getStackTrace(se));
+      } finally {
+        json.put("memory", memMap);
+        json.put("swap", swapMap);
+      }
       
       // Network Utilization
-      netIf = sigar.getNetInterfaceList();
       JSONArray netInterfaces = new JSONArray();
-      for (int i = 0; i < netIf.length; i++) {
-        NetInterfaceStat net = new NetInterfaceStat();
-        net = sigar.getNetInterfaceStat(netIf[i]);
-        JSONObject netMap = new JSONObject();
-        netMap.putAll(net.toMap());
-        if(previousNetworkStats.containsKey(netIf[i])) {
-          JSONObject deltaMap = previousNetworkStats.get(netIf[i]);
-          deltaMap.put("RxBytes", Long.parseLong(netMap.get("RxBytes").toString()) - Long.parseLong(deltaMap.get("RxBytes").toString()));
-          deltaMap.put("RxDropped", Long.parseLong(netMap.get("RxDropped").toString()) - Long.parseLong(deltaMap.get("RxDropped").toString()));
-          deltaMap.put("RxErrors", Long.parseLong(netMap.get("RxErrors").toString()) - Long.parseLong(deltaMap.get("RxErrors").toString()));
-          deltaMap.put("RxPackets", Long.parseLong(netMap.get("RxPackets").toString()) - Long.parseLong(deltaMap.get("RxPackets").toString()));
-          deltaMap.put("TxBytes", Long.parseLong(netMap.get("TxBytes").toString()) - Long.parseLong(deltaMap.get("TxBytes").toString()));
-          deltaMap.put("TxCollisions", Long.parseLong(netMap.get("TxCollisions").toString()) - Long.parseLong(deltaMap.get("TxCollisions").toString()));
-          deltaMap.put("TxErrors", Long.parseLong(netMap.get("TxErrors").toString()) - Long.parseLong(deltaMap.get("TxErrors").toString()));
-          deltaMap.put("TxPackets", Long.parseLong(netMap.get("TxPackets").toString()) - Long.parseLong(deltaMap.get("TxPackets").toString()));
-          netInterfaces.add(deltaMap);
-          skip = false;
-        } else {
-          netInterfaces.add(netMap);
-          skip = true;
+      try {
+        netIf = sigar.getNetInterfaceList();
+        for (int i = 0; i < netIf.length; i++) {
+          NetInterfaceStat net = new NetInterfaceStat();
+          try {
+            net = sigar.getNetInterfaceStat(netIf[i]);
+          } catch(SigarException e){
+            // Ignore the exception when trying to stat network interface
+            log.warn("SigarException trying to stat network device "+netIf[i]);
+            continue;
+          }
+          JSONObject netMap = new JSONObject();
+          netMap.putAll(net.toMap());
+          if(previousNetworkStats.containsKey(netIf[i])) {
+            JSONObject deltaMap = previousNetworkStats.get(netIf[i]);
+            deltaMap.put("RxBytes", Long.parseLong(netMap.get("RxBytes").toString()) - Long.parseLong(deltaMap.get("RxBytes").toString()));
+            deltaMap.put("RxDropped", Long.parseLong(netMap.get("RxDropped").toString()) - Long.parseLong(deltaMap.get("RxDropped").toString()));
+            deltaMap.put("RxErrors", Long.parseLong(netMap.get("RxErrors").toString()) - Long.parseLong(deltaMap.get("RxErrors").toString()));
+            deltaMap.put("RxPackets", Long.parseLong(netMap.get("RxPackets").toString()) - Long.parseLong(deltaMap.get("RxPackets").toString()));
+            deltaMap.put("TxBytes", Long.parseLong(netMap.get("TxBytes").toString()) - Long.parseLong(deltaMap.get("TxBytes").toString()));
+            deltaMap.put("TxCollisions", Long.parseLong(netMap.get("TxCollisions").toString()) - Long.parseLong(deltaMap.get("TxCollisions").toString()));
+            deltaMap.put("TxErrors", Long.parseLong(netMap.get("TxErrors").toString()) - Long.parseLong(deltaMap.get("TxErrors").toString()));
+            deltaMap.put("TxPackets", Long.parseLong(netMap.get("TxPackets").toString()) - Long.parseLong(deltaMap.get("TxPackets").toString()));
+            netInterfaces.add(deltaMap);
+            skip = false;
+          } else {
+            netInterfaces.add(netMap);
+            skip = true;
+          }
+          previousNetworkStats.put(netIf[i], netMap);
         }
-        previousNetworkStats.put(netIf[i], netMap);
+      } catch(SigarException se){
+        log.error("SigarException caused during collection of Network utilization");
+        log.error(ExceptionUtils.getStackTrace(se));
+      } finally {
+        json.put("network", netInterfaces);
       }
-      json.put("network", netInterfaces);
 
       // Filesystem Utilization
-      fs = sigar.getFileSystemList();
       JSONArray fsList = new JSONArray();
-      for (int i = 0; i < fs.length; i++) {
-        FileSystemUsage usage = sigar.getFileSystemUsage(fs[i].getDirName());
-        JSONObject fsMap = new JSONObject();
-        fsMap.putAll(fs[i].toMap());
-        fsMap.put("ReadBytes", usage.getDiskReadBytes());
-        fsMap.put("Reads", usage.getDiskReads());
-        fsMap.put("WriteBytes", usage.getDiskWriteBytes());
-        fsMap.put("Writes", usage.getDiskWrites());
-        if(previousDiskStats.containsKey(fs[i].getDevName())) {
-          JSONObject deltaMap = previousDiskStats.get(fs[i].getDevName());
-          deltaMap.put("ReadBytes", usage.getDiskReadBytes() - (Long) deltaMap.get("ReadBytes"));
-          deltaMap.put("Reads", usage.getDiskReads() - (Long) deltaMap.get("Reads"));
-          deltaMap.put("WriteBytes", usage.getDiskWriteBytes() - (Long) deltaMap.get("WriteBytes"));
-          deltaMap.put("Writes", usage.getDiskWrites() - (Long) deltaMap.get("Writes"));
-          deltaMap.putAll(fs[i].toMap());
-          fsList.add(deltaMap);
-          skip = false;
-        } else {
-          fsList.add(fsMap);
-          skip = true;
+      try {
+        fs = sigar.getFileSystemList();
+        for (int i = 0; i < fs.length; i++) {
+          FileSystemUsage usage = sigar.getFileSystemUsage(fs[i].getDirName());
+          JSONObject fsMap = new JSONObject();
+          fsMap.putAll(fs[i].toMap());
+          fsMap.put("ReadBytes", usage.getDiskReadBytes());
+          fsMap.put("Reads", usage.getDiskReads());
+          fsMap.put("WriteBytes", usage.getDiskWriteBytes());
+          fsMap.put("Writes", usage.getDiskWrites());
+          if(previousDiskStats.containsKey(fs[i].getDevName())) {
+            JSONObject deltaMap = previousDiskStats.get(fs[i].getDevName());
+            deltaMap.put("ReadBytes", usage.getDiskReadBytes() - (Long) deltaMap.get("ReadBytes"));
+            deltaMap.put("Reads", usage.getDiskReads() - (Long) deltaMap.get("Reads"));
+            deltaMap.put("WriteBytes", usage.getDiskWriteBytes() - (Long) deltaMap.get("WriteBytes"));
+            deltaMap.put("Writes", usage.getDiskWrites() - (Long) deltaMap.get("Writes"));
+            deltaMap.put("Total", usage.getTotal());
+            deltaMap.put("Used", usage.getUsed());
+            deltaMap.putAll(fs[i].toMap());
+            fsList.add(deltaMap);
+            skip = false;
+          } else {
+            fsList.add(fsMap);
+            skip = true;
+          }
+          previousDiskStats.put(fs[i].getDevName(), fsMap);          
         }
-        previousDiskStats.put(fs[i].getDevName(), fsMap);          
+      } catch(SigarException se){
+        log.error("SigarException caused during collection of FileSystem utilization");
+        log.error(ExceptionUtils.getStackTrace(se));
+      } finally {
+        json.put("disk", fsList);
       }
-      json.put("disk", fsList);
       json.put("timestamp", System.currentTimeMillis());
       byte[] data = json.toString().getBytes();
       sendOffset += data.length;

Modified: chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SystemMetrics.java
URL: http://svn.apache.org/viewvc/chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SystemMetrics.java?rev=1612608&r1=1612607&r2=1612608&view=diff
==============================================================================
--- chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SystemMetrics.java (original)
+++ chukwa/trunk/src/main/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/SystemMetrics.java Tue Jul 22 16:50:11 2014
@@ -61,8 +61,14 @@ public class SystemMetrics extends Abstr
     double user = 0.0;
     double sys = 0.0;
     double idle = 0.0;
+    int actualSize = 0;
     for(int i = 0; i< cpuList.size(); i++) {
       JSONObject cpu = (JSONObject) cpuList.get(i);
+      //Work around for sigar returning null sometimes for cpu metrics on pLinux
+      if(cpu.get("combined") == null){
+    	  continue;
+      }
+      actualSize++;
       Iterator<String> keys = cpu.keySet().iterator();
       combined = combined + Double.parseDouble(cpu.get("combined").toString());
       user = user + Double.parseDouble(cpu.get("user").toString());
@@ -73,10 +79,10 @@ public class SystemMetrics extends Abstr
         record.add(key + "." + i, cpu.get(key).toString());
       }
     }
-    combined = combined / cpuList.size();
-    user = user / cpuList.size();
-    sys = sys / cpuList.size();
-    idle = idle / cpuList.size();
+    combined = combined / actualSize;
+    user = user / actualSize;
+    sys = sys / actualSize;
+    idle = idle / actualSize;
     record.add("combined", Double.toString(combined));
     record.add("user", Double.toString(user));
     record.add("idle", Double.toString(idle));    
@@ -165,6 +171,8 @@ public class SystemMetrics extends Abstr
     double reads = 0;
     double writeBytes = 0;
     double writes = 0;
+    double total = 0;
+    double used = 0;
     record = new ChukwaRecord();
     JSONArray diskList = (JSONArray) json.get("disk");
     for(int i = 0;i < diskList.size(); i++) {
@@ -181,13 +189,21 @@ public class SystemMetrics extends Abstr
           writeBytes = writeBytes + (Long) disk.get("WriteBytes");
         } else if(key.equals("Writes")) {
           writes = writes + (Long) disk.get("Writes");
+        }  else if(key.equals("Total")) {
+          total = total + (Long) disk.get("Total");
+        } else if(key.equals("Used")) {
+          used = used + (Long) disk.get("Used");
         }
       }
     }
+    double percentUsed = used/total; 
     record.add("ReadBytes", Double.toString(readBytes));
     record.add("Reads", Double.toString(reads));
     record.add("WriteBytes", Double.toString(writeBytes));
-    record.add("Writes", Double.toString(writes));    
+    record.add("Writes", Double.toString(writes));
+    record.add("Total", Double.toString(total));
+    record.add("Used", Double.toString(used));
+    record.add("PercentUsed", Double.toString(percentUsed));
     buildGenericRecord(record, null, cal.getTimeInMillis(), "disk");
     output.collect(key, record);
     

Modified: chukwa/trunk/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestJsonProcessors.java
URL: http://svn.apache.org/viewvc/chukwa/trunk/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestJsonProcessors.java?rev=1612608&r1=1612607&r2=1612608&view=diff
==============================================================================
--- chukwa/trunk/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestJsonProcessors.java (original)
+++ chukwa/trunk/src/test/java/org/apache/hadoop/chukwa/extraction/demux/processor/mapper/TestJsonProcessors.java Tue Jul 22 16:50:11 2014
@@ -28,6 +28,7 @@ import org.apache.hadoop.chukwa.Chunk;
 import org.apache.hadoop.chukwa.ChunkImpl;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
 import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
+import org.apache.hadoop.chukwa.extraction.engine.Record;
 import org.json.simple.JSONArray;
 import org.json.simple.JSONObject;
 
@@ -49,7 +50,7 @@ public class TestJsonProcessors extends 
 		ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord> output = new ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord>();
 		p.process(new ChukwaArchiveKey(), chunk, output, null);
 		HashMap<ChukwaRecordKey, ChukwaRecord> outData = output.data;
-
+		
 		// First get all ChukwaRecords and then get all field-data pairs within
 		// each record
 		Iterator<Entry<ChukwaRecordKey, ChukwaRecord>> recordIter = outData
@@ -60,8 +61,10 @@ public class TestJsonProcessors extends 
 			ChukwaRecord value = recordEntry.getValue();
 			String[] fields = value.getFields();
 			for (String field : fields) {
-				//ignore ctags
-				if(field.equals("ctags")) {
+				//ignore ctags, capps, csource
+				if (field.equals(Record.tagsField)
+						|| field.equals(Record.applicationField)
+						|| field.equals(Record.sourceField)) {
 					continue;
 				}
 				String data = value.getValue(field);
@@ -231,5 +234,107 @@ public class TestJsonProcessors extends 
 		failMsg = testProcessor(p, json, ch);
 		assertNull(failMsg, failMsg);
 	}
+	
+	@SuppressWarnings("unchecked")
+	public void testSysteMetricsProcessor() {
+		JSONObject system = new JSONObject();
+		JSONObject memory = new JSONObject();
+		JSONObject cpu1 = new JSONObject();
+		JSONObject cpu2 = new JSONObject();
+		JSONObject cpu3 = new JSONObject();
+		JSONObject cpu4 = new JSONObject();
+		JSONObject disk1 = new JSONObject();
+		JSONObject disk2 = new JSONObject();
+		JSONObject network1 = new JSONObject();
+		JSONObject network2 = new JSONObject();
+
+		JSONArray cpu = new JSONArray();
+		JSONArray loadAvg = new JSONArray();
+		JSONArray disk = new JSONArray();
+		JSONArray network = new JSONArray();
+
+		memory.put("Total", "130980773888");
+		memory.put("UsedPercent", "4.493927773730516");
+		memory.put("FreePercent", "95.50607222626948");
+		memory.put("ActualFree", "125094592512");
+		memory.put("ActualUsed", "5886181376");
+		memory.put("Free", "34487599104");
+		memory.put("Used", "96493174784");
+		memory.put("Ram", "124920");
+		system.put("memory", memory);
+
+		system.put("timestamp", 1353981082318L);
+		system.put("uptime", "495307.98");
+
+		cpu1.put("combined", 0.607);
+		cpu1.put("user", 0.49);
+		cpu1.put("idle", 0.35);
+		cpu1.put("sys", 0.116);
+		cpu2.put("combined", 0.898);
+		cpu2.put("user", 0.69);
+		cpu2.put("idle", 0.06);
+		cpu2.put("sys", 0.202);
+		// include chunks which have null values, to simulate sigar issue on
+		// pLinux
+		cpu3.put("combined", null);
+		cpu3.put("user", null);
+		cpu3.put("idle", null);
+		cpu3.put("sys", null);
+		cpu4.put("combined", "null");
+		cpu4.put("user", "null");
+		cpu4.put("idle", "null");
+		cpu4.put("sys", "null");
+		cpu.add(cpu1);
+		cpu.add(cpu2);
+		cpu.add(cpu3);
+		system.put("cpu", cpu);
+
+		loadAvg.add("0.16");
+		loadAvg.add("0.09");
+		loadAvg.add("0.06");
+		system.put("loadavg", loadAvg);
+
+		disk1.put("ReadBytes", 220000000000L);
+		disk1.put("Reads", 12994476L);
+		disk2.put("ReadBytes", 678910987L);
+		disk2.put("Reads", 276L);
+		disk.add(disk1);
+		disk.add(disk2);
+		system.put("disk", disk);
+
+		network1.put("RxBytes", 7234832487L);
+		network2.put("RxBytes", 8123023483L);
+		network.add(network1);
+		network.add(network2);
+		system.put("network", network);
+
+		byte[] data = system.toString().getBytes();
+		// parse with
+		// org.apache.hadoop.chukwa.extraction.demux.processor.mapper.SystemMetrics
+		// and verify cpu usage aggregates
+		SystemMetrics p = new SystemMetrics();
+		ChunkImpl ch = new ChunkImpl("TestType", "Test", data.length, data,
+				null);
+		ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord> output = new ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord>();
+		p.process(new ChukwaArchiveKey(), ch, output, null);
+		HashMap<ChukwaRecordKey, ChukwaRecord> outData = output.data;
+		Iterator<Entry<ChukwaRecordKey, ChukwaRecord>> recordIter = outData
+				.entrySet().iterator();
+		while (recordIter.hasNext()) {
+			Entry<ChukwaRecordKey, ChukwaRecord> recordEntry = recordIter
+					.next();
+			ChukwaRecordKey key = recordEntry.getKey();
+			ChukwaRecord value = recordEntry.getValue();
+			if (value.getValue("combined") != null) {
+				assertEquals(Double.parseDouble(value.getValue("combined")),
+						0.7525);
+				assertEquals(Double.parseDouble(value.getValue("user")), 0.59);
+				assertEquals(Double.parseDouble(value.getValue("sys")), 0.159);
+				assertEquals(Double.parseDouble(value.getValue("idle")), 0.205);
+				System.out.println("CPU metrics verified");
+			}
+		}
+
+	}
 }
 



Mime
View raw message