eagle-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From yonzhang2...@apache.org
Subject [3/4] incubator-eagle git commit: EAGLE-331 ingestion+alert engine preview ingestion + alert engine preview
Date Thu, 09 Jun 2016 23:18:49 GMT
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestFlatAggregator.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestFlatAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestFlatAggregator.java
index 3fdf322..b6b9439 100755
--- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestFlatAggregator.java
+++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestFlatAggregator.java
@@ -23,9 +23,8 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.eagle.query.aggregate.timeseries.FlatAggregator;
-import junit.framework.Assert;
-
 import org.apache.eagle.query.aggregate.AggregateFunctionType;
+import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -85,7 +84,7 @@ public class TestFlatAggregator {
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(result.size(), 1);
 			Assert.assertEquals(result.get(new ArrayList<String>()).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+
-					entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()));
+					entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -99,7 +98,7 @@ public class TestFlatAggregator {
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(result.size(), 1);
 			Assert.assertEquals(result.get(new ArrayList<String>()).get(0), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+
-					entities[2].getNumClusters()+entities[3].getNumClusters()+entities[4].getNumClusters()));
+					entities[2].getNumClusters()+entities[3].getNumClusters()+entities[4].getNumClusters()), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -112,7 +111,7 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(result.size(), 1);
-			Assert.assertEquals(result.get(new ArrayList<String>()).get(0), (double)(5));
+			Assert.assertEquals(result.get(new ArrayList<String>()).get(0), (double)(5), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -135,8 +134,8 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(result.size(), 2);
-			Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
+			Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -149,8 +148,8 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(result.size(), 2);
-			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts())+entities[3].getNumHosts());
-			Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), (double)(entities[4].getNumHosts()));
+			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts())+entities[3].getNumHosts(), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), (double)(entities[4].getNumHosts()), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -163,8 +162,8 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(result.size(), 2);
-			Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double)(entities[3].getNumClusters()+entities[4].getNumClusters()));
+			Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double)(entities[3].getNumClusters()+entities[4].getNumClusters()), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -177,8 +176,8 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(result.size(), 2);
-			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters())+entities[3].getNumClusters());
-			Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), (double)(entities[4].getNumClusters()));
+			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters())+entities[3].getNumClusters(), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), (double)(entities[4].getNumClusters()), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -202,8 +201,8 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(result.size(), 2);
-			Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double)(3));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double)(2));
+			Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double)(3), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double)(2), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -216,8 +215,8 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(result.size(), 2);
-			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(4));
-			Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), (double)(1));
+			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(4), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("dc2")).get(0), (double)(1), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -241,9 +240,9 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(3, result.size());
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1")).get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned")).get(0), (double)(entities[5].getNumHosts()));
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1")).get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned")).get(0), (double)(entities[5].getNumHosts()), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -256,11 +255,11 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(5, result.size());
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack123")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack128")).get(0), (double)(entities[2].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack125")).get(0), (double)(entities[3].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack126")).get(0), (double)(entities[4].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned", "rack126")).get(0), (double)(entities[5].getNumHosts()));
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack123")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack128")).get(0), (double)(entities[2].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack125")).get(0), (double)(entities[3].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack126")).get(0), (double)(entities[4].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned", "rack126")).get(0), (double)(entities[5].getNumHosts()), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -284,9 +283,9 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(3, result.size());
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1")).get(0), (double)(3));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1")).get(0), (double)(2));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned")).get(0), (double)(1));
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1")).get(0), (double)(3), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1")).get(0), (double)(2), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned")).get(0), (double)(1), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -299,11 +298,11 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(5, result.size());
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack123")).get(0), (double)(2));
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack128")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack125")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack126")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned", "rack126")).get(0), (double)(1));
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack123")).get(0), (double)(2), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "dc1", "rack128")).get(0), (double)(1), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack125")).get(0), (double)(1), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "dc1", "rack126")).get(0), (double)(1), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "unassigned", "rack126")).get(0), (double)(1), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -327,10 +326,10 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(result.size(), 2);
-			Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(1), (double)(3));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(1), (double)(2));
+			Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster1")).get(1), (double)(3), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2")).get(1), (double)(2), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -343,8 +342,8 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(result.size(), 1);
-			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(5));
-			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(1), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts())+entities[4].getNumHosts());
+			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(5), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(1), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts())+entities[4].getNumHosts(), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -358,9 +357,9 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(result.size(), 1);
-			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(5));
-			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(1), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts())+entities[4].getNumHosts());
-			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(2), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters()+entities[3].getNumClusters())+entities[4].getNumClusters());
+			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(0), (double)(5), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(1), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts())+entities[4].getNumHosts(), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("dc1")).get(2), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters()+entities[3].getNumClusters())+entities[4].getNumClusters(), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -384,14 +383,14 @@ public class TestFlatAggregator {
 			}
 			Map<List<String>, List<Double>> result = agg.result();
 			Assert.assertEquals(result.size(), 4);
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack123")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack123")).get(1), (double)(2));
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack128")).get(0), (double)(entities[2].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack128")).get(1), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack125")).get(0), (double)(entities[3].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack125")).get(1), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack126")).get(0), (double)(entities[4].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack126")).get(1), (double)(1));
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack123")).get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack123")).get(1), (double)(2), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack128")).get(0), (double)(entities[2].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "rack128")).get(1), (double)(1), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack125")).get(0), (double)(entities[3].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack125")).get(1), (double)(1), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack126")).get(0), (double)(entities[4].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "rack126")).get(1), (double)(1), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestHierarchicalAggregator.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestHierarchicalAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestHierarchicalAggregator.java
index cbcab0f..96b36e9 100755
--- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestHierarchicalAggregator.java
+++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestHierarchicalAggregator.java
@@ -27,7 +27,7 @@ import org.apache.eagle.query.aggregate.timeseries.PostHierarchicalAggregateSort
 import org.apache.eagle.query.aggregate.timeseries.HierarchicalAggregateEntity;
 import org.apache.eagle.query.aggregate.timeseries.HierarchicalAggregator;
 import org.apache.eagle.query.aggregate.AggregateFunctionType;
-import junit.framework.Assert;
+import org.junit.Assert;
 
 import org.apache.eagle.query.aggregate.timeseries.SortOption;
 import org.codehaus.jackson.JsonFactory;
@@ -97,7 +97,7 @@ private final static Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggreg
 			HierarchicalAggregateEntity result = agg.result();
 			writeToJson("After aggregate", result);
 			Assert.assertEquals(result.getChildren().size(), 0);
-			Assert.assertEquals(result.getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()));
+			Assert.assertEquals(result.getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001);
 
 			// test sort by function1
 			SortOption so = new SortOption();
@@ -108,7 +108,7 @@ private final static Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggreg
 			writeToJson("After sort" ,result);
 			Assert.assertEquals(null, result.getChildren());
 			Assert.assertEquals(0, result.getSortedList().size());
-			Assert.assertEquals(result.getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()));
+			Assert.assertEquals(result.getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -131,8 +131,8 @@ private final static Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggreg
 			HierarchicalAggregateEntity result = agg.result();
 			writeToJson("After aggregate" ,result);
 			Assert.assertEquals(result.getChildren().size(), 2);
-			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
+			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.0001);
+			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001);
 			
 			// test sort by function 1
 			SortOption so = new SortOption();
@@ -147,12 +147,12 @@ private final static Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggreg
 			Assert.assertEquals(true, it.hasNext());
 			Map.Entry<String, HierarchicalAggregateEntity> entry = it.next();
 			Assert.assertEquals("cluster2", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
+			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001);
 			
 			Assert.assertEquals(true, it.hasNext());
 			entry = it.next();
 			Assert.assertEquals("cluster1", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));
+			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.0001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -166,8 +166,8 @@ private final static Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggreg
 			HierarchicalAggregateEntity result = agg.result();
 			writeToJson("After aggregate" , result);
 			Assert.assertEquals(result.getChildren().size(), 2);
-			Assert.assertEquals(result.getChildren().get("dc1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("dc2").getValues().get(0), (double)(entities[2].getNumHosts()));
+			Assert.assertEquals(result.getChildren().get("dc1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001);
+			Assert.assertEquals(result.getChildren().get("dc2").getValues().get(0), (double)(entities[2].getNumHosts()), 0.0001);
 			
 			// test sort by function 1
 			SortOption so = new SortOption();
@@ -182,12 +182,12 @@ private final static Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggreg
 			Assert.assertEquals(true, it.hasNext());
 			Map.Entry<String, HierarchicalAggregateEntity> entry = it.next();
 			Assert.assertEquals("dc2", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[2].getNumHosts()));
+			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[2].getNumHosts()), 0.0001);
 			
 			Assert.assertEquals(true, it.hasNext());
 			entry = it.next();
 			Assert.assertEquals("dc1", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()));			
+			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -202,11 +202,11 @@ private final static Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggreg
 			writeToJson("After aggregate" , result);
 			Assert.assertEquals(result.getChildren().size(), 2);
 			Assert.assertEquals(2, result.getChildren().get("cluster1").getValues().size());
-			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(1), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters()));
+			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.0001);
+			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(1), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters()), 0.0001);
 			Assert.assertEquals(2, result.getChildren().get("cluster2").getValues().size());
-			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(1), (double)(entities[3].getNumClusters()+entities[4].getNumClusters()));
+			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001);
+			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(1), (double)(entities[3].getNumClusters()+entities[4].getNumClusters()), 0.0001);
 			
 			// test sort by function 2
 			SortOption so = new SortOption();
@@ -221,12 +221,12 @@ private final static Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggreg
 			Assert.assertEquals(true, it.hasNext());
 			Map.Entry<String, HierarchicalAggregateEntity> entry = it.next();
 			Assert.assertEquals("cluster1", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(1), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters()));
+			Assert.assertEquals(entry.getValue().getValues().get(1), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters()), 0.0001);
 			
 			Assert.assertEquals(true, it.hasNext());
 			entry = it.next();
 			Assert.assertEquals("cluster2", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(1), (double)(entities[3].getNumClusters()+entities[4].getNumClusters()));
+			Assert.assertEquals(entry.getValue().getValues().get(1), (double)(entities[3].getNumClusters()+entities[4].getNumClusters()), 0.0001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -250,15 +250,15 @@ private final static Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggreg
 			HierarchicalAggregateEntity result = agg.result();
 			writeToJson("After aggregate", result);
 			Assert.assertEquals(2, result.getChildren().size());
-			Assert.assertEquals(66.0, (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));
+			Assert.assertEquals(66.0, (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001);
+			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.0001);
 			Assert.assertEquals(2, result.getChildren().get("cluster1").getChildren().size());
-			Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc2").getValues().get(0), (double)(entities[2].getNumHosts()));
+			Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()), 0.0001);
+			Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc2").getValues().get(0), (double)(entities[2].getNumHosts()), 0.0001);
 			
-			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
+			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001);
 			Assert.assertEquals(1, result.getChildren().get("cluster2").getChildren().size());
-			Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("dc1").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
+			Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("dc1").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001);
 			
 			// test sort by function 2
 			SortOption so = new SortOption();
@@ -273,12 +273,12 @@ private final static Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggreg
 			Assert.assertEquals(true, it.hasNext());
 			Map.Entry<String, HierarchicalAggregateEntity> entry = it.next();
 			Assert.assertEquals("cluster2", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
+			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001);
 			
 			Assert.assertEquals(true, it.hasNext());
 			entry = it.next();
 			Assert.assertEquals("cluster1", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));			
+			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.0001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -301,8 +301,8 @@ private final static Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggreg
 			HierarchicalAggregateEntity result = agg.result();
 			writeToJson("After aggregate", result);
 			Assert.assertEquals(result.getChildren().size(), 2);
-			Assert.assertEquals(result.getChildren().get("dc1").getValues().get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts())+entities[4].getNumHosts());
-			Assert.assertEquals(result.getChildren().get("unassigned").getValues().get(0), (double)(entities[0].getNumHosts()+entities[3].getNumHosts()));
+			Assert.assertEquals(result.getChildren().get("dc1").getValues().get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts())+entities[4].getNumHosts(), 0.0001);
+			Assert.assertEquals(result.getChildren().get("unassigned").getValues().get(0), (double)(entities[0].getNumHosts()+entities[3].getNumHosts()), 0.0001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -316,14 +316,14 @@ private final static Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggreg
 			HierarchicalAggregateEntity result = agg.result();
 			writeToJson("After aggregate", result);
 			Assert.assertEquals(result.getChildren().size(), 2);
-			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));
+			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()), 0.0001);
 			Assert.assertEquals(2, result.getChildren().get("cluster1").getChildren().size());
-			Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc1").getValues().get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("unassigned").getValues().get(0), (double)(entities[0].getNumHosts()));
+			Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc1").getValues().get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts()), 0.0001);
+			Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("unassigned").getValues().get(0), (double)(entities[0].getNumHosts()), 0.0001);
 			
-			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("dc1").getValues().get(0), (double)(entities[4].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("unassigned").getValues().get(0), (double)(entities[3].getNumHosts()));
+			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()), 0.0001);
+			Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("dc1").getValues().get(0), (double)(entities[4].getNumHosts()), 0.0001);
+			Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("unassigned").getValues().get(0), (double)(entities[3].getNumHosts()), 0.0001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestPostFlatAggregateSort.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestPostFlatAggregateSort.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestPostFlatAggregateSort.java
index 9751e27..6850d2c 100644
--- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestPostFlatAggregateSort.java
+++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestPostFlatAggregateSort.java
@@ -23,11 +23,11 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.eagle.query.aggregate.timeseries.PostFlatAggregateSort;
-import junit.framework.Assert;
 
 import org.apache.log4j.Logger;
 import org.codehaus.jackson.JsonFactory;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.junit.Assert;
 import org.junit.Test;
 
 import org.apache.eagle.query.aggregate.timeseries.SortOption;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestTimeSeriesAggregator.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestTimeSeriesAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestTimeSeriesAggregator.java
index d953cfa..b72bdb7 100755
--- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestTimeSeriesAggregator.java
+++ b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestTimeSeriesAggregator.java
@@ -23,9 +23,8 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.eagle.query.aggregate.timeseries.TimeSeriesAggregator;
-import junit.framework.Assert;
-
 import org.apache.eagle.query.aggregate.AggregateFunctionType;
+import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -69,22 +68,22 @@ public class TestTimeSeriesAggregator {
 			}
 			Map<List<String>, List<Double>> result = tsAgg.result();
 			Assert.assertEquals(result.size(), 6);
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "0")).get(0), (double)(entities[0].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "17")).get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "35")).get(0), (double)(entities[3].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "53")).get(0), (double)(entities[4].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "58")).get(0), (double)(entities[5].getNumHosts()+entities[6].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "58")).get(0), (double)(entities[7].getNumHosts()));
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "0")).get(0), (double)(entities[0].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "17")).get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "35")).get(0), (double)(entities[3].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "53")).get(0), (double)(entities[4].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "58")).get(0), (double)(entities[5].getNumHosts()+entities[6].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "58")).get(0), (double)(entities[7].getNumHosts()), 0.001);
 			
 			Map<List<String>, List<double[]>> tsResult = tsAgg.getMetric();
 			Assert.assertEquals(tsResult.size(), 2);
 			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0).length, 60);
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[0], (double)(entities[0].getNumHosts()));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[17], (double)(entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[35], (double)(entities[3].getNumHosts()));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[53], (double)(entities[4].getNumHosts()));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[58], (double)(entities[5].getNumHosts()+entities[6].getNumHosts()));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[58], (double)(entities[7].getNumHosts()));
+			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[0], (double)(entities[0].getNumHosts()), 0.001);
+			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[17], (double)(entities[1].getNumHosts()+entities[2].getNumHosts()), 0.001);
+			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[35], (double)(entities[3].getNumHosts()), 0.001);
+			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[53], (double)(entities[4].getNumHosts()), 0.001);
+			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[58], (double)(entities[5].getNumHosts()+entities[6].getNumHosts()), 0.001);
+			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[58], (double)(entities[7].getNumHosts()), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -98,20 +97,20 @@ public class TestTimeSeriesAggregator {
 			}
 			Map<List<String>, List<Double>> result = tsAgg.result();
 			Assert.assertEquals(result.size(), 5);
-			Assert.assertEquals(result.get(Arrays.asList("0")).get(0), (double)(entities[0].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("17")).get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("35")).get(0), (double)(entities[3].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("53")).get(0), (double)(entities[4].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("58")).get(0), (double)(entities[5].getNumHosts()+entities[6].getNumHosts()+entities[7].getNumHosts()));
+			Assert.assertEquals(result.get(Arrays.asList("0")).get(0), (double)(entities[0].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("17")).get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("35")).get(0), (double)(entities[3].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("53")).get(0), (double)(entities[4].getNumHosts()), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("58")).get(0), (double)(entities[5].getNumHosts()+entities[6].getNumHosts()+entities[7].getNumHosts()), 0.001);
 			
 			Map<List<String>, List<double[]>> tsResult = tsAgg.getMetric();
 			Assert.assertEquals(tsResult.size(), 1);
 			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0).length, 60);
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[0], (double)(entities[0].getNumHosts()));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[17], (double)(entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[35], (double)(entities[3].getNumHosts()));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[53], (double)(entities[4].getNumHosts()));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[58], (double)(entities[5].getNumHosts()+entities[6].getNumHosts()+entities[7].getNumHosts()));		
+			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[0], (double)(entities[0].getNumHosts()), 0.001);
+			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[17], (double)(entities[1].getNumHosts()+entities[2].getNumHosts()), 0.001);
+			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[35], (double)(entities[3].getNumHosts()), 0.001);
+			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[53], (double)(entities[4].getNumHosts()), 0.001);
+			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[58], (double)(entities[5].getNumHosts()+entities[6].getNumHosts()+entities[7].getNumHosts()), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -125,22 +124,22 @@ public class TestTimeSeriesAggregator {
 			}
 			Map<List<String>, List<Double>> result = tsAgg.result();
 			Assert.assertEquals(result.size(), 6);
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "0")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "17")).get(0), (double)(2));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "35")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "53")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "58")).get(0), (double)(2));
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "58")).get(0), (double)(1));
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "0")).get(0), (double)(1), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "17")).get(0), (double)(2), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "35")).get(0), (double)(1), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "53")).get(0), (double)(1), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster2", "58")).get(0), (double)(2), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("cluster1", "58")).get(0), (double)(1), 0.001);
 			
 			Map<List<String>, List<double[]>> tsResult = tsAgg.getMetric();
 			Assert.assertEquals(tsResult.size(), 2);
 			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0).length, 60);
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[0], (double)(1));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[17], (double)(2));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[35], (double)(1));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[53], (double)(1));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[58], (double)(2));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[58], (double)(1));
+			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[0], (double)(1), 0.001);
+			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[17], (double)(2), 0.001);
+			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[35], (double)(1), 0.001);
+			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[53], (double)(1), 0.001);
+			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[58], (double)(2), 0.001);
+			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[58], (double)(1), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");
@@ -154,20 +153,20 @@ public class TestTimeSeriesAggregator {
 			}
 			Map<List<String>, List<Double>> result = tsAgg.result();
 			Assert.assertEquals(result.size(), 5);
-			Assert.assertEquals(result.get(Arrays.asList("0")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("17")).get(0), (double)(2));
-			Assert.assertEquals(result.get(Arrays.asList("35")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("53")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("58")).get(0), (double)(3));
+			Assert.assertEquals(result.get(Arrays.asList("0")).get(0), (double)(1), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("17")).get(0), (double)(2), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("35")).get(0), (double)(1), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("53")).get(0), (double)(1), 0.001);
+			Assert.assertEquals(result.get(Arrays.asList("58")).get(0), (double)(3), 0.001);
 			
 			Map<List<String>, List<double[]>> tsResult = tsAgg.getMetric();
 			Assert.assertEquals(tsResult.size(), 1);
 			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0).length, 60);
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[0], (double)(1));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[17], (double)(2));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[35], (double)(1));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[53], (double)(1));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[58], (double)(3));		
+			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[0], (double)(1), 0.001);
+			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[17], (double)(2), 0.001);
+			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[35], (double)(1), 0.001);
+			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[53], (double)(1), 0.001);
+			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[58], (double)(3), 0.001);
 		}catch(Exception ex){
 			LOG.error("Can not aggregate", ex);
 			Assert.fail("Can not aggregate");

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-core/eagle-query/eagle-service-base/src/test/java/org/apache/eagle/service/generic/TestHBaseLogReader2.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-service-base/src/test/java/org/apache/eagle/service/generic/TestHBaseLogReader2.java b/eagle-core/eagle-query/eagle-service-base/src/test/java/org/apache/eagle/service/generic/TestHBaseLogReader2.java
index 432cbdb..701a805 100755
--- a/eagle-core/eagle-query/eagle-service-base/src/test/java/org/apache/eagle/service/generic/TestHBaseLogReader2.java
+++ b/eagle-core/eagle-query/eagle-service-base/src/test/java/org/apache/eagle/service/generic/TestHBaseLogReader2.java
@@ -28,6 +28,7 @@ import org.apache.eagle.query.ListQueryCompiler;
 import org.apache.eagle.service.hbase.EmbeddedHbase;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Assert;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -38,6 +39,7 @@ import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 
+@Ignore
 public class TestHBaseLogReader2 {
 	private final static Logger LOG = LoggerFactory.getLogger(TestHBaseLogReader2.class);
     private static EmbeddedHbase hbase = EmbeddedHbase.getInstance();

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-core/eagle-query/eagle-service-base/src/test/java/org/apache/eagle/service/generic/TestListQueryResource.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-service-base/src/test/java/org/apache/eagle/service/generic/TestListQueryResource.java b/eagle-core/eagle-query/eagle-service-base/src/test/java/org/apache/eagle/service/generic/TestListQueryResource.java
index a53f980..7384c70 100755
--- a/eagle-core/eagle-query/eagle-service-base/src/test/java/org/apache/eagle/service/generic/TestListQueryResource.java
+++ b/eagle-core/eagle-query/eagle-service-base/src/test/java/org/apache/eagle/service/generic/TestListQueryResource.java
@@ -31,10 +31,7 @@ import org.apache.eagle.storage.hbase.query.coprocessor.impl.AggregateClientImpl
 import org.apache.eagle.common.DateTimeUtil;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Scan;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.*;
 
 import java.io.IOException;
 import java.util.ArrayList;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-core/eagle-query/eagle-storage-base/src/test/java/org/apache/eagle/storage/TestUri.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-base/src/test/java/org/apache/eagle/storage/TestUri.java b/eagle-core/eagle-query/eagle-storage-base/src/test/java/org/apache/eagle/storage/TestUri.java
index 9f42e29..0976bbb 100644
--- a/eagle-core/eagle-query/eagle-storage-base/src/test/java/org/apache/eagle/storage/TestUri.java
+++ b/eagle-core/eagle-query/eagle-storage-base/src/test/java/org/apache/eagle/storage/TestUri.java
@@ -16,7 +16,7 @@
  */
 package org.apache.eagle.storage;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 import org.junit.Test;
 
 import java.net.URI;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/TestHBaseStatement.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/TestHBaseStatement.java b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/TestHBaseStatement.java
index 5572724..f8e6c23 100644
--- a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/TestHBaseStatement.java
+++ b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/TestHBaseStatement.java
@@ -24,12 +24,9 @@ import java.util.List;
 import org.apache.eagle.log.entity.meta.EntityDefinition;
 import org.apache.eagle.log.entity.meta.EntityDefinitionManager;
 import org.apache.eagle.storage.operation.CreateStatement;
-import junit.framework.Assert;
 
 import org.apache.eagle.storage.operation.QueryStatement;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.*;
 
 import org.apache.eagle.log.entity.test.TestTimeSeriesAPIEntity;
 import org.apache.eagle.service.hbase.TestHBaseBase;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestAggregateResultCallback.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestAggregateResultCallback.java b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestAggregateResultCallback.java
index 4e7254e..ba1b781 100755
--- a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestAggregateResultCallback.java
+++ b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestAggregateResultCallback.java
@@ -24,13 +24,15 @@ import org.apache.eagle.query.aggregate.raw.GroupbyKey;
 import org.apache.eagle.query.aggregate.raw.GroupbyKeyValue;
 import org.apache.eagle.query.aggregate.raw.GroupbyValue;
 import org.apache.eagle.common.ByteUtil;
-import junit.framework.Assert;
+import org.junit.Assert;
+import org.junit.Ignore;
 import org.junit.Test;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
+@Ignore
 public class TestAggregateResultCallback {
     @Test
     public void testUpdate(){
@@ -82,15 +84,15 @@ public class TestAggregateResultCallback {
 //        Assert.assertEquals("a",new String(row0.getKey().getValue().get(0).copyBytes()));
 //        Assert.assertEquals("b",new String(row0.getKey().getValue().get(1).copyBytes()));
         Assert.assertEquals(new GroupbyKey(Arrays.asList("a".getBytes(),"b".getBytes(),"c".getBytes())),row0.getKey());
-        Assert.assertEquals(4.0,row0.getValue().get(0).get());
+        Assert.assertEquals(4.0,row0.getValue().get(0).get(), 0.00001);
         Assert.assertEquals(10, ByteUtil.bytesToInt(row0.getValue().getMeta(0).getBytes()));
-        Assert.assertEquals(3.0, row0.getValue().get(1).get());
+        Assert.assertEquals(3.0, row0.getValue().get(1).get(), 0.00001);
         Assert.assertEquals(10, ByteUtil.bytesToInt(row0.getValue().getMeta(1).getBytes()));
-        Assert.assertEquals(10.0,row0.getValue().get(2).get());
+        Assert.assertEquals(10.0,row0.getValue().get(2).get(), 0.00001);
         Assert.assertEquals(10, ByteUtil.bytesToInt(row0.getValue().getMeta(2).getBytes()));
-        Assert.assertEquals(1.0,row0.getValue().get(3).get());
+        Assert.assertEquals(1.0,row0.getValue().get(3).get(), 0.00001);
         Assert.assertEquals(10, ByteUtil.bytesToInt(row0.getValue().getMeta(3).getBytes()));
-        Assert.assertEquals(13.0,row0.getValue().get(4).get());
+        Assert.assertEquals(13.0,row0.getValue().get(4).get(), 0.00001);
         Assert.assertEquals(10, ByteUtil.bytesToInt(row0.getValue().getMeta(4).getBytes()));
 
         // == ROW-#1 ==
@@ -100,15 +102,15 @@ public class TestAggregateResultCallback {
         // a,b      |       2        2          9           1           11       | 9
         GroupbyKeyValue row1 = callbackResult.getKeyValues().get(1);
         Assert.assertEquals(new GroupbyKey(Arrays.asList("a".getBytes(),"b".getBytes())),row1.getKey());
-        Assert.assertEquals(2.0,row1.getValue().get(0).get());
+        Assert.assertEquals(2.0,row1.getValue().get(0).get(), 0.00001);
         Assert.assertEquals(9, ByteUtil.bytesToInt(row1.getValue().getMeta(4).getBytes()));
-        Assert.assertEquals(2.0, row1.getValue().get(1).get());
+        Assert.assertEquals(2.0, row1.getValue().get(1).get(), 0.00001);
         Assert.assertEquals(9, ByteUtil.bytesToInt(row1.getValue().getMeta(4).getBytes()));
-        Assert.assertEquals(9.0,row1.getValue().get(2).get());
+        Assert.assertEquals(9.0,row1.getValue().get(2).get(), 0.00001);
         Assert.assertEquals(9, ByteUtil.bytesToInt(row1.getValue().getMeta(4).getBytes()));
-        Assert.assertEquals(1.0,row1.getValue().get(3).get());
+        Assert.assertEquals(1.0,row1.getValue().get(3).get(), 0.00001);
         Assert.assertEquals(9, ByteUtil.bytesToInt(row1.getValue().getMeta(4).getBytes()));
-        Assert.assertEquals(11.0,row1.getValue().get(4).get());
+        Assert.assertEquals(11.0,row1.getValue().get(4).get(), 0.00001);
         Assert.assertEquals(9, ByteUtil.bytesToInt(row1.getValue().getMeta(4).getBytes()));
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateClient.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateClient.java b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateClient.java
index e5e93c4..71c1110 100755
--- a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateClient.java
+++ b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateClient.java
@@ -24,7 +24,6 @@ import java.util.List;
 
 import org.apache.eagle.common.config.EagleConfigFactory;
 import org.apache.eagle.storage.hbase.query.coprocessor.impl.AggregateClientImpl;
-import junit.framework.Assert;
 
 import org.apache.eagle.storage.hbase.query.coprocessor.AggregateClient;
 import org.apache.hadoop.hbase.client.HTableFactory;
@@ -33,7 +32,9 @@ import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.DoubleWritable;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -224,9 +225,9 @@ public class TestGroupAggregateClient extends TestHBaseBase {
 			Assert.assertTrue(result.size() > 0);
 			Assert.assertEquals("test4UT", new String(result.get(0).getKey().getValue().get(0).copyBytes()));
 			Assert.assertEquals("dc1", new String(result.get(0).getKey().getValue().get(1).copyBytes()));
-			Assert.assertEquals(2.0, result.get(0).getValue().get(0).get());
-			Assert.assertEquals(2.0, result.get(0).getValue().get(1).get());
-			Assert.assertEquals(2.0, result.get(0).getValue().get(2).get());
+			Assert.assertEquals(2.0, result.get(0).getValue().get(0).get(), 0.00001);
+			Assert.assertEquals(2.0, result.get(0).getValue().get(1).get(), 0.00001);
+			Assert.assertEquals(2.0, result.get(0).getValue().get(2).get(), 0.00001);
 			Assert.assertTrue(num <= result.get(0).getValue().get(3).get());
 			Assert.assertTrue(2.0 * num <= result.get(0).getValue().get(4).get());
 		} catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateTimeSeriesClient.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateTimeSeriesClient.java b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateTimeSeriesClient.java
index d0197fb..e7c7b3b 100755
--- a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateTimeSeriesClient.java
+++ b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateTimeSeriesClient.java
@@ -22,7 +22,7 @@ import java.util.HashMap;
 import java.util.List;
 
 import org.apache.eagle.common.config.EagleConfigFactory;
-import junit.framework.Assert;
+import org.junit.Assert;
 
 import org.apache.eagle.storage.hbase.query.coprocessor.AggregateClient;
 import org.apache.hadoop.hbase.client.HTableInterface;
@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.DoubleWritable;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/spi/TestHBaseStorageLoader.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/spi/TestHBaseStorageLoader.java b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/spi/TestHBaseStorageLoader.java
index bd54c1c..d57f1a9 100644
--- a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/spi/TestHBaseStorageLoader.java
+++ b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/spi/TestHBaseStorageLoader.java
@@ -19,7 +19,7 @@ package org.apache.eagle.storage.hbase.spi;
 import org.apache.eagle.storage.DataStorageManager;
 import org.apache.eagle.storage.exception.IllegalDataStorageTypeException;
 import org.apache.eagle.storage.hbase.HBaseStorage;
-import junit.framework.Assert;
+import org.junit.Assert;
 import org.junit.Test;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-core/eagle-query/eagle-storage-jdbc/src/test/java/org/apache/eagle/storage/jdbc/TestGenericMetricStorage.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-jdbc/src/test/java/org/apache/eagle/storage/jdbc/TestGenericMetricStorage.java b/eagle-core/eagle-query/eagle-storage-jdbc/src/test/java/org/apache/eagle/storage/jdbc/TestGenericMetricStorage.java
index 524b294..bb3459c 100644
--- a/eagle-core/eagle-query/eagle-storage-jdbc/src/test/java/org/apache/eagle/storage/jdbc/TestGenericMetricStorage.java
+++ b/eagle-core/eagle-query/eagle-storage-jdbc/src/test/java/org/apache/eagle/storage/jdbc/TestGenericMetricStorage.java
@@ -1,6 +1,5 @@
 package org.apache.eagle.storage.jdbc;
 
-import junit.framework.Assert;
 import org.apache.eagle.common.DateTimeUtil;
 import org.apache.eagle.log.entity.GenericMetricEntity;
 import org.apache.eagle.log.entity.meta.EntityDefinition;
@@ -10,6 +9,7 @@ import org.apache.eagle.storage.operation.CompiledQuery;
 import org.apache.eagle.storage.operation.RawQuery;
 import org.apache.eagle.storage.result.ModifyResult;
 import org.apache.eagle.storage.result.QueryResult;
+import org.junit.Assert;
 import org.junit.Test;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-core/eagle-query/eagle-storage-jdbc/src/test/java/org/apache/eagle/storage/jdbc/TestJdbcStorage.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-jdbc/src/test/java/org/apache/eagle/storage/jdbc/TestJdbcStorage.java b/eagle-core/eagle-query/eagle-storage-jdbc/src/test/java/org/apache/eagle/storage/jdbc/TestJdbcStorage.java
index 005a61c..41e1900 100644
--- a/eagle-core/eagle-query/eagle-storage-jdbc/src/test/java/org/apache/eagle/storage/jdbc/TestJdbcStorage.java
+++ b/eagle-core/eagle-query/eagle-storage-jdbc/src/test/java/org/apache/eagle/storage/jdbc/TestJdbcStorage.java
@@ -16,7 +16,7 @@
  */
 package org.apache.eagle.storage.jdbc;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 import org.apache.commons.lang.time.StopWatch;
 import org.apache.eagle.common.DateTimeUtil;
 import org.apache.eagle.log.entity.meta.EntityDefinition;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-gc/src/test/java/org/apache/eagle/TestGCLogParser.java
----------------------------------------------------------------------
diff --git a/eagle-gc/src/test/java/org/apache/eagle/TestGCLogParser.java b/eagle-gc/src/test/java/org/apache/eagle/TestGCLogParser.java
index 468d0d6..8c36c80 100644
--- a/eagle-gc/src/test/java/org/apache/eagle/TestGCLogParser.java
+++ b/eagle-gc/src/test/java/org/apache/eagle/TestGCLogParser.java
@@ -19,7 +19,6 @@
 
 package org.apache.eagle;
 
-import junit.framework.Assert;
 import org.apache.eagle.gc.model.GCPausedEvent;
 import org.apache.eagle.gc.parser.full.ConcurrentModeFailureParser;
 import org.apache.eagle.gc.parser.full.NormalFullGCParser;
@@ -27,6 +26,7 @@ import org.apache.eagle.gc.parser.full.ParaNewPromotionFailureParser;
 import org.apache.eagle.gc.parser.tenured.CMSInitialMarkParser;
 import org.apache.eagle.gc.parser.tenured.CMSRemarkParser;
 import org.apache.eagle.gc.parser.young.ParaNewParser;
+import org.junit.Assert;
 import org.junit.Test;
 
 public class TestGCLogParser {

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/HadoopJmxMetricDeserializerTest.java
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/HadoopJmxMetricDeserializerTest.java b/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/HadoopJmxMetricDeserializerTest.java
index d966aab..4c7fe6d 100644
--- a/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/HadoopJmxMetricDeserializerTest.java
+++ b/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/HadoopJmxMetricDeserializerTest.java
@@ -16,11 +16,8 @@
  */
 package org.apache.eagle.hadoop.metric;
 
-import junit.framework.Assert;
 import org.junit.Test;
 
-import java.util.Map;
-
 /**
  * Created on 1/19/16.
  */

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/TestHadoopMetricSiddhiQL.java
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/TestHadoopMetricSiddhiQL.java b/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/TestHadoopMetricSiddhiQL.java
index 0fd85dd..1a18655 100644
--- a/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/TestHadoopMetricSiddhiQL.java
+++ b/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/TestHadoopMetricSiddhiQL.java
@@ -16,7 +16,7 @@
  */
 package org.apache.eagle.hadoop.metric;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.wso2.siddhi.core.ExecutionPlanRuntime;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/topo/NewKafkaSourcedSpoutProvider.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/topo/NewKafkaSourcedSpoutProvider.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/topo/NewKafkaSourcedSpoutProvider.java
new file mode 100644
index 0000000..47d3b21
--- /dev/null
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/topo/NewKafkaSourcedSpoutProvider.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.eagle.security.topo;
+
+import backtype.storm.spout.Scheme;
+import backtype.storm.spout.SchemeAsMultiScheme;
+import backtype.storm.topology.base.BaseRichSpout;
+import com.typesafe.config.Config;
+import org.apache.eagle.dataproc.impl.storm.StormSpoutProvider;
+import org.apache.eagle.dataproc.impl.storm.kafka.KafkaSourcedSpoutScheme;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import storm.kafka.BrokerHosts;
+import storm.kafka.KafkaSpout;
+import storm.kafka.SpoutConfig;
+import storm.kafka.ZkHosts;
+
+import java.util.Arrays;
+
+/**
+ * Since 6/8/16.
+ */
+public class NewKafkaSourcedSpoutProvider implements StormSpoutProvider {
+    private final static Logger LOG = LoggerFactory.getLogger(NewKafkaSourcedSpoutProvider.class);
+
+    private String configPrefix = "dataSourceConfig";
+
+    public NewKafkaSourcedSpoutProvider(){}
+
+    public NewKafkaSourcedSpoutProvider(String prefix){
+        this.configPrefix = prefix;
+    }
+
+    @Override
+    public BaseRichSpout getSpout(Config config){
+        Config context = config;
+        if(this.configPrefix!=null) context = config.getConfig(configPrefix);
+        // Kafka topic
+        String topic = context.getString("topic");
+        // Kafka consumer group id
+        String groupId = context.getString("consumerGroupId");
+        // Kafka fetch size
+        int fetchSize = context.getInt("fetchSize");
+        // Kafka deserializer class
+        String deserClsName = context.getString("deserializerClass");
+        // Kafka broker zk connection
+        String zkConnString = context.getString("zkConnection");
+        // transaction zkRoot
+        String zkRoot = context.getString("transactionZKRoot");
+
+        LOG.info(String.format("Use topic id: %s",topic));
+
+        String brokerZkPath = null;
+        if(context.hasPath("brokerZkPath")) {
+            brokerZkPath = context.getString("brokerZkPath");
+        }
+
+        BrokerHosts hosts;
+        if(brokerZkPath == null) {
+            hosts = new ZkHosts(zkConnString);
+        } else {
+            hosts = new ZkHosts(zkConnString, brokerZkPath);
+        }
+
+        SpoutConfig spoutConfig = new SpoutConfig(hosts,
+                topic,
+                zkRoot + "/" + topic,
+                groupId);
+
+        // transaction zkServers
+        spoutConfig.zkServers = Arrays.asList(context.getString("transactionZKServers").split(","));
+        // transaction zkPort
+        spoutConfig.zkPort = context.getInt("transactionZKPort");
+        // transaction update interval
+        spoutConfig.stateUpdateIntervalMs = context.getLong("transactionStateUpdateMS");
+        // Kafka fetch size
+        spoutConfig.fetchSizeBytes = fetchSize;
+        // "startOffsetTime" is for test usage, prod should not use this
+        if (context.hasPath("startOffsetTime")) {
+            spoutConfig.startOffsetTime = context.getInt("startOffsetTime");
+        }
+        // "forceFromStart" is for test usage, prod should not use this
+        if (context.hasPath("forceFromStart")) {
+            spoutConfig.forceFromStart = context.getBoolean("forceFromStart");
+        }
+
+        if (context.hasPath("schemeCls")) {
+            try {
+                Scheme s = (Scheme)Class.forName(context.getString("schemeCls")).newInstance();
+                spoutConfig.scheme = new SchemeAsMultiScheme(s);
+            }catch(Exception ex){
+                LOG.error("error instantiating scheme object");
+                throw new IllegalStateException(ex);
+            }
+        }else{
+            String err = "schemeCls must be present";
+            LOG.error(err);
+            throw new IllegalStateException(err);
+        }
+        return new KafkaSpout(spoutConfig);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/topo/TopologySubmitter.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/topo/TopologySubmitter.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/topo/TopologySubmitter.java
new file mode 100644
index 0000000..af737a3
--- /dev/null
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/topo/TopologySubmitter.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.eagle.security.topo;
+
+import backtype.storm.LocalCluster;
+import backtype.storm.StormSubmitter;
+import backtype.storm.generated.StormTopology;
+import backtype.storm.utils.Utils;
+import com.typesafe.config.Config;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import storm.kafka.bolt.KafkaBolt;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Since 6/8/16.
+ */
+public class TopologySubmitter {
+    public final static String LOCAL_MODE = "topology.localMode";
+    public final static String MESSAGE_TIMEOUT_SECS = "topology.messageTimeoutSecs";
+    public final static String TOTAL_WORKER_NUM = "topology.numOfTotalWorkers";
+    public final static int DEFAULT_MESSAGE_TIMEOUT_SECS = 3600;
+
+    private static Logger LOG = LoggerFactory.getLogger(TopologySubmitter.class);
+
+    public static void submit(StormTopology topology, Config config){
+        backtype.storm.Config stormConfig = new backtype.storm.Config();
+        int messageTimeoutSecs = config.hasPath(MESSAGE_TIMEOUT_SECS)?config.getInt(MESSAGE_TIMEOUT_SECS) : DEFAULT_MESSAGE_TIMEOUT_SECS;
+        LOG.info("Set topology.message.timeout.secs as {}",messageTimeoutSecs);
+        stormConfig.setMessageTimeoutSecs(messageTimeoutSecs);
+
+        // set kafka sink
+        if(config.hasPath("dataSinkConfig.brokerList")){
+            Map props = new HashMap<>();
+            props.put("metadata.broker.list", config.getString("dataSinkConfig.brokerList"));
+            props.put("serializer.class", config.getString("dataSinkConfig.serializerClass"));
+            props.put("key.serializer.class", config.getString("dataSinkConfig.keySerializerClass"));
+            stormConfig.put(KafkaBolt.KAFKA_BROKER_PROPERTIES, props);
+        }
+
+        if(config.hasPath("dataSinkConfig.serializerClass")){
+            stormConfig.put(KafkaBolt.TOPIC, config.getString("dataSinkConfig.topic"));
+        }
+
+        if(config.hasPath("dataSinkConfig.topic")){
+            stormConfig.put(KafkaBolt.TOPIC, config.getString("dataSinkConfig.topic"));
+        }
+
+        boolean localMode = config.getBoolean(LOCAL_MODE);
+        int numOfTotalWorkers = config.getInt(TOTAL_WORKER_NUM);
+        stormConfig.setNumWorkers(numOfTotalWorkers);
+        String topologyId = config.getString("topology.name");
+        if(localMode) {
+            LOG.info("Submitting as local mode");
+            LocalCluster cluster = new LocalCluster();
+            cluster.submitTopology(topologyId, stormConfig, topology);
+            Utils.sleep(Long.MAX_VALUE);
+        }else{
+            LOG.info("Submitting as cluster mode");
+            try {
+                StormSubmitter.submitTopologyWithProgressBar(topologyId, stormConfig, topology);
+            } catch(Exception ex) {
+                LOG.error("fail submitting topology {}", topology, ex);
+                throw new IllegalStateException(ex);
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/util/ExternalDataJoiner.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/util/ExternalDataJoiner.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/util/ExternalDataJoiner.java
index e5e4ae0..fe0c8e7 100644
--- a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/util/ExternalDataJoiner.java
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/util/ExternalDataJoiner.java
@@ -43,6 +43,7 @@ public class ExternalDataJoiner {
 	private Scheduler sched;
 	private JobDataMap jobDataMap;
 	private Class<? extends Job> jobCls;
+	private String id;
 	private final static String SCHEDULER_NAME = "OuterDataJoiner.scheduler";
 	
 	private static final String DATA_JOIN_POLL_INTERVALSEC = "dataJoinPollIntervalSec";
@@ -61,7 +62,8 @@ public class ExternalDataJoiner {
 		init(jobCls, prop);
 	}
 	
-	public ExternalDataJoiner(Class<? extends Job> jobCls, Config config) throws Exception{
+	public ExternalDataJoiner(Class<? extends Job> jobCls, Config config, String id) throws Exception{
+		this.id = id;
 		Map<String, Object> map = new HashMap<String, Object>();
         for(Map.Entry<String, ConfigValue> entry : config.getObject("eagleProps").entrySet()){
             map.put(entry.getKey(), entry.getValue().unwrapped());
@@ -78,7 +80,7 @@ public class ExternalDataJoiner {
 	
 	public void start(){
 		// for job
-		String group = String.format("%s.%s.%s", QUARTZ_GROUP_NAME, jobDataMap.getString(EagleConfigConstants.SITE), jobDataMap.getString(EagleConfigConstants.APPLICATION));
+		String group = String.format("%s.%s.%s.%s", QUARTZ_GROUP_NAME, jobDataMap.getString(EagleConfigConstants.SITE), jobDataMap.getString(EagleConfigConstants.APPLICATION), id);
 		JobDetail job = JobBuilder.newJob(jobCls)
 		     .withIdentity(jobCls.getName() + ".job", group)
 		     .usingJobData(jobDataMap)
@@ -88,7 +90,7 @@ public class ExternalDataJoiner {
 		Object interval = jobDataMap.get(DATA_JOIN_POLL_INTERVALSEC);
         int dataJoinPollIntervalSec = (interval == null ? defaultIntervalSeconds : Integer.parseInt(interval.toString()));
 		Trigger trigger = TriggerBuilder.newTrigger() 
-			  .withIdentity(jobCls.getName() + ".trigger", QUARTZ_GROUP_NAME) 
+			  .withIdentity(jobCls.getName() + ".trigger", group)
 		      .startNow() 
 		      .withSchedule(SimpleScheduleBuilder.simpleSchedule() 
 		          .withIntervalInSeconds(dataJoinPollIntervalSec)

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestHDFSAuditLogParser.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestHDFSAuditLogParser.java b/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestHDFSAuditLogParser.java
index cea7237..5f22b8b 100644
--- a/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestHDFSAuditLogParser.java
+++ b/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestHDFSAuditLogParser.java
@@ -21,7 +21,7 @@ package org.apache.eagle.security.crawler.audit;
 
 import org.apache.eagle.security.hdfs.HDFSAuditLogParser;
 import org.apache.eagle.security.hdfs.HDFSAuditLogObject;
-import junit.framework.Assert;
+import org.junit.Assert;
 import org.junit.Test;
 
 public class TestHDFSAuditLogParser {

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestMAPRFSAuditLogParser.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestMAPRFSAuditLogParser.java b/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestMAPRFSAuditLogParser.java
index f0eaf10..5e74a7e 100644
--- a/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestMAPRFSAuditLogParser.java
+++ b/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestMAPRFSAuditLogParser.java
@@ -16,10 +16,9 @@
  */
 package org.apache.eagle.security.crawler.audit;
 
-import junit.framework.Assert;
-import org.apache.eagle.security.hdfs.HDFSAuditLogParser;
 import org.apache.eagle.security.hdfs.MAPRFSAuditLogObject;
 import org.apache.eagle.security.hdfs.MAPRFSAuditLogParser;
+import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestMetaDataAccessConfigRepo.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestMetaDataAccessConfigRepo.java b/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestMetaDataAccessConfigRepo.java
index 93deeab..325663b 100644
--- a/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestMetaDataAccessConfigRepo.java
+++ b/eagle-security/eagle-security-common/src/test/java/org/apache/eagle/security/crawler/audit/TestMetaDataAccessConfigRepo.java
@@ -20,12 +20,10 @@ package org.apache.eagle.security.crawler.audit;
 
 
 import com.typesafe.config.*;
-import junit.framework.Assert;
 import org.apache.eagle.common.config.EagleConfigConstants;
+import org.junit.Assert;
 import org.junit.Test;
 
-import java.util.Map;
-
 public class TestMetaDataAccessConfigRepo {
 
     @Test

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-security/eagle-security-hbase-auditlog/README.md
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-auditlog/README.md b/eagle-security/eagle-security-hbase-auditlog/README.md
new file mode 100644
index 0000000..e7e338c
--- /dev/null
+++ b/eagle-security/eagle-security-hbase-auditlog/README.md
@@ -0,0 +1,25 @@
+<!--
+{% comment %}
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+{% endcomment %}
+-->
+
+#### create topic in sandbox
+/usr/hdp/2.2.4.2-2/kafka/bin/kafka-topics.sh --create --topic sandbox_hbase_audit_log --partitions 2 --replication-factor 1 --zookeeper localhost:2181
+
+
+#### produce a message in sandbox
+/usr/hdp/2.2.4.2-2/kafka/bin/kafka-console-producer.sh --topic sandbox_hbase_audit_log --broker-list sandbox.hortonworks.com:6667

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/eef4930c/eagle-security/eagle-security-hbase-auditlog/pom.xml
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-auditlog/pom.xml b/eagle-security/eagle-security-hbase-auditlog/pom.xml
new file mode 100644
index 0000000..a8b82c7
--- /dev/null
+++ b/eagle-security/eagle-security-hbase-auditlog/pom.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ /*
+  ~  * Licensed to the Apache Software Foundation (ASF) under one or more
+  ~  * contributor license agreements.  See the NOTICE file distributed with
+  ~  * this work for additional information regarding copyright ownership.
+  ~  * The ASF licenses this file to You under the Apache License, Version 2.0
+  ~  * (the "License"); you may not use this file except in compliance with
+  ~  * the License.  You may obtain a copy of the License at
+  ~  *
+  ~  *    http://www.apache.org/licenses/LICENSE-2.0
+  ~  *
+  ~  * Unless required by applicable law or agreed to in writing, software
+  ~  * distributed under the License is distributed on an "AS IS" BASIS,
+  ~  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~  * See the License for the specific language governing permissions and
+  ~  * limitations under the License.
+  ~  */
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>eagle-security-parent</artifactId>
+        <groupId>org.apache.eagle</groupId>
+        <version>0.5.0-incubating-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>eagle-security-hbase-auditlog</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.eagle</groupId>
+            <artifactId>eagle-security-common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.eagle</groupId>
+            <artifactId>eagle-stream-application-manager</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+    </dependencies>
+</project>


Mime
View raw message