eagle-commits mailing list archives

From: h..@apache.org
Subject: [13/51] [partial] incubator-eagle git commit: EAGLE-184 Migrate eagle website from https://github.com/eaglemonitoring/eaglemonitoring.github.io to document branch
Date: Thu, 03 Mar 2016 18:09:46 GMT
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/0ea130ef/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestHierarchicalAggregator.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestHierarchicalAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestHierarchicalAggregator.java
deleted file mode 100755
index cbcab0f..0000000
--- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestHierarchicalAggregator.java
+++ /dev/null
@@ -1,332 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.eagle.query.aggregate.test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.eagle.query.aggregate.timeseries.PostHierarchicalAggregateSort;
-import org.apache.eagle.query.aggregate.timeseries.HierarchicalAggregateEntity;
-import org.apache.eagle.query.aggregate.timeseries.HierarchicalAggregator;
-import org.apache.eagle.query.aggregate.AggregateFunctionType;
-import junit.framework.Assert;
-
-import org.apache.eagle.query.aggregate.timeseries.SortOption;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.eagle.log.entity.test.TestEntity;
-
-
-public class TestHierarchicalAggregator {
-	private final static Logger LOG = LoggerFactory.getLogger(TestHierarchicalAggregator.class);
-
-	@SuppressWarnings("serial")
-	private TestEntity createEntity(final String cluster, final String datacenter, final String rack, int numHosts, long numClusters){
-		TestEntity entity = new TestEntity();
-		Map<String, String> tags = new HashMap<String, String>(){{
-			put("cluster", cluster);
-			put("datacenter", datacenter);
-			put("rack", rack);
-		}}; 
-		entity.setTags(tags);
-		entity.setNumHosts(numHosts);
-		entity.setNumClusters(numClusters);
-		return entity;
-	}
-	
-	@SuppressWarnings("serial")
-	private TestEntity createEntityWithoutDatacenter(final String cluster, final String rack, int numHosts, long numClusters){
-		TestEntity entity = new TestEntity();
-		Map<String, String> tags = new HashMap<String, String>(){{
-			put("cluster", cluster);
-			put("rack", rack);
-		}}; 
-		entity.setTags(tags);
-		entity.setNumHosts(numHosts);
-		entity.setNumClusters(numClusters);
-		return entity;
-	}
-
-	private void writeToJson(String message, Object obj){
-		JsonFactory factory = new JsonFactory();
-		ObjectMapper mapper = new ObjectMapper(factory);
-		try{
-			String result = mapper.writeValueAsString(obj);
-			LOG.info(message + ":\n" + result);
-		}catch(Exception ex){
-			LOG.error("Can not write json", ex);
-			Assert.fail("Can not write json");
-		}
-	}
-	
-	@Test
-	public void testZeroGroupbyFieldHierarchicalAggregator(){
-		TestEntity[] entities = new TestEntity[5];
-		entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2);
-		entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1);
-		entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0);
-		entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2);
-		entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2);
-		HierarchicalAggregator agg = new HierarchicalAggregator(new ArrayList<String>(), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"));
-		try{
-			for(TestEntity e : entities){
-				agg.accumulate(e);
-			}
-			HierarchicalAggregateEntity result = agg.result();
-			writeToJson("After aggregate", result);
-			Assert.assertEquals(result.getChildren().size(), 0);
-			Assert.assertEquals(result.getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()));
-
-			// test sort by function1
-			SortOption so = new SortOption();
-			so.setIndex(0);
-			so.setAscendant(true);
-			List<SortOption> sortOptions = Arrays.asList(so);
-			PostHierarchicalAggregateSort.sort(result, sortOptions);
-			writeToJson("After sort" ,result);
-			Assert.assertEquals(null, result.getChildren());
-			Assert.assertEquals(0, result.getSortedList().size());
-			Assert.assertEquals(result.getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()));
-		}catch(Exception ex){
-			LOG.error("Can not aggregate", ex);
-			Assert.fail("Can not aggregate");
-		}
-	}
-	
-	@Test
-	public void testSingleGroupbyFieldHierarchicalAggregator(){
-		TestEntity[] entities = new TestEntity[5];
-		entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2);
-		entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1);
-		entities[2] = createEntity("cluster1", "dc2", "rack128", 10, 0);
-		entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2);
-		entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2);
-		HierarchicalAggregator agg = new HierarchicalAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"));
-		try{
-			for(TestEntity e : entities){
-				agg.accumulate(e);
-			}
-			HierarchicalAggregateEntity result = agg.result();
-			writeToJson("After aggregate" ,result);
-			Assert.assertEquals(result.getChildren().size(), 2);
-			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
-			
-			// test sort by function 1
-			SortOption so = new SortOption();
-			so.setIndex(0);
-			so.setAscendant(true);
-			List<SortOption> sortOptions = Arrays.asList(so);
-			PostHierarchicalAggregateSort.sort(result, sortOptions);
-			writeToJson("After sort" ,result);
-			Assert.assertEquals(null, result.getChildren());
-			Assert.assertEquals(2, result.getSortedList().size());
-			Iterator<Map.Entry<String, HierarchicalAggregateEntity>> it = result.getSortedList().iterator();
-			Assert.assertEquals(true, it.hasNext());
-			Map.Entry<String, HierarchicalAggregateEntity> entry = it.next();
-			Assert.assertEquals("cluster2", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
-			
-			Assert.assertEquals(true, it.hasNext());
-			entry = it.next();
-			Assert.assertEquals("cluster1", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));
-		}catch(Exception ex){
-			LOG.error("Can not aggregate", ex);
-			Assert.fail("Can not aggregate");
-		}
-		
-		agg = new HierarchicalAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"));
-		try{
-			for(TestEntity e : entities){
-				agg.accumulate(e);
-			}
-			HierarchicalAggregateEntity result = agg.result();
-			writeToJson("After aggregate" , result);
-			Assert.assertEquals(result.getChildren().size(), 2);
-			Assert.assertEquals(result.getChildren().get("dc1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("dc2").getValues().get(0), (double)(entities[2].getNumHosts()));
-			
-			// test sort by function 1
-			SortOption so = new SortOption();
-			so.setIndex(0);
-			so.setAscendant(true);
-			List<SortOption> sortOptions = Arrays.asList(so);
-			PostHierarchicalAggregateSort.sort(result, sortOptions);
-			writeToJson("After sort" ,result);
-			Assert.assertEquals(null, result.getChildren());
-			Assert.assertEquals(2, result.getSortedList().size());
-			Iterator<Map.Entry<String, HierarchicalAggregateEntity>> it = result.getSortedList().iterator();
-			Assert.assertEquals(true, it.hasNext());
-			Map.Entry<String, HierarchicalAggregateEntity> entry = it.next();
-			Assert.assertEquals("dc2", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[2].getNumHosts()));
-			
-			Assert.assertEquals(true, it.hasNext());
-			entry = it.next();
-			Assert.assertEquals("dc1", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()));			
-		}catch(Exception ex){
-			LOG.error("Can not aggregate", ex);
-			Assert.fail("Can not aggregate");
-		}
-		
-		agg = new HierarchicalAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum, AggregateFunctionType.sum), Arrays.asList("numHosts", "numClusters"));
-		try{
-			for(TestEntity e : entities){
-				agg.accumulate(e);
-			}
-			HierarchicalAggregateEntity result = agg.result();
-			writeToJson("After aggregate" , result);
-			Assert.assertEquals(result.getChildren().size(), 2);
-			Assert.assertEquals(2, result.getChildren().get("cluster1").getValues().size());
-			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(1), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters()));
-			Assert.assertEquals(2, result.getChildren().get("cluster2").getValues().size());
-			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(1), (double)(entities[3].getNumClusters()+entities[4].getNumClusters()));
-			
-			// test sort by function 2
-			SortOption so = new SortOption();
-			so.setIndex(1);
-			so.setAscendant(true);
-			List<SortOption> sortOptions = Arrays.asList(so);
-			PostHierarchicalAggregateSort.sort(result, sortOptions);
-			writeToJson("After sort" ,result);
-			Assert.assertEquals(null, result.getChildren());
-			Assert.assertEquals(2, result.getSortedList().size());
-			Iterator<Map.Entry<String, HierarchicalAggregateEntity>> it = result.getSortedList().iterator();
-			Assert.assertEquals(true, it.hasNext());
-			Map.Entry<String, HierarchicalAggregateEntity> entry = it.next();
-			Assert.assertEquals("cluster1", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(1), (double)(entities[0].getNumClusters()+entities[1].getNumClusters()+entities[2].getNumClusters()));
-			
-			Assert.assertEquals(true, it.hasNext());
-			entry = it.next();
-			Assert.assertEquals("cluster2", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(1), (double)(entities[3].getNumClusters()+entities[4].getNumClusters()));
-		}catch(Exception ex){
-			LOG.error("Can not aggregate", ex);
-			Assert.fail("Can not aggregate");
-		}
-	}
-	
-	
-	@Test
-	public void testMultipleGroupbyFieldsHierarchicalAggregator(){
-		TestEntity[] entities = new TestEntity[5];
-		entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2);
-		entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1);
-		entities[2] = createEntity("cluster1", "dc2", "rack128", 10, 0);
-		entities[3] = createEntity("cluster2", "dc1", "rack125", 9, 2);
-		entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2);
-		HierarchicalAggregator agg = new HierarchicalAggregator(Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"));
-		try{
-			for(TestEntity e : entities){
-				agg.accumulate(e);
-			}
-			HierarchicalAggregateEntity result = agg.result();
-			writeToJson("After aggregate", result);
-			Assert.assertEquals(2, result.getChildren().size());
-			Assert.assertEquals(66.0, (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()+entities[3].getNumHosts()+entities[4].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(2, result.getChildren().get("cluster1").getChildren().size());
-			Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc2").getValues().get(0), (double)(entities[2].getNumHosts()));
-			
-			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
-			Assert.assertEquals(1, result.getChildren().get("cluster2").getChildren().size());
-			Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("dc1").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
-			
-			// test sort by function 2
-			SortOption so = new SortOption();
-			so.setIndex(0);
-			so.setAscendant(true);
-			List<SortOption> sortOptions = Arrays.asList(so);
-			PostHierarchicalAggregateSort.sort(result, sortOptions);
-			writeToJson("After sort" ,result);
-			Assert.assertEquals(null, result.getChildren());
-			Assert.assertEquals(2, result.getSortedList().size());
-			Iterator<Map.Entry<String, HierarchicalAggregateEntity>> it = result.getSortedList().iterator();
-			Assert.assertEquals(true, it.hasNext());
-			Map.Entry<String, HierarchicalAggregateEntity> entry = it.next();
-			Assert.assertEquals("cluster2", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
-			
-			Assert.assertEquals(true, it.hasNext());
-			entry = it.next();
-			Assert.assertEquals("cluster1", entry.getKey());
-			Assert.assertEquals(entry.getValue().getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));			
-		}catch(Exception ex){
-			LOG.error("Can not aggregate", ex);
-			Assert.fail("Can not aggregate");
-		}
-	}
-	
-	@Test
-	public void testUnassigned(){ 
-		TestEntity[] entities = new TestEntity[5];
-		entities[0] = createEntityWithoutDatacenter("cluster1", "rack123", 12, 2);
-		entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1);
-		entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0);
-		entities[3] = createEntityWithoutDatacenter("cluster2", "rack125", 9, 2);
-		entities[4] = createEntity("cluster2", "dc1", "rack126", 15, 2);
-		HierarchicalAggregator agg = new HierarchicalAggregator(Arrays.asList("datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"));
-		try{
-			for(TestEntity e : entities){
-				agg.accumulate(e);
-			}
-			HierarchicalAggregateEntity result = agg.result();
-			writeToJson("After aggregate", result);
-			Assert.assertEquals(result.getChildren().size(), 2);
-			Assert.assertEquals(result.getChildren().get("dc1").getValues().get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts())+entities[4].getNumHosts());
-			Assert.assertEquals(result.getChildren().get("unassigned").getValues().get(0), (double)(entities[0].getNumHosts()+entities[3].getNumHosts()));
-		}catch(Exception ex){
-			LOG.error("Can not aggregate", ex);
-			Assert.fail("Can not aggregate");
-		}
-		
-		agg = new HierarchicalAggregator(Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"));
-		try{
-			for(TestEntity e : entities){
-				agg.accumulate(e);
-			}
-			HierarchicalAggregateEntity result = agg.result();
-			writeToJson("After aggregate", result);
-			Assert.assertEquals(result.getChildren().size(), 2);
-			Assert.assertEquals(result.getChildren().get("cluster1").getValues().get(0), (double)(entities[0].getNumHosts()+entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(2, result.getChildren().get("cluster1").getChildren().size());
-			Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("dc1").getValues().get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster1").getChildren().get("unassigned").getValues().get(0), (double)(entities[0].getNumHosts()));
-			
-			Assert.assertEquals(result.getChildren().get("cluster2").getValues().get(0), (double)(entities[3].getNumHosts()+entities[4].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("dc1").getValues().get(0), (double)(entities[4].getNumHosts()));
-			Assert.assertEquals(result.getChildren().get("cluster2").getChildren().get("unassigned").getValues().get(0), (double)(entities[3].getNumHosts()));
-		}catch(Exception ex){
-			LOG.error("Can not aggregate", ex);
-			Assert.fail("Can not aggregate");
-		}
-	}
-}
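
The deleted test above is the main record of the HierarchicalAggregator contract. A minimal sketch of the flow it exercises, assuming only the API as imported in that file (tag names and entities are illustrative):

    // imports as in the deleted test file above
    HierarchicalAggregator agg = new HierarchicalAggregator(
            Arrays.asList("cluster"),                  // group-by tags, one tree level each
            Arrays.asList(AggregateFunctionType.sum),  // one function per aggregated field
            Arrays.asList("numHosts"));                // fields to aggregate
    for (TestEntity e : entities) {
        agg.accumulate(e);   // declared to throw, hence the try/catch in the tests;
    }                        // entities missing a group-by tag land under "unassigned"
    HierarchicalAggregateEntity root = agg.result();
    double cluster1Sum = root.getChildren().get("cluster1").getValues().get(0);

    // Post-sort is destructive: children move from getChildren() (null afterwards)
    // into getSortedList(), ordered by the chosen aggregate function.
    SortOption so = new SortOption();
    so.setIndex(0);          // sort on the first aggregate function
    so.setAscendant(true);
    PostHierarchicalAggregateSort.sort(root, Arrays.asList(so));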

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/0ea130ef/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestPostFlatAggregateSort.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestPostFlatAggregateSort.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestPostFlatAggregateSort.java
deleted file mode 100644
index 9751e27..0000000
--- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestPostFlatAggregateSort.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.eagle.query.aggregate.test;
-
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.eagle.query.aggregate.timeseries.PostFlatAggregateSort;
-import junit.framework.Assert;
-
-import org.apache.log4j.Logger;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.junit.Test;
-
-import org.apache.eagle.query.aggregate.timeseries.SortOption;
-
-public class TestPostFlatAggregateSort {
-	private static final Logger logger = Logger.getLogger(TestPostFlatAggregateSort.class);
-	@Test
-	public void testSort(){
-		final String aggField1Value1 = "field1value1";
-		final String aggField1Value2 = "field1value2";
-		final String aggField2Value1 = "field2value1";
-		final String aggField2Value2 = "field2value2";
-		final Double d1 = new Double(1);
-		final Double d2 = new Double(2);
-		final Double d3 = new Double(3);
-		final Double d4 = new Double(4);
-		@SuppressWarnings("serial")
-		Map<List<String>, List<Double>> result = new HashMap<List<String>, List<Double>>(){{
-			put(Arrays.asList(aggField1Value1, aggField2Value1), Arrays.asList(d2, d3));
-			put(Arrays.asList(aggField1Value2, aggField2Value2), Arrays.asList(d1, d4));
-		}};
-		
-		// sort by function1
-		SortOption so = new SortOption();
-		so.setIndex(0);
-		so.setAscendant(true);
-		List<SortOption> sortOptions = Arrays.asList(so);
-		List<Map.Entry<List<String>, List<Double>>> set = 
-				PostFlatAggregateSort.sort(result, sortOptions, 0);
-		JsonFactory factory = new JsonFactory();
-		ObjectMapper mapper = new ObjectMapper(factory);
-		Assert.assertEquals(2, set.size());
-		Iterator<Map.Entry<List<String>, List<Double>>> it = set.iterator();
-		Map.Entry<List<String>, List<Double>> e = it.next();
-		Assert.assertTrue(e.getKey().get(0).equals(aggField1Value2));
-		Assert.assertTrue(e.getValue().get(0).equals(d1));
-		e = it.next();
-		Assert.assertTrue(e.getKey().get(0).equals(aggField1Value1));
-		Assert.assertTrue(e.getValue().get(0).equals(d2));
-		try{
-			String value = mapper.writeValueAsString(set);
-			logger.info(value);
-		}catch(Exception ex){
-			logger.error("fail with mapping", ex);
-			Assert.fail("fail with mapping");
-		}
-		
-		
-		// sort by function2
-		so = new SortOption();
-		so.setIndex(1);
-		so.setAscendant(true);
-		sortOptions = Arrays.asList(so);
-		set = PostFlatAggregateSort.sort(result, sortOptions, 0);
-		factory = new JsonFactory();
-		mapper = new ObjectMapper(factory);
-		Assert.assertEquals(2, set.size());
-		it = set.iterator();
-		e = it.next();
-		Assert.assertTrue(e.getKey().get(0).equals(aggField1Value1));
-		Assert.assertTrue(e.getValue().get(0).equals(d2));
-		e = it.next();
-		Assert.assertTrue(e.getKey().get(0).equals(aggField1Value2));
-		Assert.assertTrue(e.getValue().get(0).equals(d1));
-		try{
-			String value = mapper.writeValueAsString(set);
-			logger.info(value);
-		}catch(Exception ex){
-			logger.error("fail with mapping", ex);
-			Assert.fail("fail with mapping");
-		}
-	}
-	
-	@Test
-	public void testDefaultSort(){
-		final String aggField1Value1 = "xyz";
-		final String aggField1Value2 = "xyz";
-		final String aggField2Value1 = "abd";
-		final String aggField2Value2 = "abc";
-		final Double d1 = new Double(1);
-		final Double d2 = new Double(1);
-		@SuppressWarnings("serial")
-		Map<List<String>, List<Double>> result = new HashMap<List<String>, List<Double>>(){{
-			put(Arrays.asList(aggField1Value1, aggField2Value1), Arrays.asList(d2));
-			put(Arrays.asList(aggField1Value2, aggField2Value2), Arrays.asList(d1));
-		}};
-		
-		// sort by function1
-		SortOption so = new SortOption();
-		so.setIndex(0);
-		so.setAscendant(true);
-		List<SortOption> sortOptions = Arrays.asList(so);
-		List<Map.Entry<List<String>, List<Double>>> set = 
-				PostFlatAggregateSort.sort(result, sortOptions, 0);
-		JsonFactory factory = new JsonFactory();
-		ObjectMapper mapper = new ObjectMapper(factory);
-		Assert.assertEquals(2, set.size());
-		Iterator<Map.Entry<List<String>, List<Double>>> it = set.iterator();
-		Map.Entry<List<String>, List<Double>> e = it.next();
-		Assert.assertTrue(e.getKey().get(0).equals(aggField1Value2));
-		Assert.assertTrue(e.getValue().get(0).equals(d1));
-		e = it.next();
-		Assert.assertTrue(e.getKey().get(0).equals(aggField1Value1));
-		Assert.assertTrue(e.getValue().get(0).equals(d2));
-		try{
-			String value = mapper.writeValueAsString(set);
-			logger.info(value);
-		}catch(Exception ex){
-			logger.error("fail with mapping", ex);
-			Assert.fail("fail with mapping");
-		}
-	}
-}
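
The flat counterpart's contract, as verified by the deleted TestPostFlatAggregateSort above: entries sort by the aggregate value at the chosen SortOption index, and the third argument to sort() is 0 in every call above, which keeps all entries. A sketch with illustrative keys and values:

    // imports as in the deleted test file above
    Map<List<String>, List<Double>> result = new HashMap<List<String>, List<Double>>();
    result.put(Arrays.asList("g1", "h1"), Arrays.asList(2.0));
    result.put(Arrays.asList("g2", "h2"), Arrays.asList(1.0));

    SortOption so = new SortOption();
    so.setIndex(0);      // order by the first aggregate value
    so.setAscendant(true);
    List<Map.Entry<List<String>, List<Double>>> sorted =
            PostFlatAggregateSort.sort(result, Arrays.asList(so), 0);
    // sorted.get(0) maps ["g2","h2"] to 1.0 (ascending); when values tie,
    // testDefaultSort above shows the order falls back to comparing the keys
    // ("abc" sorts before "abd").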

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/0ea130ef/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestTimeSeriesAggregator.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestTimeSeriesAggregator.java b/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestTimeSeriesAggregator.java
deleted file mode 100755
index d953cfa..0000000
--- a/eagle-core/eagle-query/eagle-query-base/src/test/java/org/apache/eagle/query/aggregate/test/TestTimeSeriesAggregator.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.eagle.query.aggregate.test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.eagle.query.aggregate.timeseries.TimeSeriesAggregator;
-import junit.framework.Assert;
-
-import org.apache.eagle.query.aggregate.AggregateFunctionType;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.eagle.log.entity.test.TestEntity;
-
-public class TestTimeSeriesAggregator {
-	private static final Logger LOG = LoggerFactory.getLogger(TestTimeSeriesAggregator.class);
-	@SuppressWarnings("serial")
-	private TestEntity createEntity(final String cluster, final String datacenter, final String rack, int numHosts, long numClusters, long timestamp){
-		TestEntity entity = new TestEntity();
-		Map<String, String> tags = new HashMap<String, String>(){{
-			put("cluster", cluster);
-			put("datacenter", datacenter);
-			put("rack", rack);
-		}}; 
-		entity.setTags(tags);
-		entity.setNumHosts(numHosts);
-		entity.setNumClusters(numClusters);
-		entity.setTimestamp(timestamp);
-		return entity;
-	}
-	
-	@Test
-	public void testTimeSeriesAggregator(){
-		TestEntity[] entities = new TestEntity[8];
-		entities[0] = createEntity("cluster1", "dc1", "rack123", 12, 2, 1386120000*1000L); // bucket 0
-		entities[1] = createEntity("cluster1", "dc1", "rack123", 20, 1, 1386121060*1000L); // bucket 17
-		entities[2] = createEntity("cluster1", "dc1", "rack128", 10, 0, 1386121070*1000L); // bucket 17
-		entities[3] = createEntity("cluster2", "dc1", "rack125", 9,   2, 1386122122*1000L); // bucket 35
-		entities[4] = createEntity("cluster2", "dc1", "rack126", 15,  5, 1386123210*1000L); // bucket 53
-		entities[5] = createEntity("cluster2", "dc1", "rack234", 25,  1, 1386123480*1000L); // bucket 58
-		entities[6] = createEntity("cluster2", "dc1", "rack234", 12,  0, 1386123481*1000L); // bucket 58
-		entities[7] = createEntity("cluster1", "dc1", "rack123", 3,    2, 1386123482*1000L); // bucket 58
-		
-		TimeSeriesAggregator tsAgg = new TimeSeriesAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"),
-				1386120000*1000L, 1386123600*1000L, 60*1000);
-		try{
-			for(TestEntity e : entities){
-				tsAgg.accumulate(e);
-			}
-			Map<List<String>, List<Double>> result = tsAgg.result();
-			Assert.assertEquals(result.size(), 6);
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "0")).get(0), (double)(entities[0].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "17")).get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "35")).get(0), (double)(entities[3].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "53")).get(0), (double)(entities[4].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "58")).get(0), (double)(entities[5].getNumHosts()+entities[6].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "58")).get(0), (double)(entities[7].getNumHosts()));
-			
-			Map<List<String>, List<double[]>> tsResult = tsAgg.getMetric();
-			Assert.assertEquals(tsResult.size(), 2);
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0).length, 60);
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[0], (double)(entities[0].getNumHosts()));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[17], (double)(entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[35], (double)(entities[3].getNumHosts()));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[53], (double)(entities[4].getNumHosts()));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[58], (double)(entities[5].getNumHosts()+entities[6].getNumHosts()));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[58], (double)(entities[7].getNumHosts()));
-		}catch(Exception ex){
-			LOG.error("Can not aggregate", ex);
-			Assert.fail("Can not aggregate");
-		}
-		
-		tsAgg = new TimeSeriesAggregator(new ArrayList<String>(), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("numHosts"), 
-				1386120000*1000L, 1386123600*1000L, 60*1000);
-		try{
-			for(TestEntity e : entities){
-				tsAgg.accumulate(e);
-			}
-			Map<List<String>, List<Double>> result = tsAgg.result();
-			Assert.assertEquals(result.size(), 5);
-			Assert.assertEquals(result.get(Arrays.asList("0")).get(0), (double)(entities[0].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("17")).get(0), (double)(entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("35")).get(0), (double)(entities[3].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("53")).get(0), (double)(entities[4].getNumHosts()));
-			Assert.assertEquals(result.get(Arrays.asList("58")).get(0), (double)(entities[5].getNumHosts()+entities[6].getNumHosts()+entities[7].getNumHosts()));
-			
-			Map<List<String>, List<double[]>> tsResult = tsAgg.getMetric();
-			Assert.assertEquals(tsResult.size(), 1);
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0).length, 60);
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[0], (double)(entities[0].getNumHosts()));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[17], (double)(entities[1].getNumHosts()+entities[2].getNumHosts()));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[35], (double)(entities[3].getNumHosts()));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[53], (double)(entities[4].getNumHosts()));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[58], (double)(entities[5].getNumHosts()+entities[6].getNumHosts()+entities[7].getNumHosts()));		
-		}catch(Exception ex){
-			LOG.error("Can not aggregate", ex);
-			Assert.fail("Can not aggregate");
-		}
-		
-		tsAgg = new TimeSeriesAggregator(Arrays.asList("cluster"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*"), 
-				1386120000*1000L, 1386123600*1000L, 60*1000);
-		try{
-			for(TestEntity e : entities){
-				tsAgg.accumulate(e);
-			}
-			Map<List<String>, List<Double>> result = tsAgg.result();
-			Assert.assertEquals(result.size(), 6);
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "0")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "17")).get(0), (double)(2));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "35")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "53")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("cluster2", "58")).get(0), (double)(2));
-			Assert.assertEquals(result.get(Arrays.asList("cluster1", "58")).get(0), (double)(1));
-			
-			Map<List<String>, List<double[]>> tsResult = tsAgg.getMetric();
-			Assert.assertEquals(tsResult.size(), 2);
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0).length, 60);
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[0], (double)(1));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[17], (double)(2));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[35], (double)(1));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[53], (double)(1));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster2")).get(0)[58], (double)(2));
-			Assert.assertEquals(tsResult.get(Arrays.asList("cluster1")).get(0)[58], (double)(1));
-		}catch(Exception ex){
-			LOG.error("Can not aggregate", ex);
-			Assert.fail("Can not aggregate");
-		}
-		
-		tsAgg = new TimeSeriesAggregator(new ArrayList<String>(), Arrays.asList(AggregateFunctionType.count), Arrays.asList("*"), 
-				1386120000*1000L, 1386123600*1000L, 60*1000);
-		try{
-			for(TestEntity e : entities){
-				tsAgg.accumulate(e);
-			}
-			Map<List<String>, List<Double>> result = tsAgg.result();
-			Assert.assertEquals(result.size(), 5);
-			Assert.assertEquals(result.get(Arrays.asList("0")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("17")).get(0), (double)(2));
-			Assert.assertEquals(result.get(Arrays.asList("35")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("53")).get(0), (double)(1));
-			Assert.assertEquals(result.get(Arrays.asList("58")).get(0), (double)(3));
-			
-			Map<List<String>, List<double[]>> tsResult = tsAgg.getMetric();
-			Assert.assertEquals(tsResult.size(), 1);
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0).length, 60);
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[0], (double)(1));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[17], (double)(2));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[35], (double)(1));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[53], (double)(1));
-			Assert.assertEquals(tsResult.get(new ArrayList<String>()).get(0)[58], (double)(3));		
-		}catch(Exception ex){
-			LOG.error("Can not aggregate", ex);
-			Assert.fail("Can not aggregate");
-		}
-	}
-}
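
The "// bucket N" comments in the deleted test follow from integer division of each entity's offset from the window start (which is what the asserted indices imply), and the L suffixes on the literals keep the millisecond products in long range instead of overflowing int:

    long startTime  = 1386120000L * 1000;   // window start, ms
    long endTime    = 1386123600L * 1000;   // window end, ms
    long intervalms = 60 * 1000;            // one-minute buckets

    long ts     = 1386121060L * 1000;                    // entities[1]
    long bucket = (ts - startTime) / intervalms;         // 1060000 / 60000 -> 17
    long nBuckets = (endTime - startTime) / intervalms;  // 3600000 / 60000 -> 60,
    // matching the length-60 arrays returned by tsAgg.getMetric()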

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/0ea130ef/eagle-core/eagle-query/eagle-service-base/pom.xml
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-service-base/pom.xml b/eagle-core/eagle-query/eagle-service-base/pom.xml
deleted file mode 100755
index 32c0846..0000000
--- a/eagle-core/eagle-query/eagle-service-base/pom.xml
+++ /dev/null
@@ -1,77 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~    http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<parent>
-		<groupId>eagle</groupId>
-		<artifactId>eagle-query-parent</artifactId>
-		<version>0.3.0</version>
-        <relativePath>../pom.xml</relativePath>
-	</parent>
-
-	<artifactId>eagle-service-base</artifactId>
-	<packaging>jar</packaging>
-	<name>eagle-service-base</name>
-
-	<dependencies>
-        <!-- put eagle-storage-base dependency at the top for using asm 4.0 jar !-->
-        <dependency>
-            <groupId>eagle</groupId>
-            <artifactId>eagle-storage-base</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-		<dependency>
-			<groupId>com.sun.jersey</groupId>
-			<artifactId>jersey-server</artifactId>
-		</dependency>
-		<dependency>
-			<groupId>com.sun.jersey.contribs</groupId>
-			<artifactId>jersey-multipart</artifactId>
-			<version>${jersey.version}</version>
-		</dependency>
-        <dependency>
-            <groupId>eagle</groupId>
-            <artifactId>eagle-common</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>eagle</groupId>
-            <artifactId>eagle-storage-hbase</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>eagle</groupId>
-            <artifactId>eagle-embed-hbase</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>eagle</groupId>
-            <artifactId>eagle-embed-hbase</artifactId>
-            <version>${project.version}</version>
-            <classifier>tests</classifier>
-            <scope>test</scope>
-        </dependency>
-		<dependency>
-			<groupId>org.codehaus.jackson</groupId>
-			<artifactId>jackson-mapper-asl</artifactId>
-		</dependency>
-	</dependencies>
-</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/0ea130ef/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/EagleExceptionWrapper.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/EagleExceptionWrapper.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/EagleExceptionWrapper.java
deleted file mode 100644
index 33aabab..0000000
--- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/EagleExceptionWrapper.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.eagle.service.common;
-
-public class EagleExceptionWrapper {
-	private final static int MAX_DEPTH = 10;
-	
-	public static String wrap(Exception ex){
-		return wrap(ex, EagleExceptionWrapper.MAX_DEPTH);
-	}
-	
-	public static String wrap(Exception ex, int maxdepth){
-		int d = maxdepth;
-		if(d <= 0)
-			d = EagleExceptionWrapper.MAX_DEPTH;
-		int index = 0;
-		StringBuffer sb = new StringBuffer();
-		sb.append(ex);
-		sb.append(System.getProperty("line.separator"));
-		for(StackTraceElement element : ex.getStackTrace()){
-			sb.append(element.toString());
-			sb.append(System.getProperty("line.separator"));
-			if(++index >= d)
-				break;
-		}
-		return sb.toString();
-	}
-}
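
Typical use of the removed wrapper; the failing call and logger below are hypothetical:

    try {
        doSomethingRisky();   // hypothetical failing call
    } catch (Exception ex) {
        // Exception line plus at most 5 stack frames, each followed by the
        // platform line separator; wrap(ex) alone caps at MAX_DEPTH = 10.
        String trace = EagleExceptionWrapper.wrap(ex, 5);
        LOG.error(trace);     // hypothetical logger
    }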

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/0ea130ef/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/SplitFullScanEntityReader.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/SplitFullScanEntityReader.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/SplitFullScanEntityReader.java
deleted file mode 100755
index ae9ecef..0000000
--- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/common/SplitFullScanEntityReader.java
+++ /dev/null
@@ -1,289 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.eagle.service.common;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.concurrent.ArrayBlockingQueue;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
-import org.apache.eagle.log.entity.GenericEntityBatchReader;
-import org.apache.eagle.log.entity.RowkeyBuilder;
-import org.apache.eagle.log.entity.SearchCondition;
-import org.apache.eagle.log.entity.meta.EntityDefinition;
-import org.apache.eagle.log.entity.meta.EntityDefinitionManager;
-import org.apache.eagle.query.ListQueryCompiler;
-import org.apache.eagle.common.DateTimeUtil;
-import org.apache.eagle.common.EagleBase64Wrapper;
-
-/**
- * Supports stream-based entity reads. Internally it splits entity fetching across
- * multiple threads to improve performance. However, it does not support
- * multi-threaded client reads from the result set.
- */
-public class SplitFullScanEntityReader<ENTITY extends TaggedLogAPIEntity> {
-
-	// class members
-	public static final int DEFAULT_BUFFER_SIZE = 10 * 1000;
-	public static final int MAX_WRITE_TIME_OUT_IN_SECONDS = 60;
-	private static final Logger LOG = LoggerFactory.getLogger(SplitFullScanEntityReader.class);
-	private static final TaggedLogAPIEntity COMPLETED_ENTITY = new TaggedLogAPIEntity();
-
-	// instance members
-	private final int splits;
-	private final String query;
-	private final long startTime;
-	private final long endTime;
-	private final String startRowkey;
-	private final int pageSize;
-	private final int bufferSize;
-	
-	public SplitFullScanEntityReader(String query,
-			String startTime, String endTime,
-			int splits, String startRowkey, int pageSize) {
-		this(
-				query, 
-				DateTimeUtil.humanDateToSecondsWithoutException(startTime) * 1000,
-				DateTimeUtil.humanDateToSecondsWithoutException(endTime) * 1000,
-				splits,
-				startRowkey,
-				pageSize
-			);
-	}
-	
-	public SplitFullScanEntityReader(String query, long startTime, long endTime,
-			int splits, String startRowkey, int pageSize) {
-		this(query, startTime, endTime, splits, startRowkey, pageSize, 
-				DEFAULT_BUFFER_SIZE);
-	}
-	
-	public SplitFullScanEntityReader(String query, long startTime, long endTime,
-			int splits, String startRowkey, int pageSize, int bufferSize) {
-		this.query = query;
-		this.startTime = startTime;
-		this.endTime = endTime;
-		this.splits = splits;
-		this.startRowkey = startRowkey;
-		this.pageSize = pageSize;
-		this.bufferSize = bufferSize;
-	}
-	
-	public EntityResultSet<ENTITY> read() throws Exception {
-		final EntityResultSet<ENTITY> resultSet = new EntityResultSet<ENTITY>(new ArrayBlockingQueue<TaggedLogAPIEntity>(bufferSize));
-		final List<GenericEntityBatchReader> readers = createSplitThreads();
-		
-		final int size = readers.size();
-		if (size > 0) {
-			final AtomicInteger threadCount = new AtomicInteger(size);
-			final AtomicInteger entityCount = new AtomicInteger(0);
-			for (GenericEntityBatchReader reader : readers) {
-				final EntityFetchThread<ENTITY> thread = new EntityFetchThread<ENTITY>(reader, threadCount, entityCount, resultSet);
-				thread.start();
-			}
-		} else {
-			resultSet.getQueue().add(COMPLETED_ENTITY);
-		}
-		return resultSet;
-	}
-
-	protected List<GenericEntityBatchReader> createSplitThreads() throws Exception {
-		
-		final List<GenericEntityBatchReader> readers = new ArrayList<GenericEntityBatchReader>();
-		final ListQueryCompiler comp = new ListQueryCompiler(query);
-		final EntityDefinition entityDef = EntityDefinitionManager.getEntityByServiceName(comp.serviceName());
-		if (entityDef == null) {
-			throw new IllegalArgumentException("Invalid entity name: " + comp.serviceName());
-		}
-		
-		// TODO: For now one query cannot span multiple partitions. In the future,
-		// if a partition is defined for the entity, we need to internally spawn
-		// multiple queries, one per search condition per partition.
-		final List<String[]> partitionValues = comp.getQueryPartitionValues();
-		partitionConstraintValidate(partitionValues, query);
-		
-		long lastTimestamp = Long.MAX_VALUE;
-		if (startRowkey != null) {
-			final byte[] lastRowkey = EagleBase64Wrapper.decode(startRowkey);
-			lastTimestamp = RowkeyBuilder.getTimestamp(lastRowkey, entityDef);
-		}
-		
-		final long duration = (endTime - startTime) / splits;
-		for (int i = 0; i < splits; ++i) {
-			
-			final long slotStartTime = startTime + (i * duration);
-			if (slotStartTime > lastTimestamp) {
-				// ignore this slot
-				continue;
-			}
-			final long slotEndTime = startTime + ((i + 1) * duration);
-			final SearchCondition condition = new SearchCondition();
-			final String slotStartTimeString = DateTimeUtil.secondsToHumanDate(slotStartTime / 1000);
-			final String slotEndTimeString = DateTimeUtil.secondsToHumanDate(slotEndTime / 1000);
-			condition.setStartTime(slotStartTimeString);
-			condition.setEndTime(slotEndTimeString);
-			
-			condition.setFilter(comp.filter());
-			condition.setQueryExpression(comp.getQueryExpression());
-			if (partitionValues != null) {
-				condition.setPartitionValues(Arrays.asList(partitionValues.get(0)));
-			}
-			// Be careful with the startRowkey setting. Only set startRowkey when
-			// lastTimestamp is within the slot time range.
-			if (startRowkey != null && lastTimestamp >= startTime && lastTimestamp < endTime) {
-				condition.setStartRowkey(startRowkey);
-			}
-			condition.setPageSize(pageSize);
-			
-			if (comp.hasAgg()) {
-				List<String> groupbyFields = comp.groupbyFields();
-				List<String> outputFields = new ArrayList<String>();
-				if(groupbyFields != null){
-					outputFields.addAll(groupbyFields);
-				}
-				outputFields.addAll(comp.aggregateFields());
-				condition.setOutputFields(outputFields);
-			} else {
-				condition.setOutputFields(comp.outputFields());
-			}
-			readers.add(new GenericEntityBatchReader(comp.serviceName(), condition));
-		}
-		return readers;
-	}
-	
-
-	private static void partitionConstraintValidate(List<String[]> partitionValues, String query) {
-		if (partitionValues != null && partitionValues.size() > 1) {
-			final String[] values = partitionValues.get(0);
-			for (int i = 1; i < partitionValues.size(); ++i) {
-				final String[] tmpValues = partitionValues.get(i);
-				for (int j = 0; j < values.length; ++j) {
-					if (values[j] == null || (!values[j].equals(tmpValues[j]))) {
-						final String errMsg = "One query for multiple partitions is NOT allowed for now! Query: " + query;
-						LOG.error(errMsg);
-						throw new IllegalArgumentException(errMsg);
-					}
-				}
-			}
-		}
-	}
-	
-	
-	@SuppressWarnings("unchecked")
-	public static class EntityResultSet<ENTITY extends TaggedLogAPIEntity> {
-		private static final long DEFAULT_TIMEOUT_IN_MS = 1000;
-
-		private boolean fetchCompleted = false;
-		private final BlockingQueue<TaggedLogAPIEntity> queue;
-		private volatile Exception ex = null;
-
-		public EntityResultSet(BlockingQueue<TaggedLogAPIEntity> queue) {
-			this.queue = queue;
-		}
-		
-		public boolean hasMoreData() {
-			return queue.size() > 0 || (!fetchCompleted);
-		}
-		
-		public ENTITY next(long timeout, TimeUnit unit) throws InterruptedException {
-			if (fetchCompleted) {
-				return null;
-			}
-			final TaggedLogAPIEntity entity = queue.poll(timeout, unit);
-			if (COMPLETED_ENTITY.equals(entity)) {
-				fetchCompleted = true;
-				return null;
-			}
-			return (ENTITY)entity;
-		}
-		
-		public ENTITY next() throws Exception {
-			TaggedLogAPIEntity entity = null;
-			while (!fetchCompleted) {
-				try {
-					entity = queue.poll(DEFAULT_TIMEOUT_IN_MS, TimeUnit.MILLISECONDS);
-					if (COMPLETED_ENTITY.equals(entity)) {
-						fetchCompleted = true;
-						if (ex != null) {
-							throw ex;
-						}
-						return null;
-					}
-					if (entity != null) {
-						return (ENTITY)entity;
-					}
-				} catch (InterruptedException ex) {
-					// Just ignore
-				}
-			}
-			return null;
-		}
-		
-		void setException(Exception ex) {
-			this.ex = ex;
-		}
-		
-		BlockingQueue<TaggedLogAPIEntity> getQueue() {
-			return queue;
-		}
-	}
-	
-	private static class EntityFetchThread<ENTITY extends TaggedLogAPIEntity> extends Thread {
-		
-		private final GenericEntityBatchReader reader;
-		private final AtomicInteger threadCount;
-		private final AtomicInteger entityCount;
-		private final EntityResultSet<ENTITY> resultSet;
-		
-		private EntityFetchThread(GenericEntityBatchReader reader, AtomicInteger threadCount, AtomicInteger entityCount, EntityResultSet<ENTITY> resultSet) {
-			this.reader = reader;
-			this.threadCount = threadCount;
-			this.entityCount = entityCount;
-			this.resultSet = resultSet;
-		}
-		
-	    @Override
-	    public void run() {
-	    	try {
-	    		final List<ENTITY> entities = reader.read();
-	    		entityCount.addAndGet(entities.size());
-	    		for (ENTITY entity : entities) {
-	    			if (!resultSet.getQueue().offer(entity, MAX_WRITE_TIME_OUT_IN_SECONDS, TimeUnit.SECONDS)) {
-	    				resultSet.setException(new IOException("Write entity to queue timeout"));
-		    			resultSet.getQueue().add(COMPLETED_ENTITY);
-	    			}
-	    		}
-	    	} catch (Exception ex) {
-				resultSet.setException(ex);
-    			resultSet.getQueue().add(COMPLETED_ENTITY);
-	    	} finally {
-	    		final int count = threadCount.decrementAndGet();
-	    		if (count == 0) {
-	    			resultSet.getQueue().add(COMPLETED_ENTITY);
-	    			LOG.info("Total fetched " + entityCount.get() + " entities");
-	    		}
-	    	}
-	    }
-	}
-}
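
As the class javadoc of the removed reader states, splits fan out internally while a single caller drains the result set. A consumption sketch; the query string, time range, and sizes are illustrative only:

    SplitFullScanEntityReader<TaggedLogAPIEntity> reader =
            new SplitFullScanEntityReader<TaggedLogAPIEntity>(
                    "SampleService[]{*}",            // hypothetical list query
                    "2015-03-01 00:00:00", "2015-03-02 00:00:00",
                    4,                               // splits -> four fetch threads
                    null,                            // no start rowkey: read from window start
                    1000);                           // page size per split
    SplitFullScanEntityReader.EntityResultSet<TaggedLogAPIEntity> rs = reader.read();
    TaggedLogAPIEntity entity;
    while ((entity = rs.next()) != null) {   // blocks; rethrows a split thread's failure
        // process entity
    }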

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/0ea130ef/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericEntityServiceResource.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericEntityServiceResource.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericEntityServiceResource.java
deleted file mode 100644
index fb52352..0000000
--- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericEntityServiceResource.java
+++ /dev/null
@@ -1,630 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.eagle.service.generic;
-
-import com.sun.jersey.core.header.FormDataContentDisposition;
-import com.sun.jersey.multipart.FormDataParam;
-import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
-import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity;
-import org.apache.eagle.log.entity.meta.EntityDefinition;
-import org.apache.eagle.log.entity.meta.EntityDefinitionManager;
-import org.apache.eagle.storage.DataStorage;
-import org.apache.eagle.storage.DataStorageManager;
-import org.apache.eagle.storage.exception.IllegalDataStorageException;
-import org.apache.eagle.storage.operation.*;
-import org.apache.eagle.storage.result.ModifyResult;
-import org.apache.eagle.storage.result.QueryResult;
-import com.sun.jersey.api.json.JSONWithPadding;
-import org.apache.commons.lang.time.StopWatch;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.type.TypeFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.ws.rs.*;
-import javax.ws.rs.core.GenericEntity;
-import javax.ws.rs.core.MediaType;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-/**
- * @since 3/18/15
- */
-@Path(GenericEntityServiceResource.ROOT_PATH)
-@SuppressWarnings("unchecked")
-public class GenericEntityServiceResource {
-    public final static String ROOT_PATH = "/entities";
-    public final static String JSONP_PATH = "jsonp";
-    public final static String DELETE_ENTITIES_PATH = "delete";
-    public final static String ROWKEY_PATH = "rowkey";
-
-    public final static String FIRST_TIMESTAMP = "firstTimestamp";
-    public final static String LAST_TIMESTAMP = "lastTimestamp";
-    public final static String ELAPSEDMS = "elapsedms";
-    public final static String TOTAL_RESULTS = "totalResults";
-
-    private final static Logger LOG = LoggerFactory.getLogger(GenericEntityServiceResource.class);
-
-    private List<? extends TaggedLogAPIEntity> unmarshalEntitiesByServie(InputStream inputStream,EntityDefinition entityDefinition) throws IllegalAccessException, InstantiationException, IOException {
-        ObjectMapper objectMapper = new ObjectMapper();
-        return objectMapper.readValue(inputStream, TypeFactory.defaultInstance().constructCollectionType(LinkedList.class, entityDefinition.getEntityClass()));
-    }
-
-    private List<String> unmarshalAsStringlist(InputStream inputStream) throws IllegalAccessException, InstantiationException, IOException {
-        ObjectMapper objectMapper = new ObjectMapper();
-        return objectMapper.readValue(inputStream, TypeFactory.defaultInstance().constructCollectionType(LinkedList.class, String.class));
-    }
-
-
-    public GenericServiceAPIResponseEntity updateDatabase(Statement<ModifyResult<String>> statement) {
-        GenericServiceAPIResponseEntity<String> response = new GenericServiceAPIResponseEntity<>();
-        Map<String,Object> meta = new HashMap<>();
-        StopWatch stopWatch = new StopWatch();
-
-        try {
-            stopWatch.start();
-            DataStorage dataStorage = DataStorageManager.getDataStorageByEagleConfig();
-            if(dataStorage == null){
-                LOG.error("Data storage is null");
-                throw new IllegalDataStorageException("Data storage is null");
-            }
-            ModifyResult<String> result = statement.execute(dataStorage);
-            if(result.isSuccess()) {
-                List<String> keys =result.getIdentifiers();
-                if(keys != null) {
-                    response.setObj(keys, String.class);
-                    meta.put(TOTAL_RESULTS, keys.size());
-                } else {
-                    meta.put(TOTAL_RESULTS, 0);
-                }
-                meta.put(ELAPSEDMS,stopWatch.getTime());
-                response.setMeta(meta);
-                response.setSuccess(true);
-            }
-        } catch (Exception e) {
-            LOG.error(e.getMessage(), e);
-            response.setException(e);
-        }finally {
-            stopWatch.stop();
-        }
-        return response;
-    }
-
-    public GenericServiceAPIResponseEntity updateEntities(List<? extends TaggedLogAPIEntity> entities, String serviceName) {
-        CreateStatement createStatement = new CreateStatement(entities, serviceName);
-        GenericServiceAPIResponseEntity<String> response = updateDatabase(createStatement);
-        return response;
-    }
-
-    @POST
-    @Consumes(MediaType.APPLICATION_JSON)
-    @Produces(MediaType.APPLICATION_JSON)
-    public GenericServiceAPIResponseEntity create(InputStream inputStream,
-                                                 @QueryParam("serviceName") String serviceName){
-        GenericServiceAPIResponseEntity<String> response = new GenericServiceAPIResponseEntity<String>();
-        Map<String,Object> meta = new HashMap<>();
-        StopWatch stopWatch = new StopWatch();
-        try {
-            stopWatch.start();
-            EntityDefinition entityDefinition = EntityDefinitionManager.getEntityByServiceName(serviceName);
-
-            if(entityDefinition == null){
-                throw new IllegalArgumentException("entity definition of service "+serviceName+" not found");
-            }
-
-            List<? extends TaggedLogAPIEntity> entities = unmarshalEntitiesByService(inputStream, entityDefinition);
-            DataStorage dataStorage = DataStorageManager.getDataStorageByEagleConfig();
-            CreateStatement createStatement = new CreateStatement(entities,entityDefinition);
-            ModifyResult<String> result = createStatement.execute(dataStorage);
-            if(result.isSuccess()) {
-                List<String> keys =result.getIdentifiers();
-                if(keys != null) {
-                    response.setObj(keys, String.class);
-                    meta.put(TOTAL_RESULTS,keys.size());
-                }else{
-                    meta.put(TOTAL_RESULTS,0);
-                }
-                meta.put(ELAPSEDMS,stopWatch.getTime());
-                response.setMeta(meta);
-                response.setSuccess(true);
-            }
-        } catch (Exception e) {
-            LOG.error(e.getMessage(), e);
-            response.setException(e);
-        }finally {
-            stopWatch.stop();
-        }
-        return response;
-    }
-
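-    /**
-     * Multipart variant of {@link #create(InputStream, String)}: reads the same
-     * JSON entity list from an uploaded file instead of the raw request body.
-     */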
-    @POST
-    @Consumes({MediaType.MULTIPART_FORM_DATA})
-    @Produces(MediaType.APPLICATION_JSON)
-    public GenericServiceAPIResponseEntity create(@FormDataParam("file") InputStream fileInputStream,
-                                                  @FormDataParam("file") FormDataContentDisposition cdh,
-                                                  @QueryParam("serviceName") String serviceName) {
-        GenericServiceAPIResponseEntity<String> response = new GenericServiceAPIResponseEntity<String>();
-        Map<String,Object> meta = new HashMap<>();
-        StopWatch stopWatch = new StopWatch();
-        try {
-            stopWatch.start();
-            EntityDefinition entityDefinition = EntityDefinitionManager.getEntityByServiceName(serviceName);
-
-            if(entityDefinition == null){
-                throw new IllegalArgumentException("entity definition of service "+serviceName+" not found");
-            }
-
-            List<? extends TaggedLogAPIEntity> entities = unmarshalEntitiesByService(fileInputStream, entityDefinition);
-            DataStorage dataStorage = DataStorageManager.getDataStorageByEagleConfig();
-            CreateStatement createStatement = new CreateStatement(entities,entityDefinition);
-            ModifyResult<String> result = createStatement.execute(dataStorage);
-            if(result.isSuccess()) {
-                List<String> keys =result.getIdentifiers();
-                if(keys != null) {
-                    response.setObj(keys, String.class);
-                    meta.put(TOTAL_RESULTS,keys.size());
-                }else{
-                    meta.put(TOTAL_RESULTS,0);
-                }
-                meta.put(ELAPSEDMS,stopWatch.getTime());
-                response.setMeta(meta);
-                response.setSuccess(true);
-            }
-        } catch (Exception e) {
-            LOG.error(e.getMessage(), e);
-            response.setException(e);
-        }finally {
-            stopWatch.stop();
-        }
-        return response;
-    }
-
-    @PUT
-    @Consumes(MediaType.APPLICATION_JSON)
-    @Produces(MediaType.APPLICATION_JSON)
-    public GenericServiceAPIResponseEntity update(InputStream inputStream,
-                                                 @QueryParam("serviceName") String serviceName){
-        GenericServiceAPIResponseEntity<String> response = new GenericServiceAPIResponseEntity<String>();
-        DataStorage dataStorage;
-        Map<String,Object> meta = new HashMap<>();
-        StopWatch stopWatch = new StopWatch();
-        try {
-            stopWatch.start();
-            EntityDefinition entityDefinition = EntityDefinitionManager.getEntityByServiceName(serviceName);
-
-            if(entityDefinition == null){
-                throw new IllegalArgumentException("entity definition of service "+serviceName+" not found");
-            }
-
-            List<? extends TaggedLogAPIEntity> entities = unmarshalEntitiesByService(inputStream, entityDefinition);
-            dataStorage = DataStorageManager.getDataStorageByEagleConfig();
-
-            UpdateStatement updateStatement = new UpdateStatement(entities,entityDefinition);
-            ModifyResult<String> result = updateStatement.execute(dataStorage);
-            if(result.isSuccess()) {
-                List<String> keys =result.getIdentifiers();
-                if(keys != null) {
-                    response.setObj(keys, String.class);
-                    meta.put(TOTAL_RESULTS,keys.size());
-                }else{
-                    meta.put(TOTAL_RESULTS,0);
-                }
-                meta.put(ELAPSEDMS,stopWatch.getTime());
-                response.setMeta(meta);
-                response.setSuccess(true);
-            }
-        } catch (Exception e) {
-            LOG.error(e.getMessage(), e);
-            response.setException(e);
-        } finally {
-            stopWatch.stop();
-        }
-        return response;
-    }
-
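-    /**
-     * Multipart variant of {@link #update(InputStream, String)}: reads the same
-     * JSON entity list from an uploaded file instead of the raw request body.
-     */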
-    @PUT
-    @Consumes({MediaType.MULTIPART_FORM_DATA})
-    @Produces(MediaType.APPLICATION_JSON)
-    public GenericServiceAPIResponseEntity update(@FormDataParam("file") InputStream fileInputStream,
-                                                  @FormDataParam("file") FormDataContentDisposition cdh,
-                                                  @QueryParam("serviceName") String serviceName){
-        GenericServiceAPIResponseEntity<String> response = new GenericServiceAPIResponseEntity<String>();
-        DataStorage dataStorage;
-        Map<String,Object> meta = new HashMap<>();
-        StopWatch stopWatch = new StopWatch();
-        try {
-            stopWatch.start();
-            EntityDefinition entityDefinition = EntityDefinitionManager.getEntityByServiceName(serviceName);
-
-            if(entityDefinition == null){
-                throw new IllegalArgumentException("entity definition of service "+serviceName+" not found");
-            }
-
-            List<? extends TaggedLogAPIEntity> entities = unmarshalEntitiesByService(fileInputStream, entityDefinition);
-            dataStorage = DataStorageManager.getDataStorageByEagleConfig();
-
-            UpdateStatement updateStatement = new UpdateStatement(entities,entityDefinition);
-            ModifyResult<String> result = updateStatement.execute(dataStorage);
-            if(result.isSuccess()) {
-                List<String> keys =result.getIdentifiers();
-                if(keys != null) {
-                    response.setObj(keys, String.class);
-                    meta.put(TOTAL_RESULTS,keys.size());
-                }else{
-                    meta.put(TOTAL_RESULTS,0);
-                }
-                meta.put(ELAPSEDMS,stopWatch.getTime());
-                response.setMeta(meta);
-                response.setSuccess(true);
-            }
-        } catch (Exception e) {
-            LOG.error(e.getMessage(), e);
-            response.setException(e);
-        } finally {
-            stopWatch.stop();
-        }
-        return response;
-    }
-
-
-
-    /**
-     * @param value rowkey value
-     * @param serviceName entity service name
-     * @return GenericServiceAPIResponseEntity
-     */
-    @GET
-    @Path(ROWKEY_PATH)
-    @Produces(MediaType.APPLICATION_JSON)
-    public GenericServiceAPIResponseEntity search(@QueryParam("value") String value,@QueryParam("serviceName") String serviceName){
-        GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity();
-        Map<String,Object> meta = new HashMap<>();
-        DataStorage dataStorage;
-        StopWatch stopWatch = null;
-        try {
-            if(serviceName == null) throw new IllegalArgumentException("serviceName is null");
-            RowkeyQueryStatement queryStatement = new RowkeyQueryStatement(value,serviceName);
-            stopWatch = new StopWatch();
-            stopWatch.start();
-            dataStorage = DataStorageManager.getDataStorageByEagleConfig();
-            if(dataStorage==null){
-                LOG.error("Data storage is null");
-                throw new IllegalDataStorageException("data storage is null");
-            }
-            QueryResult<?> result = queryStatement.execute(dataStorage);
-            if(result.isSuccess()){
-                meta.put(FIRST_TIMESTAMP, result.getFirstTimestamp());
-                meta.put(LAST_TIMESTAMP, result.getLastTimestamp());
-                meta.put(TOTAL_RESULTS, result.getSize());
-                meta.put(ELAPSEDMS,stopWatch.getTime());
-                response.setObj(result.getData());
-                response.setType(result.getEntityType());
-                response.setSuccess(true);
-                response.setMeta(meta);
-                return response;
-            }
-        } catch (Exception e) {
-            response.setException(e);
-            LOG.error(e.getMessage(),e);
-        }finally {
-            if(stopWatch!=null) stopWatch.stop();
-        }
-        return response;
-    }
-
-    /**
-     * @param inputStream JSON array of rowkey values to look up
-     * @param serviceName entity service name
-     * @return GenericServiceAPIResponseEntity
-     */
-    @POST
-    @Path(ROWKEY_PATH)
-    @Consumes(MediaType.APPLICATION_JSON)
-    @Produces(MediaType.APPLICATION_JSON)
-    public GenericServiceAPIResponseEntity search(InputStream inputStream,@QueryParam("serviceName") String serviceName){
-        GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity();
-        Map<String,Object> meta = new HashMap<>();
-        DataStorage dataStorage;
-
-        StopWatch stopWatch = null;
-        try {
-            if(serviceName == null) throw new IllegalArgumentException("serviceName is null");
-
-            final List<String> values = unmarshalAsStringList(inputStream);
-            final RowkeyQueryStatement queryStatement = new RowkeyQueryStatement(values,serviceName);
-
-            stopWatch = new StopWatch();
-            stopWatch.start();
-            dataStorage = DataStorageManager.getDataStorageByEagleConfig();
-            if(dataStorage==null){
-                LOG.error("Data storage is null");
-                throw new IllegalDataStorageException("Data storage is null");
-            }
-            QueryResult<?> result = queryStatement.execute(dataStorage);
-            if(result.isSuccess()){
-                meta.put(FIRST_TIMESTAMP, result.getFirstTimestamp());
-                meta.put(LAST_TIMESTAMP, result.getLastTimestamp());
-                meta.put(TOTAL_RESULTS, result.getSize());
-                meta.put(ELAPSEDMS,stopWatch.getTime());
-                response.setObj(result.getData());
-                response.setType(result.getEntityType());
-                response.setSuccess(true);
-                response.setMeta(meta);
-                return response;
-            }
-        } catch (Exception e) {
-            response.setException(e);
-            LOG.error(e.getMessage(),e);
-        }finally {
-            if(stopWatch!=null) stopWatch.stop();
-        }
-        return response;
-    }
-
-
-    /**
-     * Generic entity query endpoint: executes the given query expression against
-     * the configured data storage and returns the matched entities or aggregations.
-     *
-     * @param query query expression, with the service name encoded in it
-     * @param startTime start of the query time range
-     * @param endTime end of the query time range
-     * @param pageSize maximum number of results to return
-     * @param startRowkey rowkey from which to resume scanning, for pagination
-     * @param treeAgg whether to perform hierarchical (tree) aggregation
-     * @param timeSeries whether to return time-series aggregation results
-     * @param intervalmin time-series aggregation interval, in minutes
-     * @param top return only the top N aggregated groups
-     * @param filterIfMissing whether to filter out rows missing the queried field
-     * @param parallel parallelism hint for query execution
-     * @param metricName metric name, for metric queries
-     * @param verbose whether to include verbose entity fields in the response
-     * @return GenericServiceAPIResponseEntity wrapping the query result and metadata
-     */
-    @GET
-    @Produces(MediaType.APPLICATION_JSON)
-    @SuppressWarnings("unchecked")
-    public GenericServiceAPIResponseEntity search(@QueryParam("query") String query,
-                                                 @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime,
-                                                 @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey,
-                                                 @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries,
-                                                 @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top,
-                                                 @QueryParam("filterIfMissing") boolean filterIfMissing,
-                                                 @QueryParam("parallel") int parallel,
-                                                 @QueryParam("metricName") String metricName,
-                                                 @QueryParam("verbose") Boolean verbose){
-        RawQuery rawQuery = RawQuery.build()
-                .query(query)
-                .startTime(startTime)
-                .endTime(endTime)
-                .pageSize(pageSize)
-                .startRowkey(startRowkey)
-                .treeAgg(treeAgg)
-                .timeSeries(timeSeries)
-                .intervalMin(intervalmin)
-                .top(top)
-                .filerIfMissing(filterIfMissing)
-                .parallel(parallel)
-                .metricName(metricName)
-                .verbose(verbose)
-                .done();
-
-        QueryStatement queryStatement = new QueryStatement(rawQuery);
-        GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity();
-        Map<String,Object> meta = new HashMap<>();
-
-        DataStorage dataStorage;
-        StopWatch stopWatch = new StopWatch();
-        try {
-            stopWatch.start();
-            dataStorage = DataStorageManager.getDataStorageByEagleConfig();
-            if(dataStorage==null){
-                LOG.error("Data storage is null");
-                throw new IllegalDataStorageException("data storage is null");
-            }
-            
-            QueryResult<?> result = queryStatement.execute(dataStorage);
-            if(result.isSuccess()){
-                meta.put(FIRST_TIMESTAMP, result.getFirstTimestamp());
-                meta.put(LAST_TIMESTAMP, result.getLastTimestamp());
-                meta.put(TOTAL_RESULTS, result.getSize());
-                meta.put(ELAPSEDMS,stopWatch.getTime());
-
-                response.setObj(result.getData());
-                response.setType(result.getEntityType());
-                response.setSuccess(true);
-                response.setMeta(meta);
-                return response;
-            }
-        } catch (Exception e) {
-            response.setException(e);
-            LOG.error(e.getMessage(),e);
-        }finally {
-            stopWatch.stop();
-        }
-        return response;
-    }
-
-    /**
-     * JSONP variant of the generic query endpoint: same semantics as the plain
-     * search, with the JSON response wrapped in the given callback function.
-     *
-     * @param query query expression, with the service name encoded in it
-     * @param startTime start of the query time range
-     * @param endTime end of the query time range
-     * @param pageSize maximum number of results to return
-     * @param startRowkey rowkey from which to resume scanning, for pagination
-     * @param treeAgg whether to perform hierarchical (tree) aggregation
-     * @param timeSeries whether to return time-series aggregation results
-     * @param intervalmin time-series aggregation interval, in minutes
-     * @param top return only the top N aggregated groups
-     * @param filterIfMissing whether to filter out rows missing the queried field
-     * @param parallel parallelism hint for query execution
-     * @param metricName metric name, for metric queries
-     * @param verbose whether to include verbose entity fields in the response
-     * @param callback JSONP callback function name
-     * @return the query response wrapped in the JSONP callback
-     */
-    @GET
-    @Path(JSONP_PATH)
-    @Consumes(MediaType.APPLICATION_JSON)
-    @Produces(MediaType.APPLICATION_JSON)
-    public JSONWithPadding searchWithJsonp(@QueryParam("query") String query,
-                                           @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime,
-                                           @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey,
-                                           @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries,
-                                           @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top,
-                                           @QueryParam("filterIfMissing") boolean filterIfMissing,
-                                           @QueryParam("parallel") int parallel,
-                                           @QueryParam("metricName") String metricName,
-                                           @QueryParam("verbose") Boolean verbose,
-                                           @QueryParam("callback") String callback){
-        GenericServiceAPIResponseEntity result = search(query, startTime, endTime, pageSize, startRowkey, treeAgg, timeSeries, intervalmin, top, filterIfMissing, parallel, metricName, verbose);
-        return new JSONWithPadding(new GenericEntity<GenericServiceAPIResponseEntity>(result){}, callback);
-    }
-
-    /**
-     * TODO
-     *
-     * Delete entities matching the given query expression; parameters mirror
-     * the generic search endpoint above.
-     *
-     * @return GenericServiceAPIResponseEntity listing the identifiers of the deleted entities
-     */
-    @DELETE
-    @Consumes(MediaType.APPLICATION_JSON)
-    @Produces(MediaType.APPLICATION_JSON)
-    public GenericServiceAPIResponseEntity deleteByQuery(@QueryParam("query") String query,
-                                                 @QueryParam("startTime") String startTime, @QueryParam("endTime") String endTime,
-                                                 @QueryParam("pageSize") int pageSize, @QueryParam("startRowkey") String startRowkey,
-                                                 @QueryParam("treeAgg") boolean treeAgg, @QueryParam("timeSeries") boolean timeSeries,
-                                                 @QueryParam("intervalmin") long intervalmin, @QueryParam("top") int top,
-                                                 @QueryParam("filterIfMissing") boolean filterIfMissing,
-                                                 @QueryParam("parallel") int parallel,
-                                                 @QueryParam("metricName") String metricName,
-                                                 @QueryParam("verbose") Boolean verbose){
-        RawQuery rawQuery = RawQuery.build()
-                .query(query)
-                .startTime(startTime)
-                .endTime(endTime)
-                .pageSize(pageSize)
-                .startRowkey(startRowkey)
-                .treeAgg(treeAgg)
-                .timeSeries(timeSeries)
-                .intervalMin(intervalmin)
-                .top(top)
-                .filerIfMissing(filterIfMissing)
-                .parallel(parallel)
-                .metricName(metricName)
-                .verbose(verbose)
-                .done();
-
-        GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity();
-        Map<String,Object> meta = new HashMap<String, Object>();
-        DataStorage dataStorage = null;
-        StopWatch stopWatch = new StopWatch();
-        try {
-            stopWatch.start();
-            dataStorage = DataStorageManager.getDataStorageByEagleConfig();
-            if(dataStorage==null){
-                LOG.error("Data storage is null");
-                throw new IllegalDataStorageException("Data storage is null");
-            }
-            
-            DeleteStatement deleteStatement = new DeleteStatement(rawQuery);
-            ModifyResult<String> deleteResult = deleteStatement.execute(dataStorage);
-            if(deleteResult.isSuccess()){
-                meta.put(ELAPSEDMS, stopWatch.getTime());
-                response.setObj(deleteResult.getIdentifiers(),String.class);
-                response.setSuccess(true);
-                response.setMeta(meta);
-            }
-            return response;
-        } catch (Exception e) {
-            response.setException(e);
-            LOG.error(e.getMessage(),e);
-        }finally {
-            stopWatch.stop();
-        }
-        return response;
-    }
-
-    /**
-     * Delete by entity list or by id list.
-     *
-     * Uses "POST /entities/delete" instead of "DELETE /entities" to work around the
-     * Jersey issue with DELETE requests that carry a body.
-     *
-     * @param inputStream JSON array of entities, or of encoded rowkeys when byId=true
-     * @param serviceName entity service name
-     * @param deleteById whether the request body contains rowkey ids instead of entities
-     * @return GenericServiceAPIResponseEntity listing the identifiers of the deleted entities
-     */
-    @POST
-    @Path(DELETE_ENTITIES_PATH)
-    @Consumes(MediaType.APPLICATION_JSON)
-    @Produces(MediaType.APPLICATION_JSON)
-    public GenericServiceAPIResponseEntity deleteEntities(InputStream inputStream,
-                                                 @QueryParam("serviceName") String serviceName,
-                                                 @QueryParam("byId") Boolean deleteById){
-        GenericServiceAPIResponseEntity<String> response = new GenericServiceAPIResponseEntity<String>();
-        DataStorage dataStorage = null;
-        Map<String,Object> meta = new HashMap<String, Object>();
-
-        if(deleteById == null) deleteById = false;
-
-        StopWatch stopWatch = new StopWatch();
-
-        try {
-            stopWatch.start();
-            dataStorage = DataStorageManager.getDataStorageByEagleConfig();
-            DeleteStatement statement = new DeleteStatement(serviceName);
-
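-            // byId=true: the request body is a JSON array of encoded rowkeys;
-            // otherwise it is a JSON array of entities of the given service.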
-            if(deleteById) {
-                LOG.info("Deleting "+serviceName+" by ids");
-                List<String> deleteIds = unmarshalAsStringList(inputStream);
-                statement.setIds(deleteIds);
-            }else {
-                LOG.info("Deleting "+serviceName+" by entities");
-                EntityDefinition entityDefinition = EntityDefinitionManager.getEntityByServiceName(serviceName);
-                if (entityDefinition == null) {
-                    throw new IllegalArgumentException("Entity definition of service " + serviceName + " not found");
-                }
-                List<? extends TaggedLogAPIEntity> entities = unmarshalEntitiesByService(inputStream, entityDefinition);
-                statement.setEntities(entities);
-            }
-
-            ModifyResult<String> result = statement.execute(dataStorage);
-            if (result.isSuccess()) {
-                List<String> keys = result.getIdentifiers();
-                if (keys != null) {
-                    response.setObj(keys, String.class);
-                    meta.put(TOTAL_RESULTS, keys.size());
-                } else {
-                    meta.put(TOTAL_RESULTS, 0);
-                }
-                meta.put(ELAPSEDMS, stopWatch.getTime());
-                response.setMeta(meta);
-                response.setSuccess(true);
-            }
-        } catch (Exception e) {
-            LOG.error(e.getMessage(), e);
-            response.setException(e);
-        }finally {
-            stopWatch.stop();
-        }
-        return response;
-    }
-}
\ No newline at end of file
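
A minimal usage sketch for the resource deleted above, assuming a JSON-capable Jersey 1.x client; the base URL, service name, query expression, and rowkeys are illustrative, not taken from this commit:

import javax.ws.rs.core.MediaType;
import com.sun.jersey.api.client.Client;
import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity;

public class GenericEntityServiceClientSketch {
    public static void main(String[] args) {
        Client client = Client.create();
        // Hypothetical deployment endpoint backing ROOT_PATH ("/entities").
        String base = "http://localhost:9099/eagle-service/rest/entities";

        // GET /entities: generic query; the service name is encoded in the query expression.
        GenericServiceAPIResponseEntity queryResponse = client.resource(base)
                .queryParam("query", "TestTimeSeriesAPIEntity[@cluster=\"c1\"]{*}")
                .queryParam("startTime", "2016-01-01 00:00:00")
                .queryParam("endTime", "2016-01-02 00:00:00")
                .queryParam("pageSize", "100")
                .accept(MediaType.APPLICATION_JSON)
                .get(GenericServiceAPIResponseEntity.class);
        System.out.println("query success = " + queryResponse.isSuccess());

        // POST /entities/delete?byId=true: delete by encoded rowkeys, working around
        // the Jersey limitation on DELETE requests that carry a body.
        GenericServiceAPIResponseEntity deleteResponse = client.resource(base + "/delete")
                .queryParam("serviceName", "TestTimeSeriesAPIEntity")
                .queryParam("byId", "true")
                .type(MediaType.APPLICATION_JSON)
                .post(GenericServiceAPIResponseEntity.class, "[\"rowkey1\",\"rowkey2\"]");
        System.out.println("delete success = " + deleteResponse.isSuccess());
    }
}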

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/0ea130ef/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericObjectMapperProvider.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericObjectMapperProvider.java b/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericObjectMapperProvider.java
deleted file mode 100755
index c10c28d..0000000
--- a/eagle-core/eagle-query/eagle-service-base/src/main/java/org/apache/eagle/service/generic/GenericObjectMapperProvider.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.eagle.service.generic;
-
-import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ser.FilterProvider;
-
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.ext.ContextResolver;
-import javax.ws.rs.ext.Provider;
-
-@Provider
-@Produces(MediaType.APPLICATION_JSON)
-public class GenericObjectMapperProvider implements ContextResolver<ObjectMapper> {
-    private final static ObjectMapper OBJECT_MAPPER = new ObjectMapper();
-    @Override
-    public ObjectMapper getContext(Class<?> clazz) {
-        return OBJECT_MAPPER;
-    }
-    public static void setFilter(FilterProvider filter){
-        OBJECT_MAPPER.setFilters(filter);
-    }
-
-    static{
-        setFilter(TaggedLogAPIEntity.getFilterProvider());
-        // set more filter here
-    }
-}
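
The provider above installs the TaggedLogAPIEntity filter into a single shared ObjectMapper at class-load time. A hypothetical sketch of installing a custom filter instead (filter id and property name are illustrative); note that setFilter() replaces, rather than merges with, the current FilterProvider:

import org.apache.eagle.service.generic.GenericObjectMapperProvider;
import org.codehaus.jackson.map.ser.impl.SimpleBeanPropertyFilter;
import org.codehaus.jackson.map.ser.impl.SimpleFilterProvider;

public class CustomFilterSetupSketch {
    public static void main(String[] args) {
        // setFilter replaces the mapper's FilterProvider outright, so the default
        // TaggedLogAPIEntity filter must be re-registered if it is still needed.
        SimpleFilterProvider filters = new SimpleFilterProvider()
                .addFilter("customEntityFilter",
                        SimpleBeanPropertyFilter.serializeAllExcept("internalField"));
        GenericObjectMapperProvider.setFilter(filters);
    }
}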

