Posted to commits@eagle.apache.org by ha...@apache.org on 2016/09/07 12:30:51 UTC

[2/8] incubator-eagle git commit: [EAGLE-520] Fix and decouple co-processor from eagle aggregation query service

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateTimeSeriesClient.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateTimeSeriesClient.java b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateTimeSeriesClient.java
index d61c974..f93b68e 100755
--- a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateTimeSeriesClient.java
+++ b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/aggregate/coprocessor/TestGroupAggregateTimeSeriesClient.java
@@ -1,220 +1,220 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.eagle.storage.hbase.aggregate.coprocessor;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-
-import org.apache.eagle.common.config.EagleConfigFactory;
-import org.junit.Assert;
-
-import org.apache.eagle.storage.hbase.query.coprocessor.AggregateClient;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.DoubleWritable;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.eagle.log.entity.GenericEntityWriter;
-import org.apache.eagle.log.entity.meta.EntityDefinition;
-import org.apache.eagle.log.entity.meta.EntityDefinitionManager;
-import org.apache.eagle.log.entity.test.TestTimeSeriesAPIEntity;
-import org.apache.eagle.query.ListQueryCompiler;
-import org.apache.eagle.query.aggregate.AggregateFunctionType;
-import org.apache.eagle.query.aggregate.raw.GroupbyKey;
-import org.apache.eagle.query.aggregate.raw.GroupbyKeyValue;
-import org.apache.eagle.query.aggregate.raw.GroupbyValue;
-import org.apache.eagle.service.hbase.TestHBaseBase;
-import org.apache.eagle.storage.hbase.query.coprocessor.impl.AggregateClientImpl;
-
-/**
- * @since : 11/10/14,2014
- */
-@Ignore
-public class TestGroupAggregateTimeSeriesClient extends TestHBaseBase {
-
-	private final static Logger LOG = LoggerFactory.getLogger(TestGroupAggregateTimeSeriesClient.class);
-
-	HTableInterface table;
-	long startTime;
-	long endTime;
-	List<String> rowkeys;
-	AggregateClient client;
-	Scan scan;
-	EntityDefinition ed;
-
-	@Before
-	public void setUp() throws IllegalAccessException, InstantiationException {
-		ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestTimeSeriesAPIEntity.class);
-		hbase.createTable("unittest", "f");
-		table = EagleConfigFactory.load().getHTable("unittest");
-		startTime = System.currentTimeMillis();
-		try {
-			rowkeys = prepareData(1000);
-		} catch (Exception e) {
-			e.printStackTrace();
-			Assert.fail(e.getMessage());
-		}
-		endTime = System.currentTimeMillis();
-
-		client = new AggregateClientImpl();
-		scan = new Scan();
-		ListQueryCompiler compiler = null;
-		try {
-			compiler = new ListQueryCompiler("TestTimeSeriesAPIEntity[@cluster=\"test4UT\" and @datacenter = \"dc1\"]{@field1,@field2}");
-		} catch (Exception e) {
-			Assert.fail(e.getMessage());
-		}
-		scan.setFilter(compiler.filter());
-//		scan.setStartRow(EagleBase64Wrapper.decode(rowkeys.get(0)));
-//		scan.setStopRow(EagleBase64Wrapper.decode(rowkeys.get(rowkeys.size()-1)));
-	}
-
-	private List<String> prepareData(int count) throws Exception {
-		List<TestTimeSeriesAPIEntity> list = new ArrayList<TestTimeSeriesAPIEntity>();
-
-		if (ed == null) {
-			EntityDefinitionManager.registerEntity(TestTimeSeriesAPIEntity.class);
-			ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestTimeSeriesAPIEntity.class);
-		}
-
-		for(int i=0;i<count;i++){
-			TestTimeSeriesAPIEntity e = new TestTimeSeriesAPIEntity();
-			e.setTimestamp(System.currentTimeMillis());
-			e.setField1(1);
-			e.setField2(2);
-			e.setField3(3);
-			e.setField4(4L);
-			e.setField5(5.0);
-			e.setField6(5.0);
-			e.setField7("7");
-			e.setTags(new HashMap<String, String>());
-			e.getTags().put("cluster", "test4UT");
-			e.getTags().put("datacenter", "dc1");
-			e.getTags().put("index", ""+i);
-			e.getTags().put("jobId", "job_"+System.currentTimeMillis());
-			list.add(e);
-		}
-
-		GenericEntityWriter writer = new GenericEntityWriter(ed.getService());
-		List<String> result = writer.write(list);
-		return result;
-	}
-
-
-	//@Test
-	public void testGroupTimeSeriesAggCountClient(){
-		try {
-			List<GroupbyKeyValue> result = client.aggregate(table,ed,scan, Arrays.asList("cluster","datacenter"),Arrays.asList(AggregateFunctionType.count),Arrays.asList("count"),true,startTime,System.currentTimeMillis(),10).getKeyValues();
-			if(LOG.isDebugEnabled()) LOG.debug("COUNT");
-			logGroupbyKeyValue(result);
-			Assert.assertNotNull(result);
-		} catch (Exception e) {
-			e.printStackTrace();
-			Assert.fail(e.getMessage());
-		}
-	}
-
-	//@Test
-	public void testGroupTimeSeriesAggMaxClient(){
-		try {
-			List<GroupbyKeyValue> result = client.aggregate(table,ed,scan, Arrays.asList("cluster","datacenter"),Arrays.asList(AggregateFunctionType.max),Arrays.asList("field2"),true,startTime,System.currentTimeMillis(),10).getKeyValues();
-			if(LOG.isDebugEnabled()) LOG.debug("MAX");
-			logGroupbyKeyValue(result);
-			Assert.assertNotNull(result);
-		} catch (Exception e) {
-			e.printStackTrace();
-			Assert.fail(e.getMessage());
-		}
-	}
-
-	//@Test
-	public void testGroupTimeSeriesAggMinClient(){
-		try {
-			List<GroupbyKeyValue> result = client.aggregate(table,ed,scan, Arrays.asList("cluster","datacenter"),Arrays.asList(AggregateFunctionType.min),Arrays.asList("field2"),true,startTime,System.currentTimeMillis(),10).getKeyValues();
-			if(LOG.isDebugEnabled()) LOG.debug("MIN");
-			logGroupbyKeyValue(result);
-			Assert.assertNotNull(result);
-		} catch (Exception e) {
-			e.printStackTrace();
-			Assert.fail(e.getMessage());
-		}
-	}
-
-	//@Test
-	public void testGroupTimeSeriesAggAvgClient(){
-		try {
-			List<GroupbyKeyValue> result = client.aggregate(table,ed,scan, Arrays.asList("cluster","datacenter"),Arrays.asList(AggregateFunctionType.min),Arrays.asList("field2"),true,startTime,System.currentTimeMillis(),10).getKeyValues();
-			if(LOG.isDebugEnabled()) LOG.debug("MIN");
-			logGroupbyKeyValue(result);
-			Assert.assertNotNull(result);
-		} catch (Exception e) {
-			e.printStackTrace();
-			Assert.fail(e.getMessage());
-		}
-	}
-
-	//@Test
-	public void testGroupTimeSeriesAggSumClient(){
-		try {
-			List<GroupbyKeyValue> result = client.aggregate(table,ed,scan, Arrays.asList("cluster","datacenter"),Arrays.asList(AggregateFunctionType.sum),Arrays.asList("field2"),true,startTime,System.currentTimeMillis(),10).getKeyValues();
-			if(LOG.isDebugEnabled()) LOG.debug("SUM");
-			logGroupbyKeyValue(result);
-			Assert.assertNotNull(result);
-		} catch (Exception e) {
-			e.printStackTrace();
-			Assert.fail(e.getMessage());
-		}
-	}
-
-	//@Test
-	public void testGroupTimeSeriesAggMultipleClient(){
-		try {
-			List<GroupbyKeyValue> result = client.aggregate(table,ed,scan,
-					Arrays.asList("cluster","datacenter"),
-					Arrays.asList(AggregateFunctionType.max,AggregateFunctionType.min,AggregateFunctionType.avg,AggregateFunctionType.sum,AggregateFunctionType.count),
-					Arrays.asList("field2","field2","field2","field2","field2"),true,startTime,System.currentTimeMillis(),16).getKeyValues();
-			if(LOG.isDebugEnabled()) LOG.debug("MUTILPLE");
-			logGroupbyKeyValue(result);
-			Assert.assertNotNull(result);
-		} catch (Exception e) {
-			e.printStackTrace();
-			Assert.fail(e.getMessage());
-		}
-	}
-
-	private void logGroupbyKeyValue(List<GroupbyKeyValue> keyValues){
-		for(GroupbyKeyValue keyValue:keyValues){
-			GroupbyKey key = keyValue.getKey();
-			List<String> keys = new ArrayList<String>();
-			for(BytesWritable bytes:key.getValue()){
-				keys.add(new String(bytes.copyBytes()));
-			}
-			List<Double> vals = new ArrayList<Double>();
-			GroupbyValue val = keyValue.getValue();
-			for(DoubleWritable dw:val.getValue()){
-				vals.add(dw.get());
-			}
-			if(LOG.isDebugEnabled()) LOG.debug("KEY: " + keys + ", VALUE: " + vals);
-		}
-	}
-}
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.storage.hbase.aggregate.coprocessor;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.eagle.common.config.EagleConfigFactory;
+import org.junit.Assert;
+
+import org.apache.eagle.storage.hbase.query.coprocessor.AggregateClient;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.DoubleWritable;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.eagle.log.entity.GenericEntityWriter;
+import org.apache.eagle.log.entity.meta.EntityDefinition;
+import org.apache.eagle.log.entity.meta.EntityDefinitionManager;
+import org.apache.eagle.log.entity.test.TestTimeSeriesAPIEntity;
+import org.apache.eagle.query.ListQueryCompiler;
+import org.apache.eagle.query.aggregate.AggregateFunctionType;
+import org.apache.eagle.query.aggregate.raw.GroupbyKey;
+import org.apache.eagle.query.aggregate.raw.GroupbyKeyValue;
+import org.apache.eagle.query.aggregate.raw.GroupbyValue;
+import org.apache.eagle.service.hbase.TestHBaseBase;
+import org.apache.eagle.storage.hbase.query.coprocessor.impl.AggregateClientImpl;
+
+/**
+ * @since : 11/10/14,2014
+ */
+@Ignore
+public class TestGroupAggregateTimeSeriesClient extends TestHBaseBase {
+
+    private final static Logger LOG = LoggerFactory.getLogger(TestGroupAggregateTimeSeriesClient.class);
+
+    HTableInterface table;
+    long startTime;
+    long endTime;
+    List<String> rowkeys;
+    AggregateClient client;
+    Scan scan;
+    EntityDefinition ed;
+
+    @Before
+    public void setUp() throws IllegalAccessException, InstantiationException {
+        ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestTimeSeriesAPIEntity.class);
+        hbase.createTable("unittest", "f");
+        table = EagleConfigFactory.load().getHTable("unittest");
+        startTime = System.currentTimeMillis();
+        try {
+            rowkeys = prepareData(1000);
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail(e.getMessage());
+        }
+        endTime = System.currentTimeMillis();
+
+        client = new AggregateClientImpl();
+        scan = new Scan();
+        ListQueryCompiler compiler = null;
+        try {
+            compiler = new ListQueryCompiler("TestTimeSeriesAPIEntity[@cluster=\"test4UT\" and @datacenter = \"dc1\"]{@field1,@field2}");
+        } catch (Exception e) {
+            Assert.fail(e.getMessage());
+        }
+        scan.setFilter(compiler.filter());
+//		scan.setStartRow(EagleBase64Wrapper.decode(rowkeys.get(0)));
+//		scan.setStopRow(EagleBase64Wrapper.decode(rowkeys.get(rowkeys.size()-1)));
+    }
+
+    private List<String> prepareData(int count) throws Exception {
+        List<TestTimeSeriesAPIEntity> list = new ArrayList<TestTimeSeriesAPIEntity>();
+
+        if (ed == null) {
+            EntityDefinitionManager.registerEntity(TestTimeSeriesAPIEntity.class);
+            ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestTimeSeriesAPIEntity.class);
+        }
+
+        for (int i = 0; i < count; i++) {
+            TestTimeSeriesAPIEntity e = new TestTimeSeriesAPIEntity();
+            e.setTimestamp(System.currentTimeMillis());
+            e.setField1(1);
+            e.setField2(2);
+            e.setField3(3);
+            e.setField4(4L);
+            e.setField5(5.0);
+            e.setField6(5.0);
+            e.setField7("7");
+            e.setTags(new HashMap<String, String>());
+            e.getTags().put("cluster", "test4UT");
+            e.getTags().put("datacenter", "dc1");
+            e.getTags().put("index", "" + i);
+            e.getTags().put("jobId", "job_" + System.currentTimeMillis());
+            list.add(e);
+        }
+
+        GenericEntityWriter writer = new GenericEntityWriter(ed.getService());
+        List<String> result = writer.write(list);
+        return result;
+    }
+
+
+    //@Test
+    public void testGroupTimeSeriesAggCountClient() {
+        try {
+            List<GroupbyKeyValue> result = client.aggregate(table, ed, scan, Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.count), Arrays.asList("count"), true, startTime, System.currentTimeMillis(), 10).getKeyValues();
+            if (LOG.isDebugEnabled()) LOG.debug("COUNT");
+            logGroupbyKeyValue(result);
+            Assert.assertNotNull(result);
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail(e.getMessage());
+        }
+    }
+
+    //@Test
+    public void testGroupTimeSeriesAggMaxClient() {
+        try {
+            List<GroupbyKeyValue> result = client.aggregate(table, ed, scan, Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.max), Arrays.asList("field2"), true, startTime, System.currentTimeMillis(), 10).getKeyValues();
+            if (LOG.isDebugEnabled()) LOG.debug("MAX");
+            logGroupbyKeyValue(result);
+            Assert.assertNotNull(result);
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail(e.getMessage());
+        }
+    }
+
+    //@Test
+    public void testGroupTimeSeriesAggMinClient() {
+        try {
+            List<GroupbyKeyValue> result = client.aggregate(table, ed, scan, Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.min), Arrays.asList("field2"), true, startTime, System.currentTimeMillis(), 10).getKeyValues();
+            if (LOG.isDebugEnabled()) LOG.debug("MIN");
+            logGroupbyKeyValue(result);
+            Assert.assertNotNull(result);
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail(e.getMessage());
+        }
+    }
+
+    //@Test
+    public void testGroupTimeSeriesAggAvgClient() {
+        try {
+            List<GroupbyKeyValue> result = client.aggregate(table, ed, scan, Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.avg), Arrays.asList("field2"), true, startTime, System.currentTimeMillis(), 10).getKeyValues();
+            if (LOG.isDebugEnabled()) LOG.debug("AVG");
+            logGroupbyKeyValue(result);
+            Assert.assertNotNull(result);
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail(e.getMessage());
+        }
+    }
+
+    //@Test
+    public void testGroupTimeSeriesAggSumClient() {
+        try {
+            List<GroupbyKeyValue> result = client.aggregate(table, ed, scan, Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("field2"), true, startTime, System.currentTimeMillis(), 10).getKeyValues();
+            if (LOG.isDebugEnabled()) LOG.debug("SUM");
+            logGroupbyKeyValue(result);
+            Assert.assertNotNull(result);
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail(e.getMessage());
+        }
+    }
+
+    //@Test
+    public void testGroupTimeSeriesAggMultipleClient() {
+        try {
+            List<GroupbyKeyValue> result = client.aggregate(table, ed, scan,
+                    Arrays.asList("cluster", "datacenter"),
+                    Arrays.asList(AggregateFunctionType.max, AggregateFunctionType.min, AggregateFunctionType.avg, AggregateFunctionType.sum, AggregateFunctionType.count),
+                    Arrays.asList("field2", "field2", "field2", "field2", "field2"), true, startTime, System.currentTimeMillis(), 16).getKeyValues();
+            if (LOG.isDebugEnabled()) LOG.debug("MUTILPLE");
+            logGroupbyKeyValue(result);
+            Assert.assertNotNull(result);
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail(e.getMessage());
+        }
+    }
+
+    private void logGroupbyKeyValue(List<GroupbyKeyValue> keyValues) {
+        for (GroupbyKeyValue keyValue : keyValues) {
+            GroupbyKey key = keyValue.getKey();
+            List<String> keys = new ArrayList<String>();
+            for (BytesWritable bytes : key.getValue()) {
+                keys.add(new String(bytes.copyBytes()));
+            }
+            List<Double> vals = new ArrayList<Double>();
+            GroupbyValue val = keyValue.getValue();
+            for (DoubleWritable dw : val.getValue()) {
+                vals.add(dw.get());
+            }
+            if (LOG.isDebugEnabled()) LOG.debug("KEY: " + keys + ", VALUE: " + vals);
+        }
+    }
+}
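
The aggregate calls exercised above all share one shape: the HBase table, the entity definition, a scan carrying the compiled filter, the group-by tag names, the aggregate functions, the fields they apply to, the time-series flag, the start/end of the time range, and a trailing interval argument. A minimal sketch of a single call follows, assuming table, ed, scan and startTime are initialized as in setUp() and the same imports as the test above:

    // Sketch only: table, ed, scan and startTime are assumed to be set up as in setUp() above.
    AggregateClient client = new AggregateClientImpl();
    List<GroupbyKeyValue> keyValues = client.aggregate(
            table, ed, scan,
            Arrays.asList("cluster", "datacenter"),     // group-by tag names
            Arrays.asList(AggregateFunctionType.sum),   // aggregate function(s)
            Arrays.asList("field2"),                    // field(s) to aggregate
            true,                                       // time-series mode
            startTime, System.currentTimeMillis(),      // time range
            10                                          // interval argument, passed through as in the tests above
    ).getKeyValues();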

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/integrationtest/CoprocessorITSuite.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/integrationtest/CoprocessorITSuite.java b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/integrationtest/CoprocessorITSuite.java
new file mode 100644
index 0000000..d380418
--- /dev/null
+++ b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/integrationtest/CoprocessorITSuite.java
@@ -0,0 +1,246 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.storage.hbase.integrationtest;
+
+import org.apache.eagle.common.config.EagleConfigFactory;
+import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
+import org.apache.eagle.log.entity.GenericEntityWriter;
+import org.apache.eagle.log.entity.meta.EntityDefinition;
+import org.apache.eagle.log.entity.meta.EntityDefinitionManager;
+import org.apache.eagle.log.entity.test.TestLogAPIEntity;
+import org.apache.eagle.query.ListQueryCompiler;
+import org.apache.eagle.query.aggregate.AggregateFunctionType;
+import org.apache.eagle.query.aggregate.raw.GroupbyKey;
+import org.apache.eagle.query.aggregate.raw.GroupbyKeyValue;
+import org.apache.eagle.query.aggregate.raw.GroupbyValue;
+import org.apache.eagle.storage.hbase.query.coprocessor.AggregateClient;
+import org.apache.eagle.storage.hbase.query.coprocessor.impl.AggregateClientImpl;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.DoubleWritable;
+import org.junit.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.*;
+
+@Ignore("Coprocessor Aggregate Client Integration Test.")
+public class CoprocessorITSuite {
+    private HTableInterface table;
+    private long startTime;
+    private long endTime;
+    private List<String> rowkeys;
+    private AggregateClient client;
+    private Scan scan;
+    private int num = 200;
+
+    private static final Logger LOG = LoggerFactory.getLogger(CoprocessorITSuite.class);
+
+    @BeforeClass
+    public static void before() {
+        System.setProperty("config.resource", "/application-sandbox.conf");
+    }
+
+    @Before
+    public void setUp() {
+        startTime = System.currentTimeMillis();
+        try {
+            rowkeys = prepareData(num);
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail(e.getMessage());
+        }
+        endTime = System.currentTimeMillis();
+        table = EagleConfigFactory.load().getHTable("unittest");
+        client = new AggregateClientImpl();
+        scan = new Scan();
+        scan.setCaching(200);
+
+        ListQueryCompiler compiler = null;
+        try {
+            compiler = new ListQueryCompiler("TestLogAPIEntity[@cluster=\"test4UT\" and @datacenter=\"dc1\"]{@field1,@field2}");
+        } catch (Exception e) {
+            Assert.fail(e.getMessage());
+        }
+        scan.setFilter(compiler.filter());
+    }
+
+    private List<String> prepareData(int count) throws Exception {
+        List<TaggedLogAPIEntity> list = new ArrayList<TaggedLogAPIEntity>();
+        EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+
+        if (ed == null) {
+            EntityDefinitionManager.registerEntity(TestLogAPIEntity.class);
+            ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+        }
+        ed.setTimeSeries(true);
+        for (int i = 0; i < count; i++) {
+            TestLogAPIEntity e = new TestLogAPIEntity();
+            e.setTags(new HashMap<String, String>());
+            e.getTags().put("cluster", "test4UT");
+            e.getTags().put("datacenter", "dc1");
+            e.getTags().put("index", "" + i);
+            e.getTags().put("jobId", "job_" + System.currentTimeMillis());
+            e.setTimestamp(System.currentTimeMillis());
+            e.setField1(1);
+            e.setField2(2);
+            e.setField3(3);
+            e.setField4(4L);
+            e.setField5(5.0);
+            e.setField6(5.0);
+            e.setField7("7");
+            list.add(e);
+        }
+        GenericEntityWriter writer = new GenericEntityWriter(ed.getService());
+        LOG.info("Writing {} TestLogAPIEntity entities", list.size());
+        List<String> result = writer.write(list);
+        LOG.info("Finish writing test entities");
+        return result;
+    }
+
+    @Test
+    public void testGroupAggregateCountClient() {
+        try {
+            EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity");
+            final List<GroupbyKeyValue> result = client.aggregate(table, ed,
+                    scan, Arrays.asList("cluster", "datacenter"),
+                    Collections.singletonList(AggregateFunctionType.count),
+                    Collections.singletonList("field2")).getKeyValues();
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("COUNT");
+            }
+            logGroupbyKeyValue(result);
+            Assert.assertNotNull(result);
+            Assert.assertTrue(result.size() > 0);
+        } catch (Exception e) {
+            Assert.fail(e.getMessage());
+        }
+    }
+
+    @Test
+    public void testGroupAggregateAvgClient() {
+        try {
+            EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity");
+            List<GroupbyKeyValue> result = client.aggregate(table, ed, scan, Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.avg), Arrays.asList("field2")).getKeyValues();
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("AVG");
+            }
+            logGroupbyKeyValue(result);
+            Assert.assertNotNull(result);
+            Assert.assertTrue(result.size() > 0);
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail(e.getMessage());
+        }
+    }
+
+    @Test
+    public void testGroupAggregateMaxClient() {
+        try {
+            EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity");
+            List<GroupbyKeyValue> result = client.aggregate(table, ed, scan, Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.max), Arrays.asList("field1")).getKeyValues();
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("MAX");
+            }
+            logGroupbyKeyValue(result);
+            Assert.assertNotNull(result);
+            Assert.assertTrue(result.size() > 0);
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail(e.getMessage());
+        }
+    }
+
+    @Test
+    public void testGroupAggregateSumClient() {
+        try {
+            EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity");
+            List<GroupbyKeyValue> result = client.aggregate(table, ed, scan, Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.sum), Arrays.asList("field2")).getKeyValues();
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("MAX");
+            }
+            logGroupbyKeyValue(result);
+            Assert.assertNotNull(result);
+            Assert.assertTrue(result.size() > 0);
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail(e.getMessage());
+        }
+    }
+
+    @Test
+    public void testGroupAggregateMinClient() {
+
+        try {
+            EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity");
+            List<GroupbyKeyValue> result = client.aggregate(table, ed, scan, Arrays.asList("cluster", "datacenter"), Arrays.asList(AggregateFunctionType.min), Arrays.asList("field2")).getKeyValues();
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("MIN");
+            }
+            logGroupbyKeyValue(result);
+            Assert.assertNotNull(result);
+            Assert.assertTrue(result.size() > 0);
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail(e.getMessage());
+        }
+    }
+
+    @Test
+    public void testGroupAggregateMultipleClient() {
+        try {
+            EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity");
+            final List<GroupbyKeyValue> result = client.aggregate(table, ed, scan, Arrays.asList("cluster", "datacenter"),
+                    Arrays.asList(AggregateFunctionType.min,
+                            AggregateFunctionType.max,
+                            AggregateFunctionType.avg,
+                            AggregateFunctionType.count,
+                            AggregateFunctionType.sum),
+                    Arrays.asList("field2", "field2", "field2", "field2", "field2")).getKeyValues();
+            logGroupbyKeyValue(result);
+            Assert.assertNotNull(result);
+            Assert.assertTrue(result.size() > 0);
+            Assert.assertEquals(2.0, result.get(0).getValue().get(0).get(), 0.00001);
+            Assert.assertEquals(2.0, result.get(0).getValue().get(1).get(), 0.00001);
+            Assert.assertEquals(2.0, result.get(0).getValue().get(2).get(), 0.00001);
+            Assert.assertTrue(num <= result.get(0).getValue().get(3).get());
+            Assert.assertTrue(2.0 * num <= result.get(0).getValue().get(4).get());
+            Assert.assertEquals("test4UT", new String(result.get(0).getKey().getValue().get(0).copyBytes()));
+            Assert.assertEquals("dc1", new String(result.get(0).getKey().getValue().get(1).copyBytes()));
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail(e.getMessage());
+        }
+    }
+
+    private void logGroupbyKeyValue(List<GroupbyKeyValue> keyValues) {
+        for (GroupbyKeyValue keyValue : keyValues) {
+            GroupbyKey key = keyValue.getKey();
+            List<String> keys = new ArrayList<>();
+            for (BytesWritable bytes : key.getValue()) {
+                keys.add(new String(bytes.copyBytes()));
+            }
+            List<Double> vals = new ArrayList<>();
+            GroupbyValue val = keyValue.getValue();
+            for (DoubleWritable dw : val.getValue()) {
+                vals.add(dw.get());
+            }
+            if (LOG.isDebugEnabled()) LOG.debug("KEY: " + keys + ", VALUE: " + vals);
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/integrationtest/CoprocessorJarUtils.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/integrationtest/CoprocessorJarUtils.java b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/integrationtest/CoprocessorJarUtils.java
new file mode 100644
index 0000000..bc7a022
--- /dev/null
+++ b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/integrationtest/CoprocessorJarUtils.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.storage.hbase.integrationtest;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.filefilter.TrueFileFilter;
+import org.apache.commons.io.filefilter.WildcardFileFilter;
+
+import java.io.File;
+import java.util.Collection;
+
+public class CoprocessorJarUtils {
+    public static File getCoprocessorJarFile() {
+        String projectRootDir = System.getProperty("user.dir");
+        String targetDirPath = projectRootDir + "/target/";
+        File targetDirFile = new File(targetDirPath);
+        if (!targetDirFile.exists()) {
+            throw new IllegalStateException(targetDirPath + " not found, please execute 'mvn install -DskipTests' under " + projectRootDir + " to build the project first, then retry");
+        }
+        String jarFileNameWildCard = "eagle-storage-hbase-*-coprocessor.jar";
+        Collection<File> jarFiles = FileUtils.listFiles(targetDirFile, new WildcardFileFilter(jarFileNameWildCard), TrueFileFilter.INSTANCE);
+        if (jarFiles.size() == 0) {
+            throw new IllegalStateException("jar is not found, please execute 'mvn package -DskipTests' from project root firstly and retry");
+        }
+        return jarFiles.iterator().next();
+    }
+}
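
A minimal usage sketch for this helper, mirroring what CoprocessorToolITSuite below does: the flag names, config file, remote jar path and "unittest" table name are taken from that test, not fixed by this class.

    // Sketch only: resolve the locally built coprocessor jar and hand it to CoprocessorTool,
    // as the integration test below does via --localJar.
    String localJarPath = CoprocessorJarUtils.getCoprocessorJarFile().getPath();
    CoprocessorTool.main(new String[]{
            "--register",
            "--config", "hbase-site-sandbox.xml",
            "--table", "unittest",
            "--jar", "/tmp/eagle-storage-hbase-latest-coprocessor.jar",
            "--localJar", localJarPath});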

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/integrationtest/CoprocessorToolITSuite.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/integrationtest/CoprocessorToolITSuite.java b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/integrationtest/CoprocessorToolITSuite.java
new file mode 100644
index 0000000..8e7497b
--- /dev/null
+++ b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/integrationtest/CoprocessorToolITSuite.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.storage.hbase.integrationtest;
+
+import org.apache.eagle.storage.hbase.tools.CoprocessorTool;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.junit.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+
+@Ignore("Coprocessor CLI Tool Integration Test.")
+public class CoprocessorToolITSuite {
+    private static final String remoteJarPath = "/tmp/eagle-storage-hbase-latest-coprocessor.jar";
+    private static String localJarPath = null;
+    private static final Logger LOGGER = LoggerFactory.getLogger(CoprocessorToolITSuite.class);
+    private static final String toolITTableName = "coprocessor_it_table";
+
+    static {
+        Configuration.addDefaultResource("hbase-site-sandbox.xml");
+        localJarPath = CoprocessorJarUtils.getCoprocessorJarFile().getPath();
+    }
+
+    private void testRegisterCoprocessor(String tableName) throws Exception {
+        CoprocessorTool.main(new String[]{
+                "--register",
+                "--config", "hbase-site-sandbox.xml",
+                "--table", tableName,
+                "--jar", remoteJarPath,
+                "--localJar", localJarPath});
+    }
+
+    private void testUnregisterCoprocessor(String tableName) throws Exception {
+        CoprocessorTool.main(new String[]{
+                "--unregister",
+                "--config", "hbase-site-sandbox.xml",
+                "--table", tableName
+        });
+    }
+
+    private void ensureTable() throws IOException {
+        LOGGER.info("Creating table {}", toolITTableName);
+        HBaseAdmin admin = new HBaseAdmin(new Configuration());
+        HTableDescriptor hTableDescriptor = new HTableDescriptor(TableName.valueOf(toolITTableName));
+        hTableDescriptor.addFamily(new HColumnDescriptor("f"));
+        admin.createTable(hTableDescriptor);
+        admin.close();
+        LOGGER.info("Created table {}", toolITTableName);
+    }
+
+    @Test
+    public void testRegisterAndUnregisterCoprocessor() throws Exception {
+        try {
+            ensureTable();
+            testRegisterCoprocessor(toolITTableName);
+            testUnregisterCoprocessor(toolITTableName);
+        } finally {
+            deleteTable();
+        }
+    }
+
+    private void deleteTable() throws IOException {
+        HBaseAdmin admin = new HBaseAdmin(new Configuration());
+        admin.disableTable(TableName.valueOf(toolITTableName));
+        admin.deleteTable(TableName.valueOf(toolITTableName));
+        admin.close();
+    }
+
+    @Test
+    public void testRegisterCoprocessor() throws Exception {
+        testRegisterCoprocessor("unittest");
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/spi/TestHBaseStorage.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/spi/TestHBaseStorage.java b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/spi/TestHBaseStorage.java
deleted file mode 100644
index cc5913a..0000000
--- a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/spi/TestHBaseStorage.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.eagle.storage.hbase.spi;
-
-import org.junit.Test;
-
-/**
- * @since 3/23/15
- */
-public class TestHBaseStorage {
-    @Test
-    public void testCreate(){
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/spi/TestHBaseStorageLoader.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/spi/TestHBaseStorageLoader.java b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/spi/TestHBaseStorageLoader.java
index 393dfd5..a4c0e23 100644
--- a/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/spi/TestHBaseStorageLoader.java
+++ b/eagle-core/eagle-query/eagle-storage-hbase/src/test/java/org/apache/eagle/storage/hbase/spi/TestHBaseStorageLoader.java
@@ -27,7 +27,8 @@ import org.junit.Test;
  * @since 3/20/15
  */
 public class TestHBaseStorageLoader {
-    @Test @Ignore("TODO: Add back after refactoring hbase related unit test cases")
+    @Test
+    @Ignore("TODO: Add back after refactoring hbase related unit test cases")
     public void testHBaseStorageLoader() {
         try {
             assert DataStorageManager.getDataStorageByEagleConfig() instanceof HBaseStorage;

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/application-sandbox.conf
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/application-sandbox.conf b/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/application-sandbox.conf
new file mode 100644
index 0000000..9ef869c
--- /dev/null
+++ b/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/application-sandbox.conf
@@ -0,0 +1,23 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+storage {
+  storage-type = "hbase"
+  table-name-prefixed-with-environment = false
+  coprocessor-enabled = true
+  hbase-zookeeper-quorum = "sandbox.hortonworks.com"
+  hbase-zookeeper-property-clientPort = 2181
+  zookeeper-znode-parent = "/hbase-unsecure"
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/application.conf
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/application.conf b/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/application.conf
index 25257c5..d51e3f4 100644
--- a/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/application.conf
+++ b/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/application.conf
@@ -13,16 +13,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-eagle{
-	service{
-		env="dev"
-		host="localhost"
-		port=8080
-		storage-type="hbase"
-		table-name-prefixed-with-environment=false
-		coprocessor-enabled=false
-		hbase-zookeeper-quorum="localhost"
-		hbase-zookeeper-property-clientPort=2181
-		zookeeper-znode-parent="/hbase-unsecure"
-	}
+storage {
+  env = "dev"
+  host = "localhost"
+  port = 8080
+  storage-type = "hbase"
+  table-name-prefixed-with-environment = false
+  coprocessor-enabled = false
+  hbase-zookeeper-quorum = "localhost"
+  hbase-zookeeper-property-clientPort = 2181
+  zookeeper-znode-parent = "/hbase-unsecure"
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/hbase-site-sandbox.xml
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/hbase-site-sandbox.xml b/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/hbase-site-sandbox.xml
new file mode 100644
index 0000000..17e40a1
--- /dev/null
+++ b/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/hbase-site-sandbox.xml
@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<configuration>
+    <property>
+        <name>fs.defaultFS</name>
+        <value>hdfs://sandbox.hortonworks.com:8020</value>
+    </property>
+    <property>
+        <name>hbase.zookeeper.property.clientPort</name>
+        <value>2181</value>
+    </property>
+    <property>
+        <name>hbase.zookeeper.quorum</name>
+        <value>sandbox.hortonworks.com</value>
+    </property>
+    <property>
+        <name>zookeeper.session.timeout</name>
+        <value>30000</value>
+    </property>
+    <property>
+        <name>zookeeper.znode.parent</name>
+        <value>/hbase-unsecure</value>
+    </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/log4j.properties b/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/log4j.properties
index fb13ad5..ba06033 100644
--- a/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/log4j.properties
+++ b/eagle-core/eagle-query/eagle-storage-hbase/src/test/resources/log4j.properties
@@ -12,9 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 log4j.rootLogger=DEBUG, stdout
-
 # standard output
 log4j.appender.stdout=org.apache.log4j.ConsoleAppender
 log4j.appender.stdout.layout=org.apache.log4j.PatternLayout

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/JdbcConstants.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/JdbcConstants.java b/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/JdbcConstants.java
index a6d0c93..84bf86e 100644
--- a/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/JdbcConstants.java
+++ b/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/JdbcConstants.java
@@ -31,14 +31,14 @@ public class JdbcConstants {
     public static final int DEFAULT_VARCHAR_SIZE =30000;
 
     // Eagle JDBC Storage Configuration
-    public final static String EAGLE_DB_USERNAME = "eagle.service.storage-username";
-    public final static String EAGLE_DB_PASSWORD = "eagle.service.storage-password";
-    public final static String EAGLE_CONN_URL= "eagle.service.storage-connection-url";
-    public final static String EAGLE_CONN_PROPS= "eagle.service.storage-connection-props";
-    public final static String EAGLE_ADAPTER= "eagle.service.storage-adapter";
-    public final static String EAGLE_DATABASE= "eagle.service.storage-database";
-    public final static String EAGLE_DRIVER_CLASS= "eagle.service.storage-driver-class";
-    public final static String EAGLE_CONN_MAX_SIZE= "eagle.service.storage-connection-max";
+    public final static String EAGLE_DB_USERNAME = "storage.storage-username";
+    public final static String EAGLE_DB_PASSWORD = "storage.storage-password";
+    public final static String EAGLE_CONN_URL= "storage.storage-connection-url";
+    public final static String EAGLE_CONN_PROPS= "storage.storage-connection-props";
+    public final static String EAGLE_ADAPTER= "storage.storage-adapter";
+    public final static String EAGLE_DATABASE= "storage.storage-database";
+    public final static String EAGLE_DRIVER_CLASS= "storage.storage-driver-class";
+    public final static String EAGLE_CONN_MAX_SIZE= "storage.storage-connection-max";
 
     public static final boolean isReservedField(String columnName){
         return TIMESTAMP_COLUMN_NAME.equals(columnName) || METRIC_NAME_COLUMN_NAME.equals(columnName) || ROW_KEY_COLUMN_NAME.equals(columnName);
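
These renamed keys line up with the flattened storage { ... } blocks introduced in the test .conf files later in this diff. A minimal sketch of resolving them, assuming the Typesafe Config API that backs these HOCON files:

    // Sketch only; assumes com.typesafe.config (Config, ConfigFactory) is on the classpath.
    Config config = ConfigFactory.load();                                   // loads application.conf
    String username = config.getString(JdbcConstants.EAGLE_DB_USERNAME);    // "storage.storage-username"
    String connectionUrl = config.getString(JdbcConstants.EAGLE_CONN_URL);  // "storage.storage-connection-url"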

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/schema/JdbcEntitySchemaManager.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/schema/JdbcEntitySchemaManager.java b/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/schema/JdbcEntitySchemaManager.java
index 4cd1967..9e47ac3 100644
--- a/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/schema/JdbcEntitySchemaManager.java
+++ b/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/schema/JdbcEntitySchemaManager.java
@@ -1,6 +1,4 @@
-package org.apache.eagle.storage.jdbc.schema;
-
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -17,15 +15,18 @@ package org.apache.eagle.storage.jdbc.schema;
  * limitations under the License.
  */
 
-import org.apache.ddlutils.Platform;
-import org.apache.ddlutils.PlatformFactory;
-import org.apache.ddlutils.model.*;
+package org.apache.eagle.storage.jdbc.schema;
+
 import org.apache.eagle.log.entity.GenericMetricEntity;
 import org.apache.eagle.log.entity.meta.Qualifier;
 import org.apache.eagle.storage.jdbc.JdbcConstants;
 import org.apache.eagle.storage.jdbc.conn.ConnectionConfig;
 import org.apache.eagle.storage.jdbc.conn.ConnectionConfigFactory;
 import org.apache.eagle.storage.jdbc.conn.ConnectionManagerFactory;
+
+import org.apache.ddlutils.Platform;
+import org.apache.ddlutils.PlatformFactory;
+import org.apache.ddlutils.model.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/schema/serializer/MetricJdbcSerDeser.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/schema/serializer/MetricJdbcSerDeser.java b/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/schema/serializer/MetricJdbcSerDeser.java
index 8162955..53ba528 100644
--- a/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/schema/serializer/MetricJdbcSerDeser.java
+++ b/eagle-core/eagle-query/eagle-storage-jdbc/src/main/java/org/apache/eagle/storage/jdbc/schema/serializer/MetricJdbcSerDeser.java
@@ -1,16 +1,4 @@
-package org.apache.eagle.storage.jdbc.schema.serializer;
-
-import org.apache.eagle.log.entity.meta.Qualifier;
-import org.apache.eagle.storage.jdbc.JdbcConstants;
-import org.apache.eagle.storage.jdbc.schema.JdbcEntityDefinitionManager;
-import org.apache.torque.util.JdbcTypedValue;
-
-import java.io.IOException;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Types;
-
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
  * this work for additional information regarding copyright ownership.
@@ -26,6 +14,19 @@ import java.sql.Types;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+package org.apache.eagle.storage.jdbc.schema.serializer;
+
+import org.apache.eagle.log.entity.meta.Qualifier;
+import org.apache.eagle.storage.jdbc.JdbcConstants;
+import org.apache.eagle.storage.jdbc.schema.JdbcEntityDefinitionManager;
+import org.apache.torque.util.JdbcTypedValue;
+
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Types;
+
 public class MetricJdbcSerDeser implements JdbcSerDeser<double[]> {
     @Override
     public double[] toJavaTypedValue(ResultSet result, Class<?> fieldType, String fieldName, Qualifier qualifier) throws IOException {

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application-derby.conf
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application-derby.conf b/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application-derby.conf
index 689dc91..6b4aad1 100644
--- a/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application-derby.conf
+++ b/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application-derby.conf
@@ -13,8 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-eagle {
-	service {
+storage {
 		storage-type="jdbc"
 		storage-adapter="derby"
 		storage-username="eagle"
@@ -24,5 +23,4 @@ eagle {
 		storage-connection-props="encoding=UTF-8"
 		storage-driver-class="org.apache.derby.jdbc.EmbeddedDriver"
 		storage-connection-max=8
-	}
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application-mysql.conf
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application-mysql.conf b/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application-mysql.conf
index 61a899f..565c42b 100644
--- a/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application-mysql.conf
+++ b/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application-mysql.conf
@@ -13,8 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-eagle {
-	service {
+storage{
 		storage-type="jdbc"
 		storage-adapter="mysql"
 		storage-username="eagle"
@@ -24,5 +23,4 @@ eagle {
 		storage-connection-props="encoding=UTF-8"
 		storage-driver-class="com.mysql.jdbc.Driver"
 		storage-connection-max=8
-	}
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application.conf
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application.conf b/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application.conf
index 689dc91..6b4aad1 100644
--- a/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application.conf
+++ b/eagle-core/eagle-query/eagle-storage-jdbc/src/test/resources/application.conf
@@ -13,8 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-eagle {
-	service {
+storage {
 		storage-type="jdbc"
 		storage-adapter="derby"
 		storage-username="eagle"
@@ -24,5 +23,4 @@ eagle {
 		storage-connection-props="encoding=UTF-8"
 		storage-driver-class="org.apache.derby.jdbc.EmbeddedDriver"
 		storage-connection-max=8
-	}
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobConfigurationAPIEntity.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobConfigurationAPIEntity.java b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobConfigurationAPIEntity.java
index d186fd4..bd40c48 100644
--- a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobConfigurationAPIEntity.java
+++ b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobConfigurationAPIEntity.java
@@ -48,7 +48,7 @@ public class JobConfigurationAPIEntity extends JobBaseAPIEntity {
 
     public void setJobConfig(JobConfig jobConfig) {
         this.jobConfig = jobConfig;
-        _pcs.firePropertyChange("jobConfig", null, null);
+        pcs.firePropertyChange("jobConfig", null, null);
     }
 
     public String getConfigJobName() {
@@ -57,7 +57,7 @@ public class JobConfigurationAPIEntity extends JobBaseAPIEntity {
 
     public void setConfigJobName(String configJobName) {
         this.configJobName = configJobName;
-        _pcs.firePropertyChange("configJobName", null, null);
+        pcs.firePropertyChange("configJobName", null, null);
     }
 
     public String getAlertEmailList() {
@@ -66,6 +66,6 @@ public class JobConfigurationAPIEntity extends JobBaseAPIEntity {
 
     public void setAlertEmailList(String alertEmailList) {
         this.alertEmailList = alertEmailList;
-        _pcs.firePropertyChange("alertEmailList", null, null);
+        pcs.firePropertyChange("alertEmailList", null, null);
     }
 }
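
The _pcs to pcs renames in this entity and in the entity files that follow all touch the property-change field inherited from the entity base class. Below is a small, self-contained sketch of that setter pattern, assuming the renamed field is a java.beans.PropertyChangeSupport; the base class and entity here are hypothetical stand-ins, not the actual TaggedLogAPIEntity or JobBaseAPIEntity.

    import java.beans.PropertyChangeListener;
    import java.beans.PropertyChangeSupport;

    // Hypothetical stand-in for the entity base class, assuming the
    // renamed "pcs" field is a java.beans.PropertyChangeSupport.
    abstract class BaseEntitySketch {
        protected final PropertyChangeSupport pcs = new PropertyChangeSupport(this);

        public void addPropertyChangeListener(PropertyChangeListener listener) {
            pcs.addPropertyChangeListener(listener);
        }
    }

    // The setter pattern used throughout these entities: assign the field,
    // then fire a change event so listeners can track modified fields.
    class ExampleEntity extends BaseEntitySketch {
        private String configJobName;

        public void setConfigJobName(String configJobName) {
            this.configJobName = configJobName;
            pcs.firePropertyChange("configJobName", null, null);
        }
    }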

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobEventAPIEntity.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobEventAPIEntity.java b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobEventAPIEntity.java
index c6bb8e4..aa2c3af 100644
--- a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobEventAPIEntity.java
+++ b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobEventAPIEntity.java
@@ -39,6 +39,6 @@ public class JobEventAPIEntity extends JobBaseAPIEntity {
 
     public void setEventType(String eventType) {
         this.eventType = eventType;
-        _pcs.firePropertyChange("eventType", null, null);
+        pcs.firePropertyChange("eventType", null, null);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobExecutionAPIEntity.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobExecutionAPIEntity.java b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobExecutionAPIEntity.java
index cdc5810..f7540d5 100644
--- a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobExecutionAPIEntity.java
+++ b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobExecutionAPIEntity.java
@@ -116,7 +116,7 @@ public class JobExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setCurrentState(String currentState) {
         this.currentState = currentState;
-        _pcs.firePropertyChange("currentState", null, null);
+        pcs.firePropertyChange("currentState", null, null);
     }
 
     public long getStartTime() {
@@ -125,7 +125,7 @@ public class JobExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setStartTime(long startTime) {
         this.startTime = startTime;
-        _pcs.firePropertyChange("startTime", null, null);
+        pcs.firePropertyChange("startTime", null, null);
     }
 
     public long getEndTime() {
@@ -134,7 +134,7 @@ public class JobExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setEndTime(long endTime) {
         this.endTime = endTime;
-        _pcs.firePropertyChange("endTime", null, null);
+        pcs.firePropertyChange("endTime", null, null);
     }
 
     public int getNumTotalMaps() {
@@ -143,7 +143,7 @@ public class JobExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setNumTotalMaps(int numTotalMaps) {
         this.numTotalMaps = numTotalMaps;
-        _pcs.firePropertyChange("numTotalMaps", null, null);
+        pcs.firePropertyChange("numTotalMaps", null, null);
     }
 
     public int getNumFailedMaps() {
@@ -152,7 +152,7 @@ public class JobExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setNumFailedMaps(int numFailedMaps) {
         this.numFailedMaps = numFailedMaps;
-        _pcs.firePropertyChange("numFailedMaps", null, null);
+        pcs.firePropertyChange("numFailedMaps", null, null);
     }
 
     public int getNumFinishedMaps() {
@@ -161,7 +161,7 @@ public class JobExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setNumFinishedMaps(int numFinishedMaps) {
         this.numFinishedMaps = numFinishedMaps;
-        _pcs.firePropertyChange("numFinishedMaps", null, null);
+        pcs.firePropertyChange("numFinishedMaps", null, null);
     }
 
     public int getNumTotalReduces() {
@@ -170,7 +170,7 @@ public class JobExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setNumTotalReduces(int numTotalReduces) {
         this.numTotalReduces = numTotalReduces;
-        _pcs.firePropertyChange("numTotalReduces", null, null);
+        pcs.firePropertyChange("numTotalReduces", null, null);
     }
 
     public int getNumFailedReduces() {
@@ -179,7 +179,7 @@ public class JobExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setNumFailedReduces(int numFailedReduces) {
         this.numFailedReduces = numFailedReduces;
-        _pcs.firePropertyChange("numFailedReduces", null, null);
+        pcs.firePropertyChange("numFailedReduces", null, null);
     }
 
     public int getNumFinishedReduces() {
@@ -188,7 +188,7 @@ public class JobExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setNumFinishedReduces(int numFinishedReduces) {
         this.numFinishedReduces = numFinishedReduces;
-        _pcs.firePropertyChange("numFinishedReduces", null, null);
+        pcs.firePropertyChange("numFinishedReduces", null, null);
     }
 
     public JobCounters getJobCounters() {
@@ -197,7 +197,7 @@ public class JobExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setJobCounters(JobCounters jobCounters) {
         this.jobCounters = jobCounters;
-        _pcs.firePropertyChange("jobCounters", null, null);
+        pcs.firePropertyChange("jobCounters", null, null);
     }
 
     public int getDataLocalMaps() {

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobProcessTimeStampEntity.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobProcessTimeStampEntity.java b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobProcessTimeStampEntity.java
index 6afe347..d887698 100644
--- a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobProcessTimeStampEntity.java
+++ b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/JobProcessTimeStampEntity.java
@@ -40,6 +40,6 @@ public class JobProcessTimeStampEntity extends TaggedLogAPIEntity {
 
     public void setCurrentTimeStamp(long currentTimeStamp) {
         this.currentTimeStamp = currentTimeStamp;
-        _pcs.firePropertyChange("currentTimeStamp", null, null);
+        pcs.firePropertyChange("currentTimeStamp", null, null);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskAttemptCounterAPIEntity.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskAttemptCounterAPIEntity.java b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskAttemptCounterAPIEntity.java
index e526f45..1bc9ca0 100644
--- a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskAttemptCounterAPIEntity.java
+++ b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskAttemptCounterAPIEntity.java
@@ -43,7 +43,7 @@ public class TaskAttemptCounterAPIEntity extends JobBaseAPIEntity {
 
     public void setKilledCount(int killedCount) {
         this.killedCount = killedCount;
-        _pcs.firePropertyChange("killedCount", null, null);
+        pcs.firePropertyChange("killedCount", null, null);
     }
 
     public int getFailedCount() {
@@ -52,7 +52,7 @@ public class TaskAttemptCounterAPIEntity extends JobBaseAPIEntity {
 
     public void setFailedCount(int failedCount) {
         this.failedCount = failedCount;
-        _pcs.firePropertyChange("failedCount", null, null);
+        pcs.firePropertyChange("failedCount", null, null);
     }
 
     public int getTotalCount() {
@@ -61,6 +61,6 @@ public class TaskAttemptCounterAPIEntity extends JobBaseAPIEntity {
 
     public void setTotalCount(int totalCount) {
         this.totalCount = totalCount;
-        _pcs.firePropertyChange("totalCount", null, null);
+        pcs.firePropertyChange("totalCount", null, null);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskAttemptExecutionAPIEntity.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskAttemptExecutionAPIEntity.java b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskAttemptExecutionAPIEntity.java
index fd96828..d1210b9 100644
--- a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskAttemptExecutionAPIEntity.java
+++ b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskAttemptExecutionAPIEntity.java
@@ -55,7 +55,7 @@ public class TaskAttemptExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setTaskStatus(String taskStatus) {
         this.taskStatus = taskStatus;
-        _pcs.firePropertyChange("taskStatus", null, null);
+        pcs.firePropertyChange("taskStatus", null, null);
     }
 
     public long getStartTime() {
@@ -64,7 +64,7 @@ public class TaskAttemptExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setStartTime(long startTime) {
         this.startTime = startTime;
-        _pcs.firePropertyChange("startTime", null, null);
+        pcs.firePropertyChange("startTime", null, null);
     }
 
     public long getEndTime() {
@@ -73,7 +73,7 @@ public class TaskAttemptExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setEndTime(long endTime) {
         this.endTime = endTime;
-        _pcs.firePropertyChange("endTime", null, null);
+        pcs.firePropertyChange("endTime", null, null);
     }
 
     public long getDuration() {
@@ -82,7 +82,7 @@ public class TaskAttemptExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setDuration(long duration) {
         this.duration = duration;
-        _pcs.firePropertyChange("duration", null, null);
+        pcs.firePropertyChange("duration", null, null);
     }
 
     public String getError() {
@@ -91,7 +91,7 @@ public class TaskAttemptExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setError(String error) {
         this.error = error;
-        _pcs.firePropertyChange("error", null, null);
+        pcs.firePropertyChange("error", null, null);
     }
 
     public JobCounters getJobCounters() {
@@ -100,7 +100,7 @@ public class TaskAttemptExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setJobCounters(JobCounters jobCounters) {
         this.jobCounters = jobCounters;
-        _pcs.firePropertyChange("jobCounters", null, null);
+        pcs.firePropertyChange("jobCounters", null, null);
     }
 
     public String getTaskAttemptID() {
@@ -109,6 +109,6 @@ public class TaskAttemptExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setTaskAttemptID(String taskAttemptID) {
         this.taskAttemptID = taskAttemptID;
-        _pcs.firePropertyChange("taskAttemptID", null, null);
+        pcs.firePropertyChange("taskAttemptID", null, null);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskExecutionAPIEntity.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskExecutionAPIEntity.java b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskExecutionAPIEntity.java
index bf559d4..d573da3 100644
--- a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskExecutionAPIEntity.java
+++ b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskExecutionAPIEntity.java
@@ -53,7 +53,7 @@ public class TaskExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setTaskStatus(String taskStatus) {
         this.taskStatus = taskStatus;
-        _pcs.firePropertyChange("taskStatus", null, null);
+        pcs.firePropertyChange("taskStatus", null, null);
     }
 
     public long getStartTime() {
@@ -62,7 +62,7 @@ public class TaskExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setStartTime(long startTime) {
         this.startTime = startTime;
-        _pcs.firePropertyChange("startTime", null, null);
+        pcs.firePropertyChange("startTime", null, null);
     }
 
     public long getEndTime() {
@@ -71,7 +71,7 @@ public class TaskExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setEndTime(long endTime) {
         this.endTime = endTime;
-        _pcs.firePropertyChange("endTime", null, null);
+        pcs.firePropertyChange("endTime", null, null);
     }
 
     public long getDuration() {
@@ -80,7 +80,7 @@ public class TaskExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setDuration(long duration) {
         this.duration = duration;
-        _pcs.firePropertyChange("duration", null, null);
+        pcs.firePropertyChange("duration", null, null);
     }
 
     public String getError() {
@@ -89,7 +89,7 @@ public class TaskExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setError(String error) {
         this.error = error;
-        _pcs.firePropertyChange("error", null, null);
+        pcs.firePropertyChange("error", null, null);
     }
 
     public JobCounters getJobCounters() {
@@ -98,6 +98,6 @@ public class TaskExecutionAPIEntity extends JobBaseAPIEntity {
 
     public void setJobCounters(JobCounters jobCounters) {
         this.jobCounters = jobCounters;
-        _pcs.firePropertyChange("jobCounters", null, null);
+        pcs.firePropertyChange("jobCounters", null, null);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskFailureCountAPIEntity.java
----------------------------------------------------------------------
diff --git a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskFailureCountAPIEntity.java b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskFailureCountAPIEntity.java
index 31f96da..fe41979 100644
--- a/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskFailureCountAPIEntity.java
+++ b/eagle-jpm/eagle-jpm-entity/src/main/java/org/apache/eagle/jpm/mr/historyentity/TaskFailureCountAPIEntity.java
@@ -43,7 +43,7 @@ public class TaskFailureCountAPIEntity extends JobBaseAPIEntity {
 
     public void setTaskStatus(String taskStatus) {
         this.taskStatus = taskStatus;
-        _pcs.firePropertyChange("taskStatus", null, null);
+        pcs.firePropertyChange("taskStatus", null, null);
     }
 
     public String getError() {
@@ -52,7 +52,7 @@ public class TaskFailureCountAPIEntity extends JobBaseAPIEntity {
 
     public void setError(String error) {
         this.error = error;
-        _pcs.firePropertyChange("error", null, null);
+        pcs.firePropertyChange("error", null, null);
     }
 
     public int getFailureCount() {
@@ -61,6 +61,6 @@ public class TaskFailureCountAPIEntity extends JobBaseAPIEntity {
 
     public void setFailureCount(int failureCount) {
         this.failureCount = failureCount;
-        _pcs.firePropertyChange("failureCount", null, null);
+        pcs.firePropertyChange("failureCount", null, null);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/rest/HDFSResourceWebResource.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/rest/HDFSResourceWebResource.java b/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/rest/HDFSResourceWebResource.java
index 7e8a804..207389a 100644
--- a/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/rest/HDFSResourceWebResource.java
+++ b/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/rest/HDFSResourceWebResource.java
@@ -44,61 +44,59 @@ import org.slf4j.LoggerFactory;
 
 import org.apache.eagle.service.security.hdfs.HDFSFileSystem;
 
-
 /**
  * REST Web Service to browse files and Paths in HDFS
  */
 @Path(HDFSResourceConstants.HDFS_RESOURCE)
 public class HDFSResourceWebResource {
-	private static Logger LOG = LoggerFactory.getLogger(HDFSResourceWebResource.class);
-	final public static String HDFS_APPLICATION = "HdfsAuditLogApplication";
-	private ApplicationEntityService entityService;
-	private ISecurityMetadataDAO dao;
+    private static Logger LOG = LoggerFactory.getLogger(HDFSResourceWebResource.class);
+    final public static String HDFS_APPLICATION = "HdfsAuditLogApplication";
+    private ApplicationEntityService entityService;
+    private ISecurityMetadataDAO dao;
 
-	@Inject
-	public HDFSResourceWebResource(ApplicationEntityService entityService, Config eagleServerConfig){
-		this.entityService = entityService;
-		dao = MetadataDaoFactory.getMetadataDAO(eagleServerConfig);
-	}
+    @Inject
+    public HDFSResourceWebResource(ApplicationEntityService entityService, Config eagleServerConfig) {
+        this.entityService = entityService;
+        dao = MetadataDaoFactory.getMetadataDAO(eagleServerConfig);
+    }
 
-	@GET
-	@Consumes(MediaType.APPLICATION_JSON)
-	@Produces(MediaType.APPLICATION_JSON)
-	public HDFSResourceWebResponse  getHDFSResource( @QueryParam("site") String site , @QueryParam("path") String filePath )
-	{
-		LOG.info("Starting HDFS Resource Browsing.  Query Parameters ==> Site :"+site+"  Path : "+filePath );
-		HDFSResourceWebResponse response = new HDFSResourceWebResponse();
-		HDFSResourceWebRequestValidator validator = new HDFSResourceWebRequestValidator();
-		List<FileStatusEntity> result = new ArrayList<>();
-		List<FileStatus> fileStatuses = null;
-		try {
-			validator.validate(site, filePath); // First Step would be validating Request
-			Map<String, Object> config = getAppConfig(site, HDFS_APPLICATION);
-			Configuration conf = convert(config);
-			HDFSFileSystem fileSystem = new HDFSFileSystem(conf);
-			fileStatuses = fileSystem.browse(filePath);
-			// Join with File Sensitivity Info
-			HDFSResourceSensitivityDataJoiner joiner = new HDFSResourceSensitivityDataJoiner(dao);
-			result = joiner.joinFileSensitivity(site, fileStatuses);
-			LOG.info("Successfully browsed files in HDFS .");
-		} catch( Exception ex ) {
-			response.setException(EagleExceptionWrapper.wrap(ex));
-			LOG.error(" Exception When browsing Files for the HDFS Path  :"+filePath+"  " , ex);
-		}
-		response.setObj(result);
-		return response;
-	}
+    @GET
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_JSON)
+    public HDFSResourceWebResponse getHDFSResource(@QueryParam("site") String site, @QueryParam("path") String filePath) {
+        LOG.info("Starting HDFS Resource Browsing.  Query Parameters ==> Site :" + site + "  Path : " + filePath);
+        HDFSResourceWebResponse response = new HDFSResourceWebResponse();
+        HDFSResourceWebRequestValidator validator = new HDFSResourceWebRequestValidator();
+        List<FileStatusEntity> result = new ArrayList<>();
+        List<FileStatus> fileStatuses = null;
+        try {
+            validator.validate(site, filePath); // First Step would be validating Request
+            Map<String, Object> config = getAppConfig(site, HDFS_APPLICATION);
+            Configuration conf = convert(config);
+            HDFSFileSystem fileSystem = new HDFSFileSystem(conf);
+            fileStatuses = fileSystem.browse(filePath);
+            // Join with File Sensitivity Info
+            HDFSResourceSensitivityDataJoiner joiner = new HDFSResourceSensitivityDataJoiner(dao);
+            result = joiner.joinFileSensitivity(site, fileStatuses);
+            LOG.info("Successfully browsed files in HDFS .");
+        } catch (Exception ex) {
+            response.setException(EagleExceptionWrapper.wrap(ex));
+            LOG.error(" Exception When browsing Files for the HDFS Path  :" + filePath + "  ", ex);
+        }
+        response.setObj(result);
+        return response;
+    }
 
-	private Map<String, Object> getAppConfig(String site, String appType){
-		ApplicationEntity entity = entityService.getBySiteIdAndAppType(site, appType);
-		return entity.getConfiguration();
-	}
+    private Map<String, Object> getAppConfig(String site, String appType) {
+        ApplicationEntity entity = entityService.getBySiteIdAndAppType(site, appType);
+        return entity.getConfiguration();
+    }
 
-	private Configuration convert(Map<String, Object> originalConfig) throws Exception {
-		Configuration config = new Configuration();
-		for (Map.Entry<String, Object> entry : originalConfig.entrySet()) {
-			config.set(entry.getKey().toString(), entry.getValue().toString());
-		}
-		return config;
-	}
+    private Configuration convert(Map<String, Object> originalConfig) throws Exception {
+        Configuration config = new Configuration();
+        for (Map.Entry<String, Object> entry : originalConfig.entrySet()) {
+            config.set(entry.getKey().toString(), entry.getValue().toString());
+        }
+        return config;
+    }
 }
\ No newline at end of file
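
The reformatted resource above exposes a single GET endpoint that takes site and path query parameters and returns JSON. A hedged sketch of calling it with the standard JAX-RS 2.0 client follows; the base URL and the literal resource path are placeholders, since the real path is whatever HDFSResourceConstants.HDFS_RESOURCE resolves to.

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;
    import javax.ws.rs.core.MediaType;

    public class HdfsBrowseClientSketch {
        public static void main(String[] args) {
            Client client = ClientBuilder.newClient();
            try {
                String json = client
                        .target("http://localhost:9090/rest")  // assumed server base URL
                        .path("hdfsResource")                   // placeholder for HDFS_RESOURCE
                        .queryParam("site", "sandbox")
                        .queryParam("path", "/tmp")
                        .request(MediaType.APPLICATION_JSON)
                        .get(String.class);
                System.out.println(json);
            } finally {
                client.close();
            }
        }
    }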

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-server-assembly/pom.xml
----------------------------------------------------------------------
diff --git a/eagle-server-assembly/pom.xml b/eagle-server-assembly/pom.xml
index 5b5d524..5b9537b 100644
--- a/eagle-server-assembly/pom.xml
+++ b/eagle-server-assembly/pom.xml
@@ -25,7 +25,6 @@
         <artifactId>eagle-parent</artifactId>
         <version>0.5.0-incubating-SNAPSHOT</version>
     </parent>
-    <groupId>org.apache.eagle</groupId>
     <artifactId>eagle-server-assembly</artifactId>
     <name>eagle-server-assembly</name>
     <url>http://maven.apache.org</url>

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-server-assembly/src/main/bin/eagle-env.sh
----------------------------------------------------------------------
diff --git a/eagle-server-assembly/src/main/bin/eagle-env.sh b/eagle-server-assembly/src/main/bin/eagle-env.sh
index 1aa556d..b2c04f8 100644
--- a/eagle-server-assembly/src/main/bin/eagle-env.sh
+++ b/eagle-server-assembly/src/main/bin/eagle-env.sh
@@ -1,38 +1,26 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# set EAGLE_HOME
-export EAGLE_HOME=$(dirname $0)/..
-
-# EAGLE_SERVICE_HOST, default is `hostname -f`
-export EAGLE_SERVICE_HOST=localhost
-
-# EAGLE_SERVICE_PORT, default is 9099
-export EAGLE_SERVICE_PORT=9099
-
-# EAGLE_SERVICE_USER
-export EAGLE_SERVICE_USER=admin
-
-# EAGLE_SERVICE_PASSWORD
-export EAGLE_SERVICE_PASSWD=secret
-
-export EAGLE_CLASSPATH=$EAGLE_HOME/conf
-
-# Add eagle shared library jars
-for file in `ls $EAGLE_HOME/lib`; do
-	EAGLE_CLASSPATH=$EAGLE_CLASSPATH:$EAGLE_HOME/lib/$file
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# set EAGLE_HOME
+export EAGLE_HOME=$(dirname $0)/..
+
+export EAGLE_CLASSPATH=$EAGLE_HOME/conf
+
+# Add eagle shared library jars
+for file in `ls $EAGLE_HOME/lib`; do
+	EAGLE_CLASSPATH=$EAGLE_CLASSPATH:$EAGLE_HOME/lib/$file
 done
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-server-assembly/src/main/bin/eagle-service.sh
----------------------------------------------------------------------
diff --git a/eagle-server-assembly/src/main/bin/eagle-service.sh b/eagle-server-assembly/src/main/bin/eagle-service.sh
deleted file mode 100644
index 533fa2c..0000000
--- a/eagle-server-assembly/src/main/bin/eagle-service.sh
+++ /dev/null
@@ -1,108 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-function print_help() {
-	echo "Usage: $0 {start | stop | restart | status}"
-	exit 1
-}
-
-if [ $# != 1 ]
-then
-	print_help
-fi
-
-DIR=$(dirname $0)
-
-source ${DIR}/eagle-env.sh
-
-export JAVA_OPTS="-Xmx3072m -XX:MaxPermSize=1024m"
-
-PIDFILE="${DIR}/eagle-service.pid"
-
-CONFIGURATION_YML="${DIR}/../conf/configuration.yml"
-
-DEBUG_OPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005"
-
-PROGRAM="java -cp $EAGLE_CLASSPATH org.apache.eagle.server.ServerMain server ${CONFIGURATION_YML}"
-
-start() {
-    echo "Starting eagle service ..."
-	echo ${PROGRAM}
-	nohup ${PROGRAM} & echo $! > $PIDFILE
-	if [ $? != 0 ];then
-		echo "Error: failed starting"
-		exit 1
-	fi
-}
-
-stop() {
-    echo "Stopping eagle service ..."
-	if [[ ! -f $PIDFILE ]];then
-	    echo "Eagle service is not running"
-    	exit 1
-    fi
-
-    PID=`cat $PIDFILE`
-	kill $PID
-	if [ $? != 0 ];then
-		echo "Error: failed stopping"
-		exit 1
-	fi
-
-	rm ${PIDFILE}
-	echo "Stopping is completed"
-}
-
-case $1 in
-"start")
-    start;
-	;;
-"stop")
-    stop;
-	;;
-"restart")
-	echo "Restarting eagle service ..."
-    stop; sleep 1; start;
-	echo "Restarting is completed "
-	;;
-"status")
-	echo "Checking eagle service status ..."
-	if [[ -e ${PIDFILE} ]]; then
-	    PID=`cat $PIDFILE`
-	fi
-	if [[ -z ${PID} ]];then
-	    echo "Error: Eagle service is not running (missing PID)"
-	    exit 0
-	elif ps -p ${PID} > /dev/null; then
-	    echo "Eagle service is running with PID $PID"
-	    exit 0
-    else
-        echo "Eagle service is not running (tested PID: ${PID})"
-        exit 0
-    fi
-	;;
-*)
-	print_help
-	;;
-esac
-
-if [ $? != 0 ]; then
-	echo "Error: start failure"
-	exit 1
-fi
-
-exit 0
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/21187b55/eagle-server/src/main/java/org/apache/eagle/server/RESTExceptionMapper.java
----------------------------------------------------------------------
diff --git a/eagle-server/src/main/java/org/apache/eagle/server/RESTExceptionMapper.java b/eagle-server/src/main/java/org/apache/eagle/server/RESTExceptionMapper.java
index 64f9fe0..1799eff 100644
--- a/eagle-server/src/main/java/org/apache/eagle/server/RESTExceptionMapper.java
+++ b/eagle-server/src/main/java/org/apache/eagle/server/RESTExceptionMapper.java
@@ -16,14 +16,13 @@
  */
 package org.apache.eagle.server;
 
-import io.dropwizard.jersey.errors.LoggingExceptionMapper;
-import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.eagle.metadata.resource.RESTResponse;
+import io.dropwizard.jersey.errors.LoggingExceptionMapper;
 
+import java.util.concurrent.ThreadLocalRandom;
 import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-import java.util.concurrent.ThreadLocalRandom;
 
 public class RESTExceptionMapper extends LoggingExceptionMapper<Throwable> {
     @Override
@@ -33,6 +32,6 @@ public class RESTExceptionMapper extends LoggingExceptionMapper<Throwable> {
         if (throwable instanceof WebApplicationException) {
             return ((WebApplicationException) throwable).getResponse();
         }
-       return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new RESTResponse<>(throwable)).type(MediaType.APPLICATION_JSON_TYPE).build();
+        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new RESTResponse<>(throwable)).type(MediaType.APPLICATION_JSON_TYPE).build();
     }
 }
\ No newline at end of file
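
The mapper above extends Dropwizard's LoggingExceptionMapper and turns uncaught throwables into a JSON RESTResponse with status 500, while letting WebApplicationException keep its own response. A minimal sketch of how such a mapper is typically registered with a Dropwizard application is shown below; the application and configuration classes are placeholders, not Eagle's actual server bootstrap.

    import io.dropwizard.Application;
    import io.dropwizard.Configuration;
    import io.dropwizard.setup.Environment;
    import org.apache.eagle.server.RESTExceptionMapper;

    // Placeholder application class; registers the mapper with the Jersey environment.
    public class ExampleServerApplication extends Application<Configuration> {
        @Override
        public void run(Configuration configuration, Environment environment) {
            environment.jersey().register(new RESTExceptionMapper());
        }

        public static void main(String[] args) throws Exception {
            // Typically invoked as: java ... ExampleServerApplication server config.yml
            new ExampleServerApplication().run(args);
        }
    }

Once registered, responses produced by WebApplicationException pass through unchanged, and any other throwable is returned as an INTERNAL_SERVER_ERROR with an application/json body.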