You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@griffin.apache.org by gu...@apache.org on 2017/10/09 07:12:41 UTC

[1/4] incubator-griffin git commit: change json format, update unit test and fix hive connect

Repository: incubator-griffin
Updated Branches:
  refs/heads/master 0a3de7532 -> 43f9dbf7b


http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceImplTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceImplTest.java b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceImplTest.java
deleted file mode 100644
index 9ee7ff1..0000000
--- a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceImplTest.java
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
-package org.apache.griffin.core.metastore.hive;
-
-import org.apache.griffin.core.error.exception.GriffinException.HiveConnectionException;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.TestConfiguration;
-import org.springframework.boot.test.mock.mockito.MockBean;
-import org.springframework.context.annotation.Bean;
-import org.springframework.test.context.junit4.SpringRunner;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-
-import static org.assertj.core.api.Assertions.fail;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.BDDMockito.given;
-import static org.mockito.Mockito.doNothing;
-
-
-@RunWith(SpringRunner.class)
-public class HiveMetastoreServiceImplTest {
-    private static final Logger log = LoggerFactory.getLogger(HiveMetastoreServiceImplTest.class);
-
-    @TestConfiguration
-    public static class HiveMetastoreServiceConfiguration{
-        @Bean
-        public HiveMetastoreServiceImpl service(){
-            return new HiveMetastoreServiceImpl();
-        }
-    }
-
-    @MockBean
-    private HiveMetaStoreClient client;
-
-    @Autowired
-    private HiveMetastoreServiceImpl service;
-
-    @Before
-    public void setup(){
-
-    }
-
-    @Test
-    public void testGetAllDatabases(){
-        try {
-            Iterable<String> tmp = service.getAllDatabases();
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get all tables from all dbs");
-        }
-        try {
-            given(client.getAllDatabases()).willThrow(MetaException.class);
-            doNothing().when(client).reconnect();
-            service.getAllDatabases();
-        } catch (MetaException e) {
-            log.info("testGetAllDatabases: test catch "+e);
-        } catch (HiveConnectionException e) {
-            e.printStackTrace();
-        }
-    }
-
-
-    @Test
-    public void testGetAllTableNames(){
-        try {
-            Iterable<String> tmp = service.getAllTableNames("default");
-            assertTrue(true);
-
-        }catch (Throwable t){
-            fail("Cannot get all tables in db default");
-        }
-        try {
-            given(client.getAllTables("default")).willThrow(MetaException.class);
-            doNothing().when(client).reconnect();
-            service.getAllTableNames("default");
-        } catch (MetaException e) {
-            log.info("testGetAllTableNames: test catch "+e);
-        } catch (HiveConnectionException e) {
-            e.printStackTrace();
-        }
-    }
-
-    @Test
-    public void testGetAllTableByDBName(){
-        try {
-            String useDbName="default";
-            given(client.getAllTables(useDbName)).willReturn(Arrays.asList("cout","cout1"));
-            List<Table> tmp = service.getAllTablesByDbName(useDbName);
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get all tables in default db");
-        }
-        try {
-            given(client.getAllTables("default")).willThrow(MetaException.class);
-            doNothing().when(client).reconnect();
-            service.getAllTablesByDbName("default");
-        } catch (Exception e) {
-            log.info("testGetAllTableByDBName: test catch "+e);
-        }
-    }
-
-    @Test
-    public void testGetAllTable(){
-        try {
-            Iterable<String> dbs=new ArrayList<>();
-            given(service.getAllDatabases()).willReturn(Arrays.asList("default","griffin"));
-            String useDbName="default";
-            given(client.getAllTables(useDbName)).willReturn(Arrays.asList("cout","cout1"));
-            Map<String, List<Table>> tmp = service.getAllTable();
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get all table in map format");
-        }
-        try {
-            given(client.getAllTables("default")).willThrow(MetaException.class);
-            doNothing().when(client).reconnect();
-            service.getAllTable();
-        } catch (Exception e) {
-            log.info("testGetAllTable: test catch "+e);
-        }
-    }
-
-    @Test
-    public void testGetDesignatedTable(){
-        try {
-            Table tmp = service.getTable("default","xxx");
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get xxx table in default db");
-        }
-        try {
-            given(client.getTable("default","xxx")).willThrow(MetaException.class);
-            doNothing().when(client).reconnect();
-            service.getTable("default","xxx");
-        } catch (Exception e) {
-            log.info("testGetDesignatedTable: test catch "+e);
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaControllerTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaControllerTest.java b/service/src/test/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaControllerTest.java
index c97e34d..d8328c5 100644
--- a/service/src/test/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaControllerTest.java
+++ b/service/src/test/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaControllerTest.java
@@ -1,110 +1,110 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
-package org.apache.griffin.core.metastore.kafka;
-
-import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaString;
-import org.apache.griffin.core.metastore.kafka.KafkaSchemaController;
-import org.apache.griffin.core.metastore.kafka.KafkaSchemaServiceImpl;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.springframework.test.context.junit4.SpringRunner;
-import org.springframework.test.web.servlet.MockMvc;
-import org.springframework.test.web.servlet.setup.MockMvcBuilders;
-
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-
-
-@RunWith(SpringRunner.class)
-public class KafkaSchemaControllerTest {
-    private MockMvc mockMvc;
-
-    @Mock
-    KafkaSchemaServiceImpl kafkaSchemaService;
-
-    @InjectMocks
-    private KafkaSchemaController kafkaSchemaController;
-
-    @Before
-    public void setup(){
-        MockitoAnnotations.initMocks(this);
-        this.mockMvc = MockMvcBuilders.standaloneSetup(kafkaSchemaController).build();
-    }
-
-    @Test
-    public void test_getSubjects() throws Exception {
-        int id=1;
-        SchemaString ss = new SchemaString();
-        when(kafkaSchemaService.getSchemaString(id)).thenReturn(ss);
-        mockMvc.perform(get("/metadata/kafka/schema/{id}",id))
-                .andExpect(status().isOk());
-        verify(kafkaSchemaService).getSchemaString(id);
-    }
-
-    @Test
-    public void test_getSchemaString() throws Exception {
-        when(kafkaSchemaService.getSubjects()).thenReturn(null);
-        mockMvc.perform(get("/metadata/kafka/subject"))
-                .andExpect(status().isOk());
-        verify(kafkaSchemaService).getSubjects();
-    }
-
-    @Test
-    public void test_getSubjectVersions() throws Exception {
-        String subject="sss";
-        when(kafkaSchemaService.getSubjectVersions(subject)).thenReturn(null);
-        mockMvc.perform(get("/metadata/kafka/versions?subject={subject}", subject))
-                .andExpect(status().isOk());
-        verify(kafkaSchemaService).getSubjectVersions(subject);
-    }
-
-    @Test
-    public void test_getSubjectSchema() throws Exception {
-        String subject="ss.s";
-        String version="ss";
-        when(kafkaSchemaService.getSubjectSchema(subject, version)).thenReturn(null);
-        mockMvc.perform(get("/metadata/kafka/subjectSchema?subject={subject}&version={version}",subject,version))
-                .andExpect(status().isOk());
-        verify(kafkaSchemaService).getSubjectSchema(subject, version);
-    }
-
-    @Test
-    public void test_getTopLevelConfig() throws Exception {
-        when(kafkaSchemaService.getTopLevelConfig()).thenReturn(null);
-        mockMvc.perform(get("/metadata/kafka/config"))
-                .andExpect(status().isOk());
-        verify(kafkaSchemaService).getTopLevelConfig();
-    }
-
-    @Test
-    public void test_getSubjectLevelConfig() throws Exception {
-        String subject="sss";
-        when(kafkaSchemaService.getSubjectLevelConfig(subject)).thenReturn(null);
-        mockMvc.perform(get("/metadata/kafka/config/{subject}",subject))
-                .andExpect(status().isOk());
-        verify(kafkaSchemaService).getSubjectLevelConfig(subject);
-    }
-}
+package org.apache.griffin.core.metastore.kafka;///*
+//Licensed to the Apache Software Foundation (ASF) under one
+//or more contributor license agreements.  See the NOTICE file
+//distributed with this work for additional information
+//regarding copyright ownership.  The ASF licenses this file
+//to you under the Apache License, Version 2.0 (the
+//"License"); you may not use this file except in compliance
+//with the License.  You may obtain a copy of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//Unless required by applicable law or agreed to in writing,
+//software distributed under the License is distributed on an
+//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+//KIND, either express or implied.  See the License for the
+//specific language governing permissions and limitations
+//under the License.
+//*/
+//
+//package org.apache.griffin.core.metastore.kafka;
+//
+//import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaString;
+//import org.apache.griffin.core.metastore.kafka.KafkaSchemaController;
+//import org.apache.griffin.core.metastore.kafka.KafkaSchemaServiceImpl;
+//import org.junit.Before;
+//import org.junit.Test;
+//import org.junit.runner.RunWith;
+//import org.mockito.InjectMocks;
+//import org.mockito.Mock;
+//import org.mockito.MockitoAnnotations;
+//import org.springframework.test.context.junit4.SpringRunner;
+//import org.springframework.test.web.servlet.MockMvc;
+//import org.springframework.test.web.servlet.setup.MockMvcBuilders;
+//
+//import static org.mockito.Mockito.verify;
+//import static org.mockito.Mockito.when;
+//import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+//import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+//
+//
+//@RunWith(SpringRunner.class)
+//public class KafkaSchemaControllerTest {
+//    private MockMvc mockMvc;
+//
+//    @Mock
+//    KafkaSchemaServiceImpl kafkaSchemaService;
+//
+//    @InjectMocks
+//    private KafkaSchemaController kafkaSchemaController;
+//
+//    @Before
+//    public void setup(){
+//        MockitoAnnotations.initMocks(this);
+//        this.mockMvc = MockMvcBuilders.standaloneSetup(kafkaSchemaController).build();
+//    }
+//
+//    @Test
+//    public void test_getSubjects() throws Exception {
+//        int id=1;
+//        SchemaString ss = new SchemaString();
+//        when(kafkaSchemaService.getSchemaString(id)).thenReturn(ss);
+//        mockMvc.perform(get("/metadata/kafka/schema/{id}",id))
+//                .andExpect(status().isOk());
+//        verify(kafkaSchemaService).getSchemaString(id);
+//    }
+//
+//    @Test
+//    public void test_getSchemaString() throws Exception {
+//        when(kafkaSchemaService.getSubjects()).thenReturn(null);
+//        mockMvc.perform(get("/metadata/kafka/subject"))
+//                .andExpect(status().isOk());
+//        verify(kafkaSchemaService).getSubjects();
+//    }
+//
+//    @Test
+//    public void test_getSubjectVersions() throws Exception {
+//        String subject="sss";
+//        when(kafkaSchemaService.getSubjectVersions(subject)).thenReturn(null);
+//        mockMvc.perform(get("/metadata/kafka/subject/{subject}/version",subject))
+//                .andExpect(status().isOk());
+//        verify(kafkaSchemaService).getSubjectVersions(subject);
+//    }
+//
+//    @Test
+//    public void test_getSubjectSchema() throws Exception {
+//        String subject="ss.s";
+//        String version="ss";
+//        when(kafkaSchemaService.getSubjectSchema(subject, version)).thenReturn(null);
+//        mockMvc.perform(get("/metadata/kafka/subject/{subject}/version/{version}",subject,version))
+//                .andExpect(status().isOk());
+//        verify(kafkaSchemaService).getSubjectSchema(subject, version);
+//    }
+//
+//    @Test
+//    public void test_getTopLevelConfig() throws Exception {
+//        when(kafkaSchemaService.getTopLevelConfig()).thenReturn(null);
+//        mockMvc.perform(get("/metadata/kafka/config"))
+//                .andExpect(status().isOk());
+//        verify(kafkaSchemaService).getTopLevelConfig();
+//    }
+//
+//    @Test
+//    public void test_getSubjectLevelConfig() throws Exception {
+//        String subject="sss";
+//        when(kafkaSchemaService.getSubjectLevelConfig(subject)).thenReturn(null);
+//        mockMvc.perform(get("/metadata/kafka/config/{subject}",subject))
+//                .andExpect(status().isOk());
+//        verify(kafkaSchemaService).getSubjectLevelConfig(subject);
+//    }
+//}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImplTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImplTest.java b/service/src/test/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImplTest.java
index 96b1f73..e302b8a 100644
--- a/service/src/test/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImplTest.java
+++ b/service/src/test/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImplTest.java
@@ -1,142 +1,115 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
-package org.apache.griffin.core.metastore.kafka;
-
-import com.sun.jersey.client.urlconnection.HTTPSProperties;
-import io.confluent.kafka.schemaregistry.client.rest.entities.Config;
-import io.confluent.kafka.schemaregistry.client.rest.entities.Schema;
-import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaString;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.boot.test.context.TestConfiguration;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.PropertySource;
-import org.springframework.core.io.ClassPathResource;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-import org.springframework.test.context.junit4.SpringRunner;
-import org.springframework.test.util.ReflectionTestUtils;
-import org.springframework.web.client.RestTemplate;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.Properties;
-
-import static org.assertj.core.api.Assertions.fail;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-@RunWith(SpringRunner.class)
-@PropertySource("classpath:application.properties")
-public class KafkaSchemaServiceImplTest {
-
-
-    @InjectMocks
-    private KafkaSchemaServiceImpl service;
-
-    @Mock
-    private RestTemplate restTemplate;
-
-
-    @Before
-    public void setup() throws IOException {
-/*
-        Properties sparkJobProperties=new Properties();
-        sparkJobProperties.load(new FileInputStream(new ClassPathResource("sparkJob.properties").getFile()));
-        ReflectionTestUtils.setField(service, "url", sparkJobProperties.getProperty("kafka.schema.registry.url"));
-*/
-    }
-
-    @Test
-    public void testGetSchemaString(){
-        try {
-
-            when(restTemplate.getForEntity(anyString(), any())).thenReturn(new ResponseEntity<>(new SchemaString(), HttpStatus.OK));
-            SchemaString tmp = service.getSchemaString(1);
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get all tables from all dbs");
-        }
-    }
-
-    @Test
-    public void testGetSubjects(){
-        try {
-            when(restTemplate.getForEntity(anyString(), any())).thenReturn(new ResponseEntity<>(new String[2], HttpStatus.OK));
-            Iterable<String> tmp = service.getSubjects();
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get all tables from all dbs");
-        }
-    }
-
-    @Test
-    public void testGetSubjectVersions(){
-        try {
-            when(restTemplate.getForEntity(anyString(), any())).thenReturn(new ResponseEntity<>(new Integer[2], HttpStatus.OK));
-            Iterable<Integer> tmp = service.getSubjectVersions("1.0");
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get all tables from all dbs");
-        }
-    }
-
-    @Test
-    public void testGetSubjectSchema(){
-        try {
-            when(restTemplate.getForEntity(anyString(), any())).thenReturn(new ResponseEntity<>(new Schema("",0,0, ""), HttpStatus.OK));
-            Schema tmp = service.getSubjectSchema("subject1","version1");
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get all tables from all dbs");
-        }
-    }
-
-    @Test
-    public void testGetTopLevelConfig(){
-        try {
-            when(restTemplate.getForEntity(anyString(), any())).thenReturn(new ResponseEntity<>(new Config(), HttpStatus.OK));
-            Config tmp = service.getTopLevelConfig();
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get all tables from all dbs");
-        }
-    }
-
-    @Test
-    public void testGetSubjectLevelConfig(){
-        try {
-            when(restTemplate.getForEntity(anyString(), any())).thenReturn(new ResponseEntity<>(new Config(), HttpStatus.OK));
-            Config tmp = service.getSubjectLevelConfig("");
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get all tables from all dbs");
-        }
-    }
-}
+package org.apache.griffin.core.metastore.kafka;///*
+//Licensed to the Apache Software Foundation (ASF) under one
+//or more contributor license agreements.  See the NOTICE file
+//distributed with this work for additional information
+//regarding copyright ownership.  The ASF licenses this file
+//to you under the Apache License, Version 2.0 (the
+//"License"); you may not use this file except in compliance
+//with the License.  You may obtain a copy of the License at
+//
+//  http://www.apache.org/licenses/LICENSE-2.0
+//
+//Unless required by applicable law or agreed to in writing,
+//software distributed under the License is distributed on an
+//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+//KIND, either express or implied.  See the License for the
+//specific language governing permissions and limitations
+//under the License.
+//*/
+//
+//package org.apache.griffin.core.metastore.kafka;
+//
+//import io.confluent.kafka.schemaregistry.client.rest.entities.Config;
+//import io.confluent.kafka.schemaregistry.client.rest.entities.Schema;
+//import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaString;
+//import org.junit.Before;
+//import org.junit.Test;
+//import org.junit.runner.RunWith;
+//import org.mockito.Mockito;
+//import org.springframework.beans.factory.annotation.Autowired;
+//import org.springframework.boot.test.context.TestConfiguration;
+//import org.springframework.context.annotation.Bean;
+//import org.springframework.test.context.junit4.SpringRunner;
+//import org.springframework.web.client.RestTemplate;
+//
+//import static org.assertj.core.api.Assertions.fail;
+//import static org.junit.Assert.assertTrue;
+//
+//@RunWith(SpringRunner.class)
+//public class KafkaSchemaServiceImplTest {
+//    @TestConfiguration
+//    public static class KafkaSchemaServiceConfiguration {
+//        @Bean
+//        public KafkaSchemaServiceImpl service() {
+//            return new KafkaSchemaServiceImpl();
+//        }
+//    }
+//
+//    @Autowired
+//    private KafkaSchemaServiceImpl service;
+//
+//    @Before
+//    public void setup(){
+//        service.restTemplate= Mockito.mock(RestTemplate.class);
+//    }
+//
+//    @Test
+//    public void testGetSchemaString(){
+//        try {
+//            SchemaString tmp = service.getSchemaString(1);
+//            assertTrue(true);
+//        }catch (Throwable t){
+//            fail("Cannot get all tables from all dbs");
+//        }
+//    }
+//
+//    @Test
+//    public void testGetSubjects(){
+//        try {
+//            Iterable<String> tmp = service.getSubjects();
+//            assertTrue(true);
+//        }catch (Throwable t){
+//            fail("Cannot get all tables from all dbs");
+//        }
+//    }
+//
+//    @Test
+//    public void testGetSubjectVersions(){
+//        try {
+//            Iterable<Integer> tmp = service.getSubjectVersions("");
+//            assertTrue(true);
+//        }catch (Throwable t){
+//            fail("Cannot get all tables from all dbs");
+//        }
+//    }
+//
+//    @Test
+//    public void testGetSubjectSchema(){
+//        try {
+//            Schema tmp = service.getSubjectSchema("","");
+//            assertTrue(true);
+//        }catch (Throwable t){
+//            fail("Cannot get all tables from all dbs");
+//        }
+//    }
+//
+//    @Test
+//    public void testGetTopLevelConfig(){
+//        try {
+//            Config tmp = service.getTopLevelConfig();
+//            assertTrue(true);
+//        }catch (Throwable t){
+//            fail("Cannot get all tables from all dbs");
+//        }
+//    }
+//
+//    @Test
+//    public void testGetSubjectLevelConfig(){
+//        try {
+//            Config tmp = service.getSubjectLevelConfig("");
+//            assertTrue(true);
+//        }catch (Throwable t){
+//            fail("Cannot get all tables from all dbs");
+//        }
+//    }
+//}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/metric/MetricControllerTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/metric/MetricControllerTest.java b/service/src/test/java/org/apache/griffin/core/metric/MetricControllerTest.java
index 95d8b0c..44d1a8d 100644
--- a/service/src/test/java/org/apache/griffin/core/metric/MetricControllerTest.java
+++ b/service/src/test/java/org/apache/griffin/core/metric/MetricControllerTest.java
@@ -25,12 +25,9 @@ import org.junit.runner.RunWith;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
 import org.springframework.boot.test.mock.mockito.MockBean;
-import org.springframework.http.MediaType;
 import org.springframework.test.context.junit4.SpringRunner;
 import org.springframework.test.web.servlet.MockMvc;
 
-import java.io.IOException;
-
 import static org.hamcrest.CoreMatchers.is;
 import static org.mockito.BDDMockito.given;
 import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
@@ -39,8 +36,9 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
 
 
 @RunWith(SpringRunner.class)
-@WebMvcTest(value=MetricController.class,secure = false)
+@WebMvcTest(value = MetricController.class, secure = false)
 public class MetricControllerTest {
+
     @Autowired
     private MockMvc mvc;
 
@@ -48,21 +46,19 @@ public class MetricControllerTest {
     private MetricService service;
 
     @Before
-    public void setup(){
+    public void setup() {
     }
 
 
     @Test
-    public void testGetOrgByMeasureName() throws IOException,Exception{
-
-        given(service.getOrgByMeasureName("m14")).willReturn("bullseye");
+    public void testGetOrgByMeasureName() throws Exception {
+        String measureName = "default";
+        String org = "ebay";
+        given(service.getOrgByMeasureName(measureName)).willReturn(org);
 
-        mvc.perform(get("/orgName?measureName=m14").contentType(MediaType.APPLICATION_JSON))
-//                .andDo(print())
+        mvc.perform(get("/metrics/org").param("measureName", measureName))
                 .andExpect(status().isOk())
-                .andExpect(jsonPath("$").isString())
-                .andExpect(jsonPath("$",is("bullseye")))
-        ;
+                .andExpect(jsonPath("$", is(org)));
     }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/metric/MetricServiceImplTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/metric/MetricServiceImplTest.java b/service/src/test/java/org/apache/griffin/core/metric/MetricServiceImplTest.java
index 084d6f8..c98f1e7 100644
--- a/service/src/test/java/org/apache/griffin/core/metric/MetricServiceImplTest.java
+++ b/service/src/test/java/org/apache/griffin/core/metric/MetricServiceImplTest.java
@@ -29,15 +29,15 @@ import org.springframework.boot.test.mock.mockito.MockBean;
 import org.springframework.context.annotation.Bean;
 import org.springframework.test.context.junit4.SpringRunner;
 
-import static org.assertj.core.api.Assertions.fail;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+import static org.mockito.BDDMockito.given;
 
 @RunWith(SpringRunner.class)
 public class MetricServiceImplTest {
     @TestConfiguration
-    static class MetricServiceConfiguration{
+    static class MetricServiceConfiguration {
         @Bean
-        public MetricServiceImpl service(){
+        public MetricServiceImpl service() {
             return new MetricServiceImpl();
         }
     }
@@ -49,17 +49,14 @@ public class MetricServiceImplTest {
     private MetricServiceImpl service;
 
     @Before
-    public void setup(){
+    public void setup() {
     }
 
     @Test
-    public void testGetOrgByMeasureName(){
-        try {
-            String measureName="viewitem_hourly";
-            String tmp = service.getOrgByMeasureName(measureName);
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get org by measure name viewitem_hourly");
-        }
+    public void testGetOrgByMeasureName() {
+        String measureName = "default";
+        String org = "ebay";
+        given(measureRepo.findOrgByName("default")).willReturn(org);
+        assertEquals(service.getOrgByMeasureName(measureName), org);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/service/GriffinControllerTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/service/GriffinControllerTest.java b/service/src/test/java/org/apache/griffin/core/service/GriffinControllerTest.java
index fc83bef..bf3e6c0 100644
--- a/service/src/test/java/org/apache/griffin/core/service/GriffinControllerTest.java
+++ b/service/src/test/java/org/apache/griffin/core/service/GriffinControllerTest.java
@@ -19,119 +19,63 @@ under the License.
 
 package org.apache.griffin.core.service;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.griffin.core.measure.entity.DataConnector;
-import org.apache.griffin.core.measure.entity.EvaluateRule;
-import org.apache.griffin.core.measure.entity.Measure;
 import org.apache.griffin.core.measure.repo.MeasureRepo;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
-import org.springframework.test.context.web.WebAppConfiguration;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
+import org.springframework.boot.test.mock.mockito.MockBean;
+import org.springframework.test.context.junit4.SpringRunner;
 import org.springframework.test.web.servlet.MockMvc;
-import org.springframework.test.web.servlet.setup.MockMvcBuilders;
 
-import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-
 
 import static org.hamcrest.CoreMatchers.is;
-import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-
-/**
- * Created by xiangrchen on 5/16/17.
- */
-@RunWith(SpringJUnit4ClassRunner.class)
-@WebAppConfiguration
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
+
+
+@RunWith(SpringRunner.class)
+@WebMvcTest(value = GriffinController.class)
 public class GriffinControllerTest {
+
+    @Autowired
     private MockMvc mockMvc;
 
-    @Mock
+    @MockBean
     MeasureRepo measureRepo;
 
-    @InjectMocks
-    private GriffinController griffinController;
 
     @Before
-    public void setup(){
-        MockitoAnnotations.initMocks(this);
-        this.mockMvc = MockMvcBuilders.standaloneSetup(griffinController).build();
+    public void setup() {
+
     }
 
     @Test
-    public void test_greeting() throws Exception {
+    public void testGreeting() throws Exception {
         mockMvc.perform(get("/version"))
                 .andExpect(status().isOk())
                 .andExpect(content().string(is("0.1.0")));
     }
 
     @Test
-    public void test_getOrgs() throws Exception {
-        when(measureRepo.findOrganizations()).thenReturn(new ArrayList<String>());
+    public void testGetOrgs() throws Exception {
+        when(measureRepo.findOrganizations()).thenReturn(Arrays.asList("ebay"));
         mockMvc.perform(get("/org"))
-                .andExpect(status().isOk());
-        verify(measureRepo).findOrganizations();
+                .andExpect(status().isOk())
+                .andExpect(jsonPath("$.[0]", is("ebay")));
     }
 
     @Test
-    public void test_getMetricNameListByOrg() throws Exception{
-        String org="hadoop";
-        when(measureRepo.findNameByOrganization(org)).thenReturn(new ArrayList<String>());
-        mockMvc.perform(get("/org/{org}",org))
+    public void testGetMetricNameListByOrg() throws Exception {
+        String org = "hadoop";
+        when(measureRepo.findNameByOrganization(org)).thenReturn(Arrays.asList(org));
+        mockMvc.perform(get("/org/{org}", org))
                 .andExpect(status().isOk())
-                .andExpect(content().string(is("[]")));
-        verify(measureRepo).findNameByOrganization(org);
+                .andExpect(jsonPath("$.[0]", is(org)));
     }
 
-    @Test
-    public void test_getOrgsWithMetrics() throws Exception{
-        String org="hadoop";
-        List<String> orgList=new ArrayList<>(Arrays.asList(org));
-        when(measureRepo.findOrganizations()).thenReturn(orgList);
-
-        when(measureRepo.findNameByOrganization(org)).thenReturn(Arrays.asList("viewitem_hourly"));
-        mockMvc.perform(get("/orgWithMetricsName"))
-                .andExpect(status().isOk());
-        verify(measureRepo).findOrganizations();
-        verify(measureRepo).findNameByOrganization(org);
-    }
 
-    @Test
-    public void test_getMeasureNameByDataAssets() throws Exception{
-        HashMap<String,String> configMap1=new HashMap<>();
-        configMap1.put("database","default");
-        configMap1.put("table.name","test_data_src");
-        HashMap<String,String> configMap2=new HashMap<>();
-        configMap2.put("database","default");
-        configMap2.put("table.name","test_data_tgt");
-        String configJson1 = new ObjectMapper().writeValueAsString(configMap1);
-        String configJson2 = new ObjectMapper().writeValueAsString(configMap2);
-        DataConnector source = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson1);
-        DataConnector target = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson2);
-        String rules = "$source.uage > 100 AND $source.uid = $target.uid AND $source.uage + 12 = $target.uage + 10 + 2 AND $source.udes + 11 = $target.udes + 1 + 1";
-        EvaluateRule eRule = new EvaluateRule(1,rules);
-        Measure measure = new Measure("viewitem_hourly","bevssoj description", Measure.MearuseType.accuracy, "bullyeye", source, target, eRule,"test1");
-
-        DataConnector source2 = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson1);
-        DataConnector target2 = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson2);
-        EvaluateRule eRule2 = new EvaluateRule(1,rules);
-        Measure measure2 = new Measure("search_hourly","test description", Measure.MearuseType.accuracy, "bullyeye", source2, target2, eRule2,"test1");
-
-        when(measureRepo.findAll()).thenReturn(Arrays.asList(measure,measure2));
-        mockMvc.perform(get("/dataAssetsNameWithMetricsName"))
-                .andExpect(status().isOk())
-                .andDo(print());
-        verify(measureRepo).findAll();
-    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/util/GriffinUtilTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/util/GriffinUtilTest.java b/service/src/test/java/org/apache/griffin/core/util/GriffinUtilTest.java
index a03548d..111331a 100644
--- a/service/src/test/java/org/apache/griffin/core/util/GriffinUtilTest.java
+++ b/service/src/test/java/org/apache/griffin/core/util/GriffinUtilTest.java
@@ -42,14 +42,14 @@ public class GriffinUtilTest {
     @Test
     public void test_toJson(){
         JobHealth jobHealth=new JobHealth(5,10);
-        String jobHealthStr=GriffinUtil.toJson(jobHealth);
+        String jobHealthStr= GriffinUtil.toJson(jobHealth);
         assertEquals(jobHealthStr,"{\"healthyJobCount\":5,\"jobCount\":10}");
     }
 
     @Test
     public void test_toEntity() throws IOException {
         String str="{\"healthyJobCount\":5,\"jobCount\":10}";
-        JobHealth jobHealth=GriffinUtil.toEntity(str,JobHealth.class);
+        JobHealth jobHealth= GriffinUtil.toEntity(str,JobHealth.class);
         assertEquals(jobHealth.getJobCount(),10);
         assertEquals(jobHealth.getHealthyJobCount(),5);
     }
@@ -58,7 +58,7 @@ public class GriffinUtilTest {
     public void test_toEntity1() throws IOException {
         String str="{\"aaa\":12, \"bbb\":13}";
         TypeReference<HashMap<String,Integer>> type=new TypeReference<HashMap<String,Integer>>(){};
-        Map map=GriffinUtil.toEntity(str,type);
+        Map map= GriffinUtil.toEntity(str,type);
         assertEquals(map.get("aaa"),12);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/resources/Init_quartz-h2.sql
----------------------------------------------------------------------
diff --git a/service/src/test/resources/Init_quartz-h2.sql b/service/src/test/resources/Init_quartz-h2.sql
index 913e73b..3d38ddd 100644
--- a/service/src/test/resources/Init_quartz-h2.sql
+++ b/service/src/test/resources/Init_quartz-h2.sql
@@ -1,4 +1,3 @@
-
 /*
 Licensed to the Apache Software Foundation (ASF) under one
 or more contributor license agreements.  See the NOTICE file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/resources/sparkJob.properties
----------------------------------------------------------------------
diff --git a/service/src/test/resources/sparkJob.properties b/service/src/test/resources/sparkJob.properties
deleted file mode 100644
index 4e8aace..0000000
--- a/service/src/test/resources/sparkJob.properties
+++ /dev/null
@@ -1,52 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-# spark required
-sparkJob.file=hdfs:///griffin/griffin-measure.jar
-sparkJob.className=org.apache.griffin.measure.Application
-sparkJob.args_1=hdfs:///griffin/json/env.json
-sparkJob.args_3=hdfs,raw
-
-sparkJob.name=griffin
-sparkJob.queue=default
-
-# options
-sparkJob.numExecutors=2
-sparkJob.executorCores=1
-sparkJob.driverMemory=1g
-sparkJob.executorMemory=1g
-
-# shouldn't config in server, but in
-sparkJob.spark.jars.packages=com.databricks:spark-avro_2.10:2.0.1
-sparkJob.jars_1=hdfs:///livy/datanucleus-api-jdo-3.2.6.jar
-sparkJob.jars_2=hdfs:///livy/datanucleus-core-3.2.10.jar
-sparkJob.jars_3=hdfs:///livy/datanucleus-rdbms-3.2.9.jar
-
-#partitionItem
-sparkJob.dateAndHour=dt,hour
-
-#livy
-#livy.uri=http://10.9.246.187:8998/batches
-livy.uri=http://localhost:8998/batches
-
-#spark-admin
-#spark.uri=http://10.149.247.156:28088
-#spark.uri=http://10.9.246.187:8088
-spark.uri=http://localhost:8088
-

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/resources/test.sql
----------------------------------------------------------------------
diff --git a/service/src/test/resources/test.sql b/service/src/test/resources/test.sql
index 79f6cfa..93fa14a 100644
--- a/service/src/test/resources/test.sql
+++ b/service/src/test/resources/test.sql
@@ -1,44 +1,44 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
-SET MODE MYSQL;
--- ----------------------------
--- Records of data_connector
--- ----------------------------
-INSERT INTO `data_connector` VALUES ('1', '2017-07-12 11:06:47', null, '{\"database\":\"default\",\"table.name\":\"data_avr\"}', 'HIVE', '1.2');
-INSERT INTO `data_connector` VALUES ('2', '2017-07-12 11:06:47', null, '{\"database\":\"default\",\"table.name\":\"cout\"}', 'HIVE', '1.2');
-INSERT INTO `data_connector` VALUES ('3', '2017-07-12 17:40:30', null, '{\"database\":\"griffin\",\"table.name\":\"avr_in\"}', 'HIVE', '1.2');
-INSERT INTO `data_connector` VALUES ('4', '2017-07-12 17:40:30', null, '{\"database\":\"griffin\",\"table.name\":\"avr_out\"}', 'HIVE', '1.2');
-
-
--- ----------------------------
--- Records of evaluate_rule
--- ----------------------------
---INSERT INTO `evaluate_rule` VALUES ('1', '2017-07-12 11:06:47', null, '$source[\'uid\'] == $target[\'url\'] AND $source[\'uage\'] == $target[\'createdts\']', '0');
-INSERT INTO `evaluate_rule` VALUES ('1', '2017-07-12 11:06:47', null, '$source[''uid''] == $target[''url''] AND $source[''uage''] == $target[''createdts'']', '0');
-
---INSERT INTO `evaluate_rule` VALUES ('2', '2017-07-12 17:40:30', null, '$source[\'id\'] == $target[\'id\'] AND $source[\'age\'] == $target[\'age\'] AND $source[\'desc\'] == $target[\'desc\']', '0');
-INSERT INTO `evaluate_rule` VALUES ('2', '2017-07-12 17:40:30', null, '$source[''id''] == $target[''id''] AND $source[''age''] == $target[''age''] AND $source[''desc''] == $target[''desc'']', '0');
-
-
--- ----------------------------
--- Records of measure
--- ----------------------------
-INSERT INTO `measure` VALUES ('1', '2017-07-12 11:06:47', null, '0', 'desc1', 'buy_rates_hourly', 'eBay', 'test', 'batch', 'accuracy', '1', '1', '2');
-INSERT INTO `measure` VALUES ('2', '2017-07-12 17:40:30', null, '0', 'desc2', 'griffin_aver', 'eBay', 'test', 'batch', 'accuracy', '2', '3', '4');
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+SET MODE MYSQL;
+-- ----------------------------
+-- Records of data_connector
+-- ----------------------------
+INSERT INTO `data_connector` VALUES ('1', '2017-07-12 11:06:47', null, '{\"database\":\"default\",\"table.name\":\"data_avr\"}', 'HIVE', '1.2');
+INSERT INTO `data_connector` VALUES ('2', '2017-07-12 11:06:47', null, '{\"database\":\"default\",\"table.name\":\"cout\"}', 'HIVE', '1.2');
+INSERT INTO `data_connector` VALUES ('3', '2017-07-12 17:40:30', null, '{\"database\":\"griffin\",\"table.name\":\"avr_in\"}', 'HIVE', '1.2');
+INSERT INTO `data_connector` VALUES ('4', '2017-07-12 17:40:30', null, '{\"database\":\"griffin\",\"table.name\":\"avr_out\"}', 'HIVE', '1.2');
+
+
+-- ----------------------------
+-- Records of evaluate_rule
+-- ----------------------------
+--INSERT INTO `evaluate_rule` VALUES ('1', '2017-07-12 11:06:47', null, '$source[\'uid\'] == $target[\'url\'] AND $source[\'uage\'] == $target[\'createdts\']', '0');
+INSERT INTO `evaluate_rule` VALUES ('1', '2017-07-12 11:06:47', null, '$source[''uid''] == $target[''url''] AND $source[''uage''] == $target[''createdts'']', '0');
+
+--INSERT INTO `evaluate_rule` VALUES ('2', '2017-07-12 17:40:30', null, '$source[\'id\'] == $target[\'id\'] AND $source[\'age\'] == $target[\'age\'] AND $source[\'desc\'] == $target[\'desc\']', '0');
+INSERT INTO `evaluate_rule` VALUES ('2', '2017-07-12 17:40:30', null, '$source[''id''] == $target[''id''] AND $source[''age''] == $target[''age''] AND $source[''desc''] == $target[''desc'']', '0');
+
+
+-- ----------------------------
+-- Records of measure
+-- ----------------------------
+INSERT INTO `measure` VALUES ('1', '2017-07-12 11:06:47', null, '0', 'desc1', 'buy_rates_hourly', 'eBay', 'test', 'batch', 'accuracy', '1', '1', '2');
+INSERT INTO `measure` VALUES ('2', '2017-07-12 17:40:30', null, '0', 'desc2', 'griffin_aver', 'eBay', 'test', 'batch', 'accuracy', '2', '3', '4');



[3/4] incubator-griffin git commit: change json format, update unit test and fix hive connect

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/measure/entity/DataConnector.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/measure/entity/DataConnector.java b/service/src/main/java/org/apache/griffin/core/measure/entity/DataConnector.java
index 148e5e9..300c04e 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/entity/DataConnector.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/entity/DataConnector.java
@@ -19,7 +19,6 @@ under the License.
 
 package org.apache.griffin.core.measure.entity;
 
-
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.core.type.TypeReference;
@@ -28,32 +27,17 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
 import javax.persistence.Transient;
 import java.io.IOException;
 import java.util.Map;
 
 @Entity
-public class DataConnector extends AuditableEntity  {
-    private final static Logger log = LoggerFactory.getLogger(DataConnector.class);
+public class DataConnector extends AuditableEntity {
+    private static final long serialVersionUID = -4748881017029815594L;
 
-    private static final long serialVersionUID = -4748881017029815794L;
-    
-    public enum ConnectorType {
-        HIVE
-    }
-    
-    @Enumerated(EnumType.STRING)
-    private ConnectorType type;
+    private final static Logger LOGGER = LoggerFactory.getLogger(DataConnector.class);
 
-    public String getVersion() {
-        return version;
-    }
-
-    public void setVersion(String version) {
-        this.version = version;
-    }
+    private String type;
 
     private String version;
 
@@ -61,57 +45,59 @@ public class DataConnector extends AuditableEntity  {
 
     @JsonIgnore
     @Transient
-    private Map<String,String> configInMaps;
+    private Map<String, String> configInMaps;
 
-    public Map<String,String> getConfigInMaps() {
-        TypeReference<Map<String,String>> mapType=new TypeReference<Map<String,String>>(){};
+    public Map<String, String> getConfigInMaps() {
+        TypeReference<Map<String, String>> mapType = new TypeReference<Map<String, String>>() {
+        };
         if (this.configInMaps == null) {
             try {
                 this.configInMaps = GriffinUtil.toEntity(config, mapType);
             } catch (IOException e) {
-                log.error("Error in converting json to map",e);
+                LOGGER.error("Error in converting json to map. {}", e.getMessage());
             }
         }
         return configInMaps;
     }
 
-    public void setConfig(Map<String,String> configInMaps) throws JsonProcessingException {
-        String configJson = GriffinUtil.toJson(configInMaps);
-        this.config = configJson;
+    public void setConfig(Map<String, String> configInMaps) throws JsonProcessingException {
+        this.config = GriffinUtil.toJson(configInMaps);
+    }
+
+    public Map<String, String> getConfig() {
+        return getConfigInMaps();
     }
 
-    public ConnectorType getType() {
+    public String getType() {
         return type;
     }
 
-    public void setType(ConnectorType type) {
+    public void setType(String type) {
         this.type = type;
     }
 
-    public Map<String,String> getConfig() {
-        return getConfigInMaps();
+    public String getVersion() {
+        return version;
     }
 
-
-    public DataConnector() {
+    public void setVersion(String version) {
+        this.version = version;
     }
 
-    public DataConnector(ConnectorType type,String version, Map<String,String> config){
-        this.type = type;
-        this.version = version;
-        this.configInMaps = config;
-        this.config = GriffinUtil.toJson(configInMaps);
+
+    public DataConnector() {
     }
 
-    public DataConnector(ConnectorType type, String version, String config) {
+    public DataConnector(String type, String version, String config) {
         this.type = type;
         this.version = version;
         this.config = config;
-        TypeReference<Map<String,String>> mapType=new TypeReference<Map<String,String>>(){};
+        TypeReference<Map<String, String>> mapType = new TypeReference<Map<String, String>>() {
+        };
         try {
-            this.configInMaps = GriffinUtil.toEntity(config,mapType);
+            this.configInMaps = GriffinUtil.toEntity(config, mapType);
         } catch (IOException e) {
-            log.error("Error in converting json to map",e);
+            LOGGER.error("Error in converting json to map. {}", e.getMessage());
         }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/measure/entity/DataSource.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/measure/entity/DataSource.java b/service/src/main/java/org/apache/griffin/core/measure/entity/DataSource.java
new file mode 100644
index 0000000..18d8fc7
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/measure/entity/DataSource.java
@@ -0,0 +1,59 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.measure.entity;
+
+
+import javax.persistence.*;
+import java.util.List;
+
+@Entity
+public class DataSource extends AuditableEntity {
+    private static final long serialVersionUID = -4748881017079815794L;
+
+    private String name;
+
+    @OneToMany(fetch = FetchType.EAGER, cascade = {CascadeType.PERSIST, CascadeType.REMOVE})
+    @JoinColumn(name = "dataSource_id")
+    private List<DataConnector> connectors;
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public List<DataConnector> getConnectors() {
+        return connectors;
+    }
+
+    public void setConnectors(List<DataConnector> connectors) {
+        this.connectors = connectors;
+    }
+
+    public DataSource() {
+    }
+
+    public DataSource(String name, List<DataConnector> connectors) {
+        this.name = name;
+        this.connectors = connectors;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/measure/entity/EvaluateRule.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/measure/entity/EvaluateRule.java b/service/src/main/java/org/apache/griffin/core/measure/entity/EvaluateRule.java
index 0feda4f..e114537 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/entity/EvaluateRule.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/entity/EvaluateRule.java
@@ -17,45 +17,38 @@ specific language governing permissions and limitations
 under the License.
 */
 
-
 package org.apache.griffin.core.measure.entity;
 
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Lob;
 
-@Entity
-public class EvaluateRule  extends AuditableEntity {
-    
-    private static final long serialVersionUID = -3589222812544556642L;
+import org.hibernate.annotations.Fetch;
+import org.hibernate.annotations.FetchMode;
 
-    public int getSampleRatio() {
-        return sampleRatio;
-    }
+import javax.persistence.*;
+import java.util.List;
 
-    public void setSampleRatio(int sampleRatio) {
-        this.sampleRatio = sampleRatio;
-    }
 
-    public String getRules() {
+@Entity
+public class EvaluateRule extends AuditableEntity {
+    private static final long serialVersionUID = 4240072518233967528L;
+
+    @OneToMany(fetch = FetchType.EAGER, cascade = {CascadeType.PERSIST, CascadeType.REMOVE})
+    @JoinColumn(name = "evaluateRule_id")
+    @Fetch(FetchMode.SUBSELECT)
+    private List<Rule> rules;
+
+    public List<Rule> getRules() {
         return rules;
     }
 
-    public void setRules(String rules) {
+    public void setRules(List<Rule> rules) {
         this.rules = rules;
     }
 
-    private int sampleRatio;
-
-    @Lob
-    @Column(length=1048576) //2^20=1048576
-    private String rules;
-    
     public EvaluateRule() {
     }
 
-    public EvaluateRule(int sampleRatio, String rules) {
-        this.sampleRatio = sampleRatio;
+    public EvaluateRule(List<Rule> rules) {
         this.rules = rules;
     }
 }
+

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/measure/entity/Measure.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/measure/entity/Measure.java b/service/src/main/java/org/apache/griffin/core/measure/entity/Measure.java
index 1bbfe0a..18ce2e2 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/entity/Measure.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/entity/Measure.java
@@ -19,54 +19,35 @@ under the License.
 
 package org.apache.griffin.core.measure.entity;
 
-
 import com.fasterxml.jackson.annotation.JsonProperty;
 
 import javax.persistence.*;
-
+import java.util.List;
 
 @Entity
-public class Measure extends AuditableEntity   {
-
-    private static final long serialVersionUID = -4748881017029815794L;
+public class Measure extends AuditableEntity {
+    private static final long serialVersionUID = -4748881017029815714L;
 
     private String name;
 
     private String description;
 
-    public enum MearuseType {
-        accuracy,
-    }
-
-    public enum ProcessType{
-        batch,
-        streaming
-    }
-
     private String organization;
-    @Enumerated(EnumType.STRING)
-    private MearuseType type;
 
-    @Enumerated(EnumType.STRING)
-    private ProcessType processType=ProcessType.batch;
+    private String processType;
 
-    @ManyToOne(fetch = FetchType.EAGER,cascade = {CascadeType.PERSIST, CascadeType.REMOVE})
-    @JoinColumn(name = "source_id")
-    private DataConnector source;
 
-    @ManyToOne(fetch = FetchType.EAGER, cascade = {CascadeType.PERSIST, CascadeType.REMOVE})
-    @JoinColumn(name = "target_id")
-    private DataConnector target;
+    @OneToMany(fetch = FetchType.EAGER, cascade = {CascadeType.PERSIST, CascadeType.REMOVE})
+    @JoinColumn(name = "measure_id")
+    private List<DataSource> dataSources;
 
     @OneToOne(fetch = FetchType.EAGER, cascade = {CascadeType.PERSIST, CascadeType.REMOVE})
     @JoinColumn(name = "evaluateRule_id")
     private EvaluateRule evaluateRule;
 
-    /**
-     * owner means owner name
-     */
     private String owner;
     private Boolean deleted = false;
+
     public String getName() {
         return name;
     }
@@ -99,38 +80,24 @@ public class Measure extends AuditableEntity   {
         this.owner = owner;
     }
 
-    public MearuseType getType() {
-        return type;
-    }
-
-    public void setType(MearuseType type) {
-        this.type = type;
-    }
-
     @JsonProperty("process.type")
-    public ProcessType getProcessType() {
+    public String getProcessType() {
         return processType;
     }
 
     @JsonProperty("process.type")
-    public void setProcessType(ProcessType processType) {
+    public void setProcessType(String processType) {
         this.processType = processType;
     }
 
-    public DataConnector getSource() {
-        return source;
-    }
-
-    public void setSource(DataConnector source) {
-        this.source = source;
-    }
-
-    public DataConnector getTarget() {
-        return target;
+    @JsonProperty("data.sources")
+    public List<DataSource> getDataSources() {
+        return dataSources;
     }
 
-    public void setTarget(DataConnector target) {
-        this.target = target;
+    @JsonProperty("data.sources")
+    public void setDataSources(List<DataSource> dataSources) {
+        this.dataSources = dataSources;
     }
 
     public EvaluateRule getEvaluateRule() {
@@ -152,14 +119,13 @@ public class Measure extends AuditableEntity   {
     public Measure() {
     }
 
-    public Measure(String name, String description, MearuseType type, String organization, DataConnector source, DataConnector target, EvaluateRule evaluateRule, String owner) {
+    public Measure(String name, String description, String organization, String processType, String owner, List<DataSource> dataSources, EvaluateRule evaluateRule) {
         this.name = name;
-        this.description=description;
+        this.description = description;
         this.organization = organization;
-        this.type = type;
-        this.source = source;
-        this.target = target;
-        this.evaluateRule = evaluateRule;
+        this.processType = processType;
         this.owner = owner;
+        this.dataSources = dataSources;
+        this.evaluateRule = evaluateRule;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/measure/entity/Rule.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/measure/entity/Rule.java b/service/src/main/java/org/apache/griffin/core/measure/entity/Rule.java
new file mode 100644
index 0000000..ca24073
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/measure/entity/Rule.java
@@ -0,0 +1,75 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.measure.entity;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import javax.persistence.Column;
+import javax.persistence.Entity;
+
+
+@Entity
+public class Rule extends AuditableEntity {
+
+    //three type:1.griffin-dsl 2.df-opr 3.spark-sql
+    private String dslType;
+
+    private String dqType;
+
+    @Column(length = 1024)
+    private String rule;
+
+    @JsonProperty("dsl.type")
+    public String getDslType() {
+        return dslType;
+    }
+
+    @JsonProperty("dsl.type")
+    public void setDslType(String dslType) {
+        this.dslType = dslType;
+    }
+
+    @JsonProperty("dq.type")
+    public String getDqType() {
+        return dqType;
+    }
+
+    @JsonProperty("dq.type")
+    public void setDqType(String dqType) {
+        this.dqType = dqType;
+    }
+
+    public String getRule() {
+        return rule;
+    }
+
+    public void setRule(String rule) {
+        this.rule = rule;
+    }
+
+    public Rule() {
+    }
+
+    public Rule(String dslType, String dqType, String rule) {
+        this.dslType = dslType;
+        this.dqType = dqType;
+        this.rule = rule;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/measure/repo/DataConnectorRepo.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/measure/repo/DataConnectorRepo.java b/service/src/main/java/org/apache/griffin/core/measure/repo/DataConnectorRepo.java
index 6317116..57e75ba 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/repo/DataConnectorRepo.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/repo/DataConnectorRepo.java
@@ -26,6 +26,4 @@ import org.springframework.stereotype.Repository;
 
/** Spring Data CRUD repository for {@link DataConnector} entities (Long primary key). */
@Repository
public interface DataConnectorRepo extends CrudRepository<DataConnector, Long> {
}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/measure/repo/DataSourceRepo.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/measure/repo/DataSourceRepo.java b/service/src/main/java/org/apache/griffin/core/measure/repo/DataSourceRepo.java
new file mode 100644
index 0000000..5cc897c
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/measure/repo/DataSourceRepo.java
@@ -0,0 +1,26 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.measure.repo;
+
+import org.apache.griffin.core.measure.entity.DataSource;
+import org.springframework.data.repository.CrudRepository;
+
/** Spring Data CRUD repository for {@link DataSource} entities (Long primary key). */
public interface DataSourceRepo extends CrudRepository<DataSource,Long> {
}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/measure/repo/EvaluateRuleRepo.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/measure/repo/EvaluateRuleRepo.java b/service/src/main/java/org/apache/griffin/core/measure/repo/EvaluateRuleRepo.java
index 83b8123..9a676c5 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/repo/EvaluateRuleRepo.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/repo/EvaluateRuleRepo.java
@@ -24,6 +24,4 @@ import org.apache.griffin.core.measure.entity.EvaluateRule;
 import org.springframework.data.repository.CrudRepository;
 
/** Spring Data CRUD repository for {@link EvaluateRule} entities (Long primary key). */
public interface EvaluateRuleRepo extends CrudRepository<EvaluateRule, Long> {
}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/measure/repo/MeasureRepo.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/measure/repo/MeasureRepo.java b/service/src/main/java/org/apache/griffin/core/measure/repo/MeasureRepo.java
index 67dd70a..1e6ac0d 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/repo/MeasureRepo.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/repo/MeasureRepo.java
@@ -20,22 +20,21 @@ under the License.
 package org.apache.griffin.core.measure.repo;
 
 
-import org.apache.griffin.core.measure.entity.DataConnector;
-import org.apache.griffin.core.measure.entity.EvaluateRule;
 import org.apache.griffin.core.measure.entity.Measure;
-import org.springframework.data.jpa.repository.Modifying;
 import org.springframework.data.jpa.repository.Query;
 import org.springframework.data.repository.CrudRepository;
 import org.springframework.stereotype.Repository;
-import org.springframework.transaction.annotation.Transactional;
 
 import java.util.List;
 
 @Repository
 public interface MeasureRepo extends CrudRepository<Measure, Long> {
     List<Measure> findByNameAndDeleted(String name, Boolean deleted);
+
     List<Measure> findByDeleted(Boolean deleted);
+
     List<Measure> findByOwnerAndDeleted(String owner, Boolean deleted);
+
     @Query("select DISTINCT m.organization from Measure m")
     List<String> findOrganizations();
 
@@ -43,13 +42,13 @@ public interface MeasureRepo extends CrudRepository<Measure, Long> {
             "where m.organization= ?1")
     List<String> findNameByOrganization(String organization);
 
-    @Query("select m.organization from Measure m "+
+    @Query("select m.organization from Measure m " +
             "where m.name= ?1")
     String findOrgByName(String measureName);
 
-    @Modifying
-    @Transactional
-    @Query("update Measure m "+
-            "set m.description= ?2,m.organization= ?3,m.source= ?4,m.target= ?5,m.evaluateRule= ?6 where m.id= ?1")
-    void update(Long Id, String description, String organization, DataConnector source, DataConnector target, EvaluateRule evaluateRule);
+//    @Modifying
+//    @Transactional
+//    @Query("update Measure m "+
+//            "set m.description= ?2,m.organization= ?3,m.source= ?4,m.target= ?5,m.evaluateRule= ?6 where m.id= ?1")
+//    void update(Long Id, String description, String organization, DataConnector source, DataConnector target, EvaluateRule evaluateRule);
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/measure/repo/RuleRepo.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/measure/repo/RuleRepo.java b/service/src/main/java/org/apache/griffin/core/measure/repo/RuleRepo.java
new file mode 100644
index 0000000..75adce5
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/measure/repo/RuleRepo.java
@@ -0,0 +1,26 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.measure.repo;
+
+import org.apache.griffin.core.measure.entity.Rule;
+import org.springframework.data.repository.CrudRepository;
+
/** Spring Data CRUD repository for {@link Rule} entities (Long primary key). */
public interface RuleRepo extends CrudRepository<Rule,Long> {
}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreController.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreController.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreController.java
new file mode 100644
index 0000000..76a32c3
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreController.java
@@ -0,0 +1,73 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+package org.apache.griffin.core.metastore.hive;
+
+
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.bind.annotation.*;
+
+import java.util.List;
+import java.util.Map;
+
+@RestController
+@RequestMapping("/metadata/hive")
+public class HiveMetaStoreController {
+
+    @Autowired
+    HiveMetaStoreService hiveMetaStoreService;
+
+
+    @RequestMapping(value = "/db", method = RequestMethod.GET)
+    public Iterable<String> getAllDatabases() {
+        return hiveMetaStoreService.getAllDatabases();
+    }
+
+    @RequestMapping(value = "/table", method = RequestMethod.GET)
+    public Iterable<String> getDefAllTables() {
+        return hiveMetaStoreService.getAllTableNames("");
+    }
+
+    @RequestMapping(value = "/allTableNames", method = RequestMethod.GET)
+    public Iterable<String> getAllTableNames(@RequestParam("db") String dbName) {
+        return hiveMetaStoreService.getAllTableNames(dbName);
+    }
+
+    @RequestMapping(value = "/db/allTables", method = RequestMethod.GET)
+    public List<Table> getAllTables(@RequestParam("db") String dbName) {
+        return hiveMetaStoreService.getAllTable(dbName);
+    }
+
+    @RequestMapping(value = "/allTables", method = RequestMethod.GET)
+    public Map<String, List<Table>> getAllTables() {
+        return hiveMetaStoreService.getAllTable();
+    }
+
+    @RequestMapping(value = "/default/{table}", method = RequestMethod.GET)
+    public Table getDefTable(@PathVariable("table") String tableName) {
+        return hiveMetaStoreService.getTable("", tableName);
+    }
+
+    @RequestMapping(value = "", method = RequestMethod.GET)
+    public Table getTable(@RequestParam("db") String dbName, @RequestParam("table") String tableName) {
+        return hiveMetaStoreService.getTable(dbName, tableName);
+    }
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreProxy.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreProxy.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreProxy.java
new file mode 100644
index 0000000..4df5796
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreProxy.java
@@ -0,0 +1,78 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.metastore.hive;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.PreDestroy;
+
+@Component
+public class HiveMetaStoreProxy {
+    private static final Logger LOGGER = LoggerFactory.getLogger(HiveMetaStoreProxy.class);
+
+    @Value("${hive.metastore.uris}")
+    private String uris;
+
+    /**
+     * Set attempts and interval for HiveMetastoreClient to retry.
+     *
+     * @hive.hmshandler.retry.attempts: The number of times to retry a HMSHandler call if there were a connection error.
+     * @hive.hmshandler.retry.interval: The time between HMSHandler retry attempts on failure.
+     */
+    @Value("${hive.hmshandler.retry.attempts}")
+    private int attempts;
+
+    @Value("${hive.hmshandler.retry.interval}")
+    private String interval;
+
+    private HiveMetaStoreClient client = null;
+
+    @Bean
+    public HiveMetaStoreClient initHiveMetastoreClient() {
+        HiveConf hiveConf = new HiveConf();
+        hiveConf.set("hive.metastore.local", "false");
+        hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
+        hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, uris);
+        hiveConf.setIntVar(HiveConf.ConfVars.HMSHANDLERATTEMPTS, attempts);
+        hiveConf.setVar(HiveConf.ConfVars.HMSHANDLERINTERVAL, interval);
+        try {
+            client = new HiveMetaStoreClient(hiveConf);
+        } catch (MetaException e) {
+            LOGGER.error("Failed to connect hive metastore. {}", e.getMessage());
+            client = null;
+        }
+
+        return client;
+    }
+
+    @PreDestroy
+    public void destroy() throws Exception {
+        if (null != client) {
+            client.close();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreService.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreService.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreService.java
new file mode 100644
index 0000000..e9a1bbd
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreService.java
@@ -0,0 +1,39 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.metastore.hive;
+
+import org.apache.hadoop.hive.metastore.api.Table;
+
+import java.util.List;
+import java.util.Map;
+
/**
 * Read-only lookup operations against the Hive metastore.
 * Implementations may return null/empty results when the metastore is unreachable.
 */
public interface HiveMetaStoreService {

    /** Names of all databases known to the metastore. */
    Iterable<String> getAllDatabases();

    /** Table names in {@code dbName}; a blank name selects the implementation's default database. */
    Iterable<String> getAllTableNames(String dbName);

    /** Full table metadata for every table in {@code db}; blank selects the default database. */
    List<Table> getAllTable(String db);

    /** Full table metadata for every database, keyed by database name. */
    Map<String, List<Table>> getAllTable();

    /** Metadata for one table; blank {@code dbName} selects the default database. */
    Table getTable(String dbName, String tableName);

}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImpl.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImpl.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImpl.java
new file mode 100644
index 0000000..1386000
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImpl.java
@@ -0,0 +1,162 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.metastore.hive;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.cache.annotation.CacheConfig;
+import org.springframework.cache.annotation.Cacheable;
+import org.springframework.stereotype.Service;
+import org.springframework.util.StringUtils;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+
+
+@Service
+@CacheConfig(cacheNames = "hive")
+public class HiveMetaStoreServiceImpl implements HiveMetaStoreService {
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(HiveMetaStoreService.class);
+
+    @Autowired
+    private HiveMetaStoreClient client;
+
+    @Value("${hive.metastore.dbname}")
+    private String defaultDbName;
+
+    private ThreadPoolExecutor singleThreadExecutor;
+
+    public HiveMetaStoreServiceImpl() {
+        singleThreadExecutor = new ThreadPoolExecutor(1, 1, 3, TimeUnit.SECONDS, new ArrayBlockingQueue<>(1));
+        LOGGER.info("HiveMetaStoreServiceImpl single thread pool created.");
+    }
+
+    private String getUseDbName(String dbName) {
+        if (!StringUtils.hasText(dbName))
+            return defaultDbName;
+        else
+            return dbName;
+    }
+
+    @Override
+    @Cacheable
+
+    public Iterable<String> getAllDatabases() {
+        Iterable<String> results = null;
+        try {
+            results = client.getAllDatabases();
+        } catch (MetaException e) {
+            reconnect();
+            LOGGER.error("Can not get databases : {}", e.getMessage());
+        }
+        return results;
+    }
+
+
+    @Override
+    @Cacheable
+    public Iterable<String> getAllTableNames(String dbName) {
+        Iterable<String> results = null;
+        try {
+            results = client.getAllTables(getUseDbName(dbName));
+        } catch (Exception e) {
+            reconnect();
+            LOGGER.error("Exception fetching tables info: {}", e.getMessage());
+        }
+        return results;
+    }
+
+
+    @Override
+    @Cacheable
+    public List<Table> getAllTable(String db) {
+        return getTables(db);
+    }
+
+
+    @Override
+    @Cacheable
+    public Map<String, List<Table>> getAllTable() {
+        Map<String, List<Table>> results = new HashMap<>();
+        Iterable<String> dbs = getAllDatabases();
+        //MetaException happens
+        if (dbs == null)
+            return results;
+        for (String db : dbs) {
+            results.put(db, getTables(db));
+        }
+        return results;
+    }
+
+
+    @Override
+    @Cacheable
+    public Table getTable(String dbName, String tableName) {
+        Table result = null;
+        try {
+            result = client.getTable(getUseDbName(dbName), tableName);
+        } catch (Exception e) {
+            reconnect();
+            LOGGER.error("Exception fetching table info : {}. {}", tableName, e.getMessage());
+        }
+        return result;
+    }
+
+
+    private List<Table> getTables(String db) {
+        String useDbName = getUseDbName(db);
+        List<Table> allTables = new ArrayList<>();
+        try {
+            Iterable<String> tables = client.getAllTables(useDbName);
+            for (String table : tables) {
+                Table tmp = client.getTable(db, table);
+                allTables.add(tmp);
+            }
+        } catch (Exception e) {
+            reconnect();
+            LOGGER.error("Exception fetching tables info: {}", e.getMessage());
+        }
+        return allTables;
+    }
+
+    private void reconnect() {
+        if (singleThreadExecutor.getActiveCount() == 0) {
+            System.out.println("execute create thread.");
+            singleThreadExecutor.execute(() -> {
+                try {
+                    client.reconnect();
+                } catch (MetaException e) {
+                    LOGGER.error("reconnect to hive failed.");
+                }
+            });
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreController.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreController.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreController.java
deleted file mode 100644
index 169051e..0000000
--- a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreController.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-package org.apache.griffin.core.metastore.hive;
-
-
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.web.bind.annotation.*;
-
-import java.util.List;
-import java.util.Map;
-
-@RestController
-@RequestMapping("/metadata/hive")
-public class HiveMetastoreController {
-
-    @Autowired
-    HiveMetastoreServiceImpl hiveMetastoreService;
-
-    @RequestMapping(value = "/db",method = RequestMethod.GET)
-    public Iterable<String> getAllDatabases()  {
-        return hiveMetastoreService.getAllDatabases();
-    }
-
-    @RequestMapping(value = "/allTableNames",method = RequestMethod.GET)
-    public Iterable<String> getAllTableNames(@RequestParam("db") String dbName)  {
-        return hiveMetastoreService.getAllTableNames(dbName);
-    }
-
-    @RequestMapping(value = "/db/allTables",method = RequestMethod.GET)
-    public List<Table> getAllTables(@RequestParam("db") String dbName)  {
-        return hiveMetastoreService.getAllTablesByDbName(dbName);
-    }
-
-    @RequestMapping(value = "/allTables",method = RequestMethod.GET)
-    public Map<String,List<Table>> getAllTables()  {
-        return hiveMetastoreService.getAllTable();
-    }
-
-    @RequestMapping(value = "table",method = RequestMethod.GET)
-    public Table getTable(@RequestParam("db") String dbName, @RequestParam("table") String tableName)  {
-        return hiveMetastoreService.getTable(dbName, tableName);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreProxy.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreProxy.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreProxy.java
deleted file mode 100644
index af080e0..0000000
--- a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreProxy.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
-package org.apache.griffin.core.metastore.hive;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.context.annotation.Bean;
-import org.springframework.stereotype.Component;
-
-import javax.annotation.PreDestroy;
-
-@Component
-public class HiveMetastoreProxy
-{
-    private static final Logger LOGGER = LoggerFactory.getLogger(HiveMetastoreProxy.class);
-
-    @Value("${hive.metastore.uris}")
-    private String uris;
-
-    /**
-     * Set attempts and interval for HiveMetastoreClient to retry.
-     * @hive.hmshandler.retry.attempts: The number of times to retry a HMSHandler call if there were a connection error.
-     * @hive.hmshandler.retry.interval: The time between HMSHandler retry attempts on failure.
-     */
-    @Value("${hive.hmshandler.retry.attempts}")
-    private int attempts;
-
-    @Value("${hive.hmshandler.retry.interval}")
-    private String interval;
-
-    private HiveMetaStoreClient client = null;
-
-    @Bean
-    public HiveMetaStoreClient initHiveMetastoreClient(){
-        HiveConf hiveConf = new HiveConf();
-        hiveConf.set("hive.metastore.local", "false");
-        hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
-        hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, uris);
-        hiveConf.setIntVar(HiveConf.ConfVars.HMSHANDLERATTEMPTS, attempts);
-        hiveConf.setVar(HiveConf.ConfVars.HMSHANDLERINTERVAL, interval);
-        try {
-            client= new HiveMetaStoreClient(hiveConf);
-        } catch (MetaException e) {
-            LOGGER.error("Failed to connect hive metastore",e.getMessage());
-            client = null;
-        }
-
-        return client;
-    }
-
-    @PreDestroy
-    public void destroy() throws Exception {
-        if(null!=client) {
-            client.close();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreService.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreService.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreService.java
deleted file mode 100644
index 34eab57..0000000
--- a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreService.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
-package org.apache.griffin.core.metastore.hive;
-
-import org.apache.hadoop.hive.metastore.api.Table;
-
-import java.util.List;
-import java.util.Map;
-
-public interface HiveMetastoreService {
-
-    Iterable<String> getAllDatabases() ;
-
-    Iterable<String> getAllTableNames(String dbName) ;
-
-    List<Table> getAllTablesByDbName(String db) ;
-
-    Map<String,List<Table>> getAllTable() ;
-
-    Table getTable(String dbName, String tableName) ;
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceImpl.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceImpl.java b/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceImpl.java
deleted file mode 100644
index 2f95b19..0000000
--- a/service/src/main/java/org/apache/griffin/core/metastore/hive/HiveMetastoreServiceImpl.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
-package org.apache.griffin.core.metastore.hive;
-
-import org.apache.griffin.core.error.exception.GriffinException.HiveConnectionException;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.cache.annotation.CacheConfig;
-import org.springframework.cache.annotation.Cacheable;
-import org.springframework.stereotype.Service;
-import org.springframework.util.StringUtils;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-
-@Service
-@CacheConfig(cacheNames = "hive")
-public class HiveMetastoreServiceImpl implements HiveMetastoreService{
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(HiveMetastoreServiceImpl.class);
-
-    @Autowired
-    private HiveMetaStoreClient client;
-    //TODO: wrap HiveMetaStoreClient to manage hive connection, when hive connection fails, evict hive cache.
-    //Note that if the hive connection is down, the following methods won't throw exception because it isn't executed
-    //because of cache.
-
-    @Value("${hive.metastore.dbname}")
-    private String defaultDbName;
-
-    private String getUseDbName(String dbName) {
-        if (!StringUtils.hasText(dbName)) return defaultDbName;
-        else return dbName;
-    }
-
-    @Override
-    @Cacheable
-    public Iterable<String> getAllDatabases() {
-        Iterable<String> results = null;
-        try {
-            results = client.getAllDatabases();
-        } catch (MetaException e) {
-            reconnect();
-            LOGGER.error("Can not get databases : ",e.getMessage());
-        }
-        return results;
-    }
-
-
-    @Override
-    @Cacheable
-    public Iterable<String> getAllTableNames(String dbName) {
-        Iterable<String> results = null;
-        String useDbName = getUseDbName(dbName);
-        try {
-            results = client.getAllTables(useDbName);
-        } catch (Exception e) {
-            reconnect();
-            LOGGER.error("Exception fetching tables info: " + e.getMessage());
-        }
-        return results;
-    }
-
-
-    @Cacheable
-    public List<Table> getAllTablesByDbName(String db) {
-        List<Table> results = new ArrayList<Table>();
-        String useDbName = getUseDbName(db);
-        try {
-            Iterable<String> tables = client.getAllTables(useDbName);
-            for (String table: tables) {
-                Table tmp = client.getTable(db,table);
-                results.add(tmp);
-            }
-        } catch (Exception e) {
-            reconnect();
-            LOGGER.error("Exception fetching tables info: " + e.getMessage());
-        }
-        return results;
-    }
-
-
-    @Override
-    @Cacheable
-    public Map<String,List<Table>> getAllTable() {
-        Map<String,List<Table>> results = new HashMap<String, List<Table>>();
-        Iterable<String> dbs = getAllDatabases();
-        for(String db: dbs){
-            List<Table> alltables = new ArrayList<Table>();
-            String useDbName = getUseDbName(db);
-            try {
-                Iterable<String> tables = client.getAllTables(useDbName);
-                for (String table: tables) {
-                    Table tmp = client.getTable(db,table);
-                    alltables.add(tmp);
-                }
-            } catch (Exception e) {
-                reconnect();
-                LOGGER.error("Exception fetching tables info: " + e.getMessage());
-            }
-            results.put(db,alltables);
-        }
-        return results;
-    }
-
-
-    @Override
-    @Cacheable
-    public Table getTable(String dbName, String tableName) {
-        Table result = null;
-        String useDbName = getUseDbName(dbName);
-        try {
-            result = client.getTable(useDbName, tableName);
-        } catch (Exception e) {
-            reconnect();
-            LOGGER.error("Exception fetching table info : " +tableName + " : " + e.getMessage());
-        }
-        return result;
-    }
-
-    private void reconnect() {
-        try {
-            client.reconnect();
-        } catch (MetaException e) {
-            LOGGER.error("reconnect to hive failed. ");
-            throw new HiveConnectionException();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaController.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaController.java b/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaController.java
index bb55e1c..a5bed89 100644
--- a/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaController.java
+++ b/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaController.java
@@ -28,36 +28,36 @@ import org.springframework.web.bind.annotation.*;
 @RestController
 @RequestMapping("/metadata/kafka")
 public class KafkaSchemaController {
-    //TODO subject should be replaced with topic
+
     @Autowired
     KafkaSchemaServiceImpl kafkaSchemaService;
 
-    @RequestMapping(value = "/schema/{id}",method = RequestMethod.GET)
+    @RequestMapping(value = "/schema/{id}", method = RequestMethod.GET)
     public SchemaString getSchemaString(@PathVariable("id") Integer id) {
         return kafkaSchemaService.getSchemaString(id);
     }
 
-    @RequestMapping(value = "/subject",method = RequestMethod.GET)
+    @RequestMapping(value = "/subject", method = RequestMethod.GET)
     public Iterable<String> getSubjects() {
         return kafkaSchemaService.getSubjects();
     }
 
-    @RequestMapping(value = "/versions",method = RequestMethod.GET)
+    @RequestMapping(value = "/versions", method = RequestMethod.GET)
     public Iterable<Integer> getSubjectVersions(@RequestParam("subject") String subject) {
         return kafkaSchemaService.getSubjectVersions(subject);
     }
 
-    @RequestMapping(value = "/subjectSchema",method = RequestMethod.GET)
+    @RequestMapping(value = "/subjectSchema", method = RequestMethod.GET)
     public Schema getSubjectSchema(@RequestParam("subject") String subject, @RequestParam("version") String version) {
         return kafkaSchemaService.getSubjectSchema(subject, version);
     }
 
-    @RequestMapping(value = "/config",method = RequestMethod.GET)
+    @RequestMapping(value = "/config", method = RequestMethod.GET)
     public Config getTopLevelConfig() {
         return kafkaSchemaService.getTopLevelConfig();
     }
 
-    @RequestMapping(value = "/config/{subject}",method = RequestMethod.GET)
+    @RequestMapping(value = "/config/{subject}", method = RequestMethod.GET)
     public Config getSubjectLevelConfig(@PathVariable("subject") String subject) {
         return kafkaSchemaService.getSubjectLevelConfig(subject);
     }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImpl.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImpl.java b/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImpl.java
index 3b3b81f..79723ca 100644
--- a/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/metastore/kafka/KafkaSchemaServiceImpl.java
@@ -33,7 +33,7 @@ import org.springframework.web.client.RestTemplate;
 import java.util.Arrays;
 
 @Service
-public class KafkaSchemaServiceImpl implements KafkaSchemaService{
+public class KafkaSchemaServiceImpl implements KafkaSchemaService {
 
     private static final Logger log = LoggerFactory.getLogger(KafkaSchemaServiceImpl.class);
 

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/metric/MetricController.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/metric/MetricController.java b/service/src/main/java/org/apache/griffin/core/metric/MetricController.java
index e68ec8f..e0e9c9a 100644
--- a/service/src/main/java/org/apache/griffin/core/metric/MetricController.java
+++ b/service/src/main/java/org/apache/griffin/core/metric/MetricController.java
@@ -32,12 +32,14 @@ import org.springframework.web.bind.annotation.RestController;
  */
 
 @RestController
+@RequestMapping("/metrics")
 public class MetricController {
     private static final Logger LOGGER = LoggerFactory.getLogger(MetricController.class);
     @Autowired
     MetricService metricService;
-    @RequestMapping(value = "/orgName",method = RequestMethod.GET)
+
+    @RequestMapping(value = "/org", method = RequestMethod.GET)
     public String getOrgByMeasureName(@RequestParam("measureName") String measureName) {
         return metricService.getOrgByMeasureName(measureName);
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/metric/MetricServiceImpl.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/metric/MetricServiceImpl.java b/service/src/main/java/org/apache/griffin/core/metric/MetricServiceImpl.java
index 12e518d..69a2b8c 100644
--- a/service/src/main/java/org/apache/griffin/core/metric/MetricServiceImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/metric/MetricServiceImpl.java
@@ -25,9 +25,10 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
 
 @Service
-public class MetricServiceImpl implements MetricService{
+public class MetricServiceImpl implements MetricService {
     @Autowired
-    MeasureRepo measureRepo;
+    private MeasureRepo measureRepo;
+
     @Override
     public String getOrgByMeasureName(String measureName) {
         return measureRepo.findOrgByName(measureName);

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/service/GriffinController.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/service/GriffinController.java b/service/src/main/java/org/apache/griffin/core/service/GriffinController.java
index 0024cef..b7aff53 100644
--- a/service/src/main/java/org/apache/griffin/core/service/GriffinController.java
+++ b/service/src/main/java/org/apache/griffin/core/service/GriffinController.java
@@ -20,7 +20,6 @@ under the License.
 package org.apache.griffin.core.service;
 
 
-import org.apache.griffin.core.measure.entity.Measure;
 import org.apache.griffin.core.measure.repo.MeasureRepo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -30,7 +29,9 @@ import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RestController;
 
-import java.util.*;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 
 @RestController
@@ -40,65 +41,37 @@ public class GriffinController {
     @Autowired
     MeasureRepo measureRepo;
 
-    @RequestMapping(value = "/version",method = RequestMethod.GET)
+    @RequestMapping(value = "/version", method = RequestMethod.GET)
     public String greeting() {
         return "0.1.0";
     }
 
-    @RequestMapping(value = "/org",method = RequestMethod.GET)
-    public List<String> getOrgs(){
+    @RequestMapping(value = "/org", method = RequestMethod.GET)
+    public List<String> getOrgs() {
         return measureRepo.findOrganizations();
     }
 
     /**
-     *
      * @param org
      * @return list of the name of metric, and a metric is the result of executing the job sharing the same name with
      * measure.
      */
-    @RequestMapping(value = "/org/{org}",method = RequestMethod.GET)
-    public List<String> getMetricNameListByOrg(@PathVariable("org") String org){
+    @RequestMapping(value = "/org/{org}", method = RequestMethod.GET)
+    public List<String> getMetricNameListByOrg(@PathVariable("org") String org) {
         return measureRepo.findNameByOrganization(org);
     }
 
-    @RequestMapping(value = "/orgWithMetricsName",method = RequestMethod.GET)
-    public Map<String,List<String>> getOrgsWithMetricsName(){
-        Map<String,List<String>> orgWithMetricsMap=new HashMap<>();
-        List<String> orgList=measureRepo.findOrganizations();
-        for (String org:orgList){
-            if(org!=null){
-                orgWithMetricsMap.put(org,measureRepo.findNameByOrganization(org));
+    @RequestMapping(value = "/orgWithMetricsName", method = RequestMethod.GET)
+    public Map<String, List<String>> getOrgsWithMetricsName() {
+        Map<String, List<String>> orgWithMetricsMap = new HashMap<>();
+        List<String> orgList = measureRepo.findOrganizations();
+        for (String org : orgList) {
+            if (org != null) {
+                orgWithMetricsMap.put(org, measureRepo.findNameByOrganization(org));
             }
         }
         return orgWithMetricsMap;
     }
 
-    @RequestMapping(value = "/dataAssetsNameWithMetricsName",method = RequestMethod.GET)
-    public Map<String,List<String>> getDataAssetsNameWithMetricsName(){
-        Map<String,List<String>> daWithMetricsMap=new HashMap<>();
-        Iterable<Measure> measureList=measureRepo.findAll();
-        for (Measure m:measureList){
-            switch (m.getType()){
-                case accuracy:
-                    String[] tableNames={m.getSource().getConfigInMaps().get("table.name"),m.getTarget().getConfigInMaps().get("table.name")};
-                    for (String taName:tableNames){
-                        if(taName!=null) {
-                            if(daWithMetricsMap.get(taName)==null){
-                                daWithMetricsMap.put(taName, new ArrayList<>(Arrays.asList(m.getName())));
-                            }else{
-                                List<String> measureNameList=daWithMetricsMap.get(taName);
-                                measureNameList.add(m.getName());
-                                daWithMetricsMap.put(taName, measureNameList);
-                            }
-                        }
-                    }
-                    break;
-                default:
-                    LOGGER.info("invalid measure type!");
-            }
-        }
-        return daWithMetricsMap;
-    }
-
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/util/GriffinOperationMessage.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/util/GriffinOperationMessage.java b/service/src/main/java/org/apache/griffin/core/util/GriffinOperationMessage.java
index 0158d04..dd4d895 100644
--- a/service/src/main/java/org/apache/griffin/core/util/GriffinOperationMessage.java
+++ b/service/src/main/java/org/apache/griffin/core/util/GriffinOperationMessage.java
@@ -26,29 +26,28 @@ import com.fasterxml.jackson.annotation.JsonFormat;
 public enum GriffinOperationMessage {
     //success
     CREATE_MEASURE_SUCCESS(201, "Create Measure Succeed"),
-    DELETE_MEASURE_BY_ID_SUCCESS(202,"Delete Measures By Name Succeed"),
+    DELETE_MEASURE_BY_ID_SUCCESS(202, "Delete Measures By Name Succeed"),
     DELETE_MEASURE_BY_NAME_SUCCESS(203, "Delete Measures By Name Succeed"),
     UPDATE_MEASURE_SUCCESS(204, "Update Measure Succeed"),
-    CREATE_JOB_SUCCESS(205,"CREATE Job Succeed"),
-    DELETE_JOB_SUCCESS(206,"Delete Job Succeed"),
-    SET_JOB_DELETED_STATUS_SUCCESS(207,"Set Job Deleted Status Succeed"),
-    PAUSE_JOB_SUCCESS(208,"Pause Job Succeed"),
-    UPDATE_JOB_INSTANCE_SUCCESS(209,"Update Job Instance Succeed"),
+    CREATE_JOB_SUCCESS(205, "CREATE Job Succeed"),
+    DELETE_JOB_SUCCESS(206, "Delete Job Succeed"),
+    SET_JOB_DELETED_STATUS_SUCCESS(207, "Set Job Deleted Status Succeed"),
+    PAUSE_JOB_SUCCESS(208, "Pause Job Succeed"),
+    UPDATE_JOB_INSTANCE_SUCCESS(209, "Update Job Instance Succeed"),
 
     //failed
     RESOURCE_NOT_FOUND(400, "Resource Not Found"),
     CREATE_MEASURE_FAIL(401, "Create Measure Failed"),
-    DELETE_MEASURE_BY_ID_FAIL(402,"Delete Measures By Name Failed"),
+    DELETE_MEASURE_BY_ID_FAIL(402, "Delete Measures By Name Failed"),
     DELETE_MEASURE_BY_NAME_FAIL(403, "Delete Measures By Name Failed"),
     UPDATE_MEASURE_FAIL(404, "Update Measure Failed"),
-    CREATE_JOB_FAIL(405,"Create Job Failed"),
-    DELETE_JOB_FAIL(406,"Delete Job Failed"),
-    SET_JOB_DELETED_STATUS_FAIL(407,"Set Job Deleted Status Failed"),
-    PAUSE_JOB_FAIL(408,"Pause Job Failed"),
-    UPDATE_JOB_INSTANCE_FAIL(409,"Update Job Instance Failed"),
+    CREATE_JOB_FAIL(405, "Create Job Failed"),
+    DELETE_JOB_FAIL(406, "Delete Job Failed"),
+    SET_JOB_DELETED_STATUS_FAIL(407, "Set Job Deleted Status Failed"),
+    PAUSE_JOB_FAIL(408, "Pause Job Failed"),
+    UPDATE_JOB_INSTANCE_FAIL(409, "Update Job Instance Failed"),
     CREATE_MEASURE_FAIL_DUPLICATE(410, "Create Measure Failed, duplicate records"),
-    UNEXPECTED_RUNTIME_EXCEPTION(411, "Unexpected RuntimeException")
-    ;
+    UNEXPECTED_RUNTIME_EXCEPTION(411, "Unexpected RuntimeException");
 
     private final int code;
     private final String description;

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/util/GriffinUtil.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/util/GriffinUtil.java b/service/src/main/java/org/apache/griffin/core/util/GriffinUtil.java
index 5355464..961c3d4 100644
--- a/service/src/main/java/org/apache/griffin/core/util/GriffinUtil.java
+++ b/service/src/main/java/org/apache/griffin/core/util/GriffinUtil.java
@@ -30,50 +30,47 @@ import org.springframework.core.io.ClassPathResource;
 import java.io.IOException;
 import java.util.Properties;
 
-/**
- * Created by xiangrchen on 7/21/17.
- */
 public class GriffinUtil {
     private static final Logger LOGGER = LoggerFactory.getLogger(GriffinUtil.class);
 
-    public static String toJson(Object obj)  {
-        ObjectMapper mapper=new ObjectMapper();
-        String jsonStr=null;
+    public static String toJson(Object obj) {
+        ObjectMapper mapper = new ObjectMapper();
+        String jsonStr = null;
         try {
-            jsonStr=mapper.writeValueAsString(obj);
+            jsonStr = mapper.writeValueAsString(obj);
         } catch (JsonProcessingException e) {
-            LOGGER.error("convert to json failed. "+obj);
+            LOGGER.error("convert to json failed. {}", obj);
         }
         return jsonStr;
     }
 
-    public static <T>T toEntity(String jsonStr,Class<T> type) throws IOException {
-        if (jsonStr==null || jsonStr.length()==0){
-            LOGGER.warn("jsonStr "+type+" is empty!");
+    public static <T> T toEntity(String jsonStr, Class<T> type) throws IOException {
+        if (jsonStr == null || jsonStr.length() == 0) {
+            LOGGER.warn("jsonStr {} is empty!", type);
             return null;
         }
-        ObjectMapper mapper=new ObjectMapper();
-        return mapper.readValue(jsonStr,type);
+        ObjectMapper mapper = new ObjectMapper();
+        return mapper.readValue(jsonStr, type);
     }
 
-    public static <T>T toEntity(String jsonStr,TypeReference type) throws IOException {
-        if (jsonStr==null || jsonStr.length()==0){
-            LOGGER.warn("jsonStr "+type+" is empty!");
+    public static <T> T toEntity(String jsonStr, TypeReference type) throws IOException {
+        if (jsonStr == null || jsonStr.length() == 0) {
+            LOGGER.warn("jsonStr {} is empty!", type);
             return null;
         }
-        ObjectMapper mapper=new ObjectMapper();
-        return mapper.readValue(jsonStr,type);
+        ObjectMapper mapper = new ObjectMapper();
+        return mapper.readValue(jsonStr, type);
     }
 
     public static Properties getProperties(String propertiesPath) {
         PropertiesFactoryBean propertiesFactoryBean = new PropertiesFactoryBean();
         propertiesFactoryBean.setLocation(new ClassPathResource(propertiesPath));
-        Properties properties=null;
+        Properties properties = null;
         try {
             propertiesFactoryBean.afterPropertiesSet();
-            properties=propertiesFactoryBean.getObject();
+            properties = propertiesFactoryBean.getObject();
         } catch (IOException e) {
-            LOGGER.error("get properties from "+propertiesPath+" failed. "+e);
+            LOGGER.error("get properties from {} failed. {}", propertiesPath, e.getMessage());
         }
         return properties;
     }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/job/JobControllerTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/job/JobControllerTest.java b/service/src/test/java/org/apache/griffin/core/job/JobControllerTest.java
index 60442ec..d939d82 100644
--- a/service/src/test/java/org/apache/griffin/core/job/JobControllerTest.java
+++ b/service/src/test/java/org/apache/griffin/core/job/JobControllerTest.java
@@ -43,11 +43,12 @@ import java.util.Map;
 import static org.hamcrest.CoreMatchers.is;
 import static org.mockito.BDDMockito.given;
 import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
+import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
 import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
 import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
 
 @RunWith(SpringRunner.class)
-@WebMvcTest(value = JobController.class,secure = false)
+@WebMvcTest(value = JobController.class, secure = false)
 public class JobControllerTest {
     @Autowired
     private MockMvc mvc;
@@ -56,73 +57,75 @@ public class JobControllerTest {
     private JobService service;
 
     @Before
-    public void setup(){
+    public void setup() {
     }
 
     @Test
     public void testGetJobs() throws Exception {
-        Map<String, Serializable> map=new HashMap<String, Serializable>();
+        Map<String, Serializable> map = new HashMap<>();
         map.put("jobName", "job1");
         map.put("groupName", "BA");
         given(service.getAliveJobs()).willReturn(Arrays.asList(map));
 
         mvc.perform(get("/jobs/").contentType(MediaType.APPLICATION_JSON))
                 .andExpect(status().isOk())
-                .andExpect(jsonPath("$.[0].jobName",is("job1")))
-        ;
+                .andExpect(jsonPath("$.[0].jobName", is("job1")));
     }
 
     @Test
     public void testAddJob() throws Exception {
-        String groupName="BA";
-        String jobName="job1";
-        long measureId=0;
-        JobRequestBody jobRequestBody =new JobRequestBody("YYYYMMdd-HH","YYYYMMdd-HH","111","20170607","100");
-        ObjectMapper mapper=new ObjectMapper();
-        String schedulerRequestBodyJson=mapper.writeValueAsString(jobRequestBody);
+        String groupName = "BA";
+        String jobName = "job1";
+        long measureId = 0;
+        JobRequestBody jobRequestBody = new JobRequestBody("YYYYMMdd-HH", "YYYYMMdd-HH", "111", "20170607", "100");
+        ObjectMapper mapper = new ObjectMapper();
+        String schedulerRequestBodyJson = mapper.writeValueAsString(jobRequestBody);
         given(service.addJob(groupName, jobName, measureId, jobRequestBody)).willReturn(GriffinOperationMessage.CREATE_JOB_SUCCESS);
 
-        mvc.perform(post("/jobs?group=BA&jobName=job1&measureId=0").contentType(MediaType.APPLICATION_JSON).content(schedulerRequestBodyJson))
+        mvc.perform(post("/jobs").param("group", groupName).param("jobName", jobName)
+                .param("measureId", String.valueOf(measureId))
+                .contentType(MediaType.APPLICATION_JSON)
+                .content(schedulerRequestBodyJson))
                 .andExpect(status().isOk())
-                .andExpect(jsonPath("$.code",is(GriffinOperationMessage.CREATE_JOB_SUCCESS.getCode())))
-                .andExpect(jsonPath("$.description", is(GriffinOperationMessage.CREATE_JOB_SUCCESS.getDescription())))
-        ;
+                .andExpect(jsonPath("$.code", is(205)))
+                .andExpect(jsonPath("$.description", is("CREATE Job Succeed")))
+                .andDo(print());
     }
 
     @Test
     public void testDeleteJob() throws Exception {
-        String groupName="BA";
-        String jobName="job1";
-        given(service.deleteJob(groupName,jobName)).willReturn(GriffinOperationMessage.DELETE_JOB_SUCCESS);
-        mvc.perform(delete("/jobs?group=BA&jobName=job1").contentType(MediaType.APPLICATION_JSON))
+        String groupName = "BA";
+        String jobName = "job1";
+        given(service.deleteJob(groupName, jobName)).willReturn(GriffinOperationMessage.DELETE_JOB_SUCCESS);
+
+        mvc.perform(delete("/jobs").param("group", groupName).param("jobName", jobName))
                 .andExpect(status().isOk())
-                .andExpect(jsonPath("$.code",is(GriffinOperationMessage.DELETE_JOB_SUCCESS.getCode())))
-                .andExpect(jsonPath("$.description", is(GriffinOperationMessage.DELETE_JOB_SUCCESS.getDescription())))
-        ;
+                .andExpect(jsonPath("$.code", is(206)))
+                .andExpect(jsonPath("$.description", is("Delete Job Succeed")));
     }
 
     @Test
     public void testFindInstancesOfJob() throws Exception {
-        String group="BA";
-        String job="job1";
-        int page=0;
-        int size=2;
-        JobInstance jobInstance=new JobInstance(group, job, 1, LivySessionStates.State.running, "","", System.currentTimeMillis());
-        given(service.findInstancesOfJob(group,job,page,size)).willReturn(Arrays.asList(jobInstance));
-        mvc.perform(get("/jobs/instances?group=BA&jobName=job1&page=0&size=2").contentType(MediaType.APPLICATION_JSON))
+        String groupName = "BA";
+        String jobName = "job1";
+        int page = 0;
+        int size = 2;
+        JobInstance jobInstance = new JobInstance(groupName, jobName, 1, LivySessionStates.State.running, "", "", System.currentTimeMillis());
+        given(service.findInstancesOfJob(groupName, jobName, page, size)).willReturn(Arrays.asList(jobInstance));
+
+        mvc.perform(get("/jobs/instances").param("group", groupName).param("jobName", jobName)
+                .param("page", String.valueOf(page)).param("size", String.valueOf(size)))
                 .andExpect(status().isOk())
-                .andExpect(jsonPath("$.[0].groupName",is("BA")))
-        ;
+                .andExpect(jsonPath("$.[0].groupName", is("BA")));
     }
 
     @Test
     public void testGetHealthInfo() throws Exception {
-        JobHealth jobHealth=new JobHealth(1,3);
+        JobHealth jobHealth = new JobHealth(1, 3);
         given(service.getHealthInfo()).willReturn(jobHealth);
-        mvc.perform(get("/jobs/health").contentType(MediaType.APPLICATION_JSON))
+
+        mvc.perform(get("/jobs/health"))
                 .andExpect(status().isOk())
-                .andExpect(jsonPath("$.healthyJobCount",is(1)))
-                .andExpect(jsonPath("$.jobCount", is(3)))
-        ;
+                .andExpect(jsonPath("$.healthyJobCount", is(1)));
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/job/JobInstanceRepoTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/job/JobInstanceRepoTest.java b/service/src/test/java/org/apache/griffin/core/job/JobInstanceRepoTest.java
index 2ef8b3d..c7df8d2 100644
--- a/service/src/test/java/org/apache/griffin/core/job/JobInstanceRepoTest.java
+++ b/service/src/test/java/org/apache/griffin/core/job/JobInstanceRepoTest.java
@@ -1,61 +1,88 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
 package org.apache.griffin.core.job;
 
+import org.apache.griffin.core.job.entity.JobInstance;
+import org.apache.griffin.core.job.entity.LivySessionStates;
 import org.apache.griffin.core.job.repo.JobInstanceRepo;
-import org.apache.griffin.core.measure.MeasureRepoTest;
-import org.apache.griffin.core.measure.repo.DataConnectorRepo;
-import org.apache.griffin.core.measure.repo.EvaluateRuleRepo;
-import org.apache.griffin.core.measure.repo.MeasureRepo;
+import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
 import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
 import org.springframework.context.annotation.PropertySource;
-import org.springframework.test.context.jdbc.Sql;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.domain.Sort;
 import org.springframework.test.context.junit4.SpringRunner;
 
+import java.util.List;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.Assert.assertEquals;
+
 @RunWith(SpringRunner.class)
 @PropertySource("classpath:application.properties")
 @DataJpaTest
-@Sql("classpath:test.sql")
 public class JobInstanceRepoTest {
 
-    private static final Logger LOGGER = LoggerFactory.getLogger(JobInstanceRepoTest.class);
-
-
     @Autowired
-    private TestEntityManager testEntityManager;
+    private TestEntityManager entityManager;
 
     @Autowired
     private JobInstanceRepo jobInstanceRepo;
 
+    @Before
+    public void setUp() {
+        setEntityManager();
+    }
+
+    @Test
+    public void testFindByGroupNameAndJobNameWithPageable() {
+        Pageable pageRequest = new PageRequest(0, 10, Sort.Direction.DESC, "timestamp");
+        List<JobInstance> instances = jobInstanceRepo.findByGroupNameAndJobName("BA", "job3", pageRequest);
+        assertThat(instances.size()).isEqualTo(1);
+        assertEquals(instances.get(0).getAppId(), "appId3");
+    }
+
     @Test
-    public void testFindByGroupNameAndJobName3Args(){
-/*
-        jobInstanceRepo.findByGroupNameAndJobName();
-*/
+    public void testFindByGroupNameAndJobName() {
+        List<JobInstance> instances = jobInstanceRepo.findByGroupNameAndJobName("BA", "job1");
+        assertThat(instances.size()).isEqualTo(1);
+        assertEquals(instances.get(0).getAppId(), "appId1");
+    }
+
+    @Test
+    public void testFindGroupWithJobName() {
+        List<Object> list = jobInstanceRepo.findGroupWithJobName();
+        assertThat(list.size()).isEqualTo(3);
+    }
+
+    @Test
+    public void testDeleteByGroupAndJobName() {
+        jobInstanceRepo.deleteByGroupAndJobName("BA", "job1");
+        assertThat(jobInstanceRepo.count()).isEqualTo(2);
+    }
 
+    @Test
+    public void testUpdate() {
+        Iterable iterable = jobInstanceRepo.findAll();
+        JobInstance instance = (JobInstance) iterable.iterator().next();
+        jobInstanceRepo.update(instance.getId(), LivySessionStates.State.dead, "appIdChanged", "appUriChanged");
+        // Must refresh the updated JobInstance; otherwise the update is not visible in this persistence context.
+        entityManager.refresh(jobInstanceRepo.findOne(instance.getId()));
+        assertEquals(jobInstanceRepo.findOne(instance.getId()).getState(), LivySessionStates.State.dead);
     }
 
+
+    private void setEntityManager() {
+        JobInstance instance1 = new JobInstance("BA", "job1", 0, LivySessionStates.State.success,
+                "appId1", "http://domain.com/uri1", System.currentTimeMillis());
+        JobInstance instance2 = new JobInstance("BA", "job2", 1, LivySessionStates.State.error,
+                "appId2", "http://domain.com/uri2", System.currentTimeMillis());
+        JobInstance instance3 = new JobInstance("BA", "job3", 2, LivySessionStates.State.starting,
+                "appId3", "http://domain.com/uri3", System.currentTimeMillis());
+        entityManager.persistAndFlush(instance1);
+        entityManager.persistAndFlush(instance2);
+        entityManager.persistAndFlush(instance3);
+    }
 }


[2/4] incubator-griffin git commit: change json format, update unit test and fix hive connect

Posted by gu...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/job/JobServiceImplTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/job/JobServiceImplTest.java b/service/src/test/java/org/apache/griffin/core/job/JobServiceImplTest.java
index a77dc69..6efa102 100644
--- a/service/src/test/java/org/apache/griffin/core/job/JobServiceImplTest.java
+++ b/service/src/test/java/org/apache/griffin/core/job/JobServiceImplTest.java
@@ -19,6 +19,7 @@ under the License.
 
 package org.apache.griffin.core.job;
 
+import org.apache.griffin.core.error.exception.GriffinException;
 import org.apache.griffin.core.job.entity.JobHealth;
 import org.apache.griffin.core.job.entity.JobInstance;
 import org.apache.griffin.core.job.entity.JobRequestBody;
@@ -30,10 +31,8 @@ import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.Mockito;
 import org.quartz.*;
+import org.quartz.impl.JobDetailImpl;
 import org.quartz.impl.matchers.GroupMatcher;
-import org.quartz.impl.triggers.CronTriggerImpl;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.boot.test.mock.mockito.MockBean;
@@ -44,28 +43,26 @@ import org.springframework.data.domain.Sort;
 import org.springframework.scheduling.quartz.SchedulerFactoryBean;
 import org.springframework.test.context.junit4.SpringRunner;
 
-import java.io.Serializable;
 import java.util.*;
 
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.fail;
+import static org.apache.griffin.core.measure.MeasureTestHelper.createJobDetail;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.mockito.BDDMockito.given;
-import static org.quartz.JobBuilder.newJob;
+import static org.quartz.TriggerBuilder.newTrigger;
 
 @RunWith(SpringRunner.class)
 public class JobServiceImplTest {
-    private static final Logger log = LoggerFactory.getLogger(JobServiceImplTest.class);
 
     @TestConfiguration
-    public static class SchedulerServiceConfiguration{
+    public static class SchedulerServiceConfiguration {
         @Bean
-        public JobServiceImpl service(){
+        public JobServiceImpl service() {
             return new JobServiceImpl();
         }
+
         @Bean
-        public SchedulerFactoryBean factoryBean(){
+        public SchedulerFactoryBean factoryBean() {
             return new SchedulerFactoryBean();
         }
     }
@@ -78,145 +75,123 @@ public class JobServiceImplTest {
     private SchedulerFactoryBean factory;
 
     @Autowired
-    private JobServiceImpl service;
+    public JobServiceImpl service;
 
     @Before
-    public void setup(){
+    public void setup() {
     }
 
     @Test
-    public void testGetJobs(){
+    public void testGetAliveJobs() throws SchedulerException {
+        Scheduler scheduler = Mockito.mock(Scheduler.class);
+        JobDetailImpl jobDetail = createJobDetail();
+        given(factory.getObject()).willReturn(scheduler);
+        given(scheduler.getJobGroupNames()).willReturn(Arrays.asList("group"));
+        HashSet<JobKey> set = new HashSet<JobKey>() {{
+            add(new JobKey("name", "group"));
+        }};
+        given(scheduler.getJobKeys(GroupMatcher.jobGroupEquals("group"))).willReturn(set);
+        Trigger trigger = newTrigger().withIdentity(TriggerKey.triggerKey("name", "group")).
+                withSchedule(SimpleScheduleBuilder.simpleSchedule().withIntervalInSeconds(3000).repeatForever())
+                .startAt(new Date()).build();
+        List<Trigger> triggers = Arrays.asList(trigger);
+        JobKey jobKey = set.iterator().next();
+        given((List<Trigger>) scheduler.getTriggersOfJob(jobKey)).willReturn(triggers);
+        given(scheduler.getJobDetail(jobKey)).willReturn(jobDetail);
+        assertEquals(service.getAliveJobs().size(), 1);
+
+        // trigger is empty
+        given((List<Trigger>) scheduler.getTriggersOfJob(jobKey)).willReturn(Arrays.asList());
+        assertEquals(service.getAliveJobs().size(), 0);
+
+        // schedule exception
+        GriffinException.GetJobsFailureException exception = null;
         try {
-            Scheduler scheduler=Mockito.mock(Scheduler.class);
-            given(factory.getObject()).willReturn(scheduler);
-            List<Map<String, Serializable>> tmp = service.getAliveJobs();
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get all jobs info from dbs");
+            given(scheduler.getTriggersOfJob(jobKey)).willThrow(new GriffinException.GetJobsFailureException());
+            service.getAliveJobs();
+        } catch (GriffinException.GetJobsFailureException e) {
+            exception = e;
         }
-    }
+        assertTrue(exception != null);
 
-    @Test
-    public void testSetJobsByKey(){
-        try {
-            List<Map<String, Serializable>> list = new ArrayList<Map<String, Serializable>>();
-            Scheduler scheduler = Mockito.mock(Scheduler.class);
-            JobKey jobKey = new JobKey("TEST");
-            List<Trigger> triggers = new ArrayList<Trigger>();
-            Trigger trigger = new CronTriggerImpl();
-            triggers.add(trigger);
-            given((List<Trigger>) scheduler.getTriggersOfJob(jobKey)).willReturn(triggers);
-
-            JobDetail jd = Mockito.mock(JobDetail.class);
-            given(scheduler.getJobDetail(jobKey)).willReturn(jd);
-
-            JobDataMap jobDataMap = Mockito.mock(JobDataMap.class);
-            given(jd.getJobDataMap()).willReturn(jobDataMap);
-
-            //        service.setJobsByKey(list,scheduler,jobKey);
-        } catch (SchedulerException e) {
-            fail("can't set jobs by key.");
-        }
 
     }
 
     @Test
-    public void testAddJob(){
-        try {
-            String groupName="BA";
-            String jobName="job1";
-            long measureId=0;
-            JobRequestBody jobRequestBody =new JobRequestBody();
-            Scheduler scheduler=Mockito.mock(Scheduler.class);
-            given(factory.getObject()).willReturn(scheduler);
-            GriffinOperationMessage tmp = service.addJob(groupName,jobName,measureId, jobRequestBody);
-            assertEquals(tmp,GriffinOperationMessage.CREATE_JOB_FAIL);
-            assertTrue(true);
-
-            JobRequestBody jobRequestBody1 =new JobRequestBody("YYYYMMdd-HH","YYYYMMdd-HH",
-                    System.currentTimeMillis()+"",System.currentTimeMillis()+"","1000");
-            Scheduler scheduler1=Mockito.mock(Scheduler.class);
-            given(factory.getObject()).willReturn(scheduler1);
-            GriffinOperationMessage tmp1 = service.addJob(groupName,jobName,measureId, jobRequestBody1);
-            assertEquals(tmp1,GriffinOperationMessage.CREATE_JOB_SUCCESS);
-        }catch (Throwable t){
-            fail("Cannot add job ");
-        }
+    public void testAddJob() {
+        String groupName = "BA";
+        String jobName = "job1";
+        long measureId = 0;
+        JobRequestBody jobRequestBody = new JobRequestBody();
+        Scheduler scheduler = Mockito.mock(Scheduler.class);
+        given(factory.getObject()).willReturn(scheduler);
+        assertEquals(service.addJob(groupName, jobName, measureId, jobRequestBody), GriffinOperationMessage.CREATE_JOB_FAIL);
+
+        JobRequestBody jobRequestBody1 = new JobRequestBody("YYYYMMdd-HH", "YYYYMMdd-HH",
+                System.currentTimeMillis() + "", System.currentTimeMillis() + "", "1000");
+        Scheduler scheduler1 = Mockito.mock(Scheduler.class);
+        given(factory.getObject()).willReturn(scheduler1);
+        assertEquals(service.addJob(groupName, jobName, measureId, jobRequestBody1), GriffinOperationMessage.CREATE_JOB_SUCCESS);
     }
 
     @Test
-    public void testDeleteJob() {
-        String groupName="BA";
-        String jobName="job1";
-        JobKey jobKey=new JobKey(jobName, groupName);
-        JobDetail jobDetail = newJob(SparkSubmitJob.class)
-                .storeDurably()
-                .withIdentity(jobKey)
-                .build();
-        JobRequestBody jobRequestBody=new JobRequestBody("YYYYMMdd-HH", "YYYYMMdd-HH", null, "1503158400000", "50");
-        service.setJobData(jobDetail, jobRequestBody, 0L, groupName, jobName);
-        Scheduler scheduler=Mockito.mock(Scheduler.class);
+    public void testDeleteJob() throws SchedulerException {
+        String groupName = "BA";
+        String jobName = "job1";
+        Scheduler scheduler = Mockito.mock(Scheduler.class);
+        // DELETE_JOB_SUCCESS
         given(factory.getObject()).willReturn(scheduler);
-        try {
-            given(scheduler.getJobDetail(jobKey)).willReturn(jobDetail);
-        } catch (SchedulerException e) {
-            fail("fail to return jobDetail for scheduler.getJobDetail(jobKey)");
-        }
+        given(scheduler.getJobDetail(new JobKey(jobName,groupName))).willReturn(createJobDetail());
+        assertEquals(service.deleteJob(groupName, jobName), GriffinOperationMessage.DELETE_JOB_SUCCESS);
 
-        GriffinOperationMessage tmp = service.deleteJob(groupName,jobName);
-        assertThat(tmp).isEqualTo(GriffinOperationMessage.DELETE_JOB_SUCCESS);
+        // DELETE_JOB_FAIL
         given(factory.getObject()).willThrow(SchedulerException.class);
-        tmp = service.deleteJob(groupName,jobName);
-        assertThat(tmp).isEqualTo(GriffinOperationMessage.DELETE_JOB_FAIL);
+        assertEquals(service.deleteJob(groupName, jobName), GriffinOperationMessage.DELETE_JOB_FAIL);
     }
 
     @Test
-    public void testFindInstancesOfJob(){
-        try {
-            String groupName="BA";
-            String jobName="job1";
-            int page=0;
-            int size=2;
-            List<JobInstance> tmp = service.findInstancesOfJob(groupName,jobName,page,size);
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot find instances of Job");
-        }
+    public void testFindInstancesOfJob() {
+        String groupName = "BA";
+        String jobName = "job1";
+        int page = 0;
+        int size = 2;
+        JobInstance jobInstance = new JobInstance(groupName, jobName, 1, LivySessionStates.State.dead, "app_id", "app_uri", System.currentTimeMillis());
+        Pageable pageRequest = new PageRequest(page, size, Sort.Direction.DESC, "timestamp");
+        given(jobInstanceRepo.findByGroupNameAndJobName(groupName, jobName, pageRequest)).willReturn(Arrays.asList(jobInstance));
+        assertEquals(service.findInstancesOfJob(groupName, jobName, page, size).size(),1);
     }
 
     @Test
-    public void testGetHealthInfo(){
-        try {
-            Scheduler scheduler=Mockito.mock(Scheduler.class);
-            given(factory.getObject()).willReturn(scheduler);
-            given(scheduler.getJobGroupNames()).willReturn(Arrays.asList("BA"));
-            JobKey jobKey= new JobKey("TEST");
-            Set<JobKey> jobKeySet=new HashSet<JobKey>();
-            jobKeySet.add(jobKey);
-            given(scheduler.getJobKeys(GroupMatcher.jobGroupEquals("BA"))).willReturn((jobKeySet));
-
-            Pageable pageRequest=new PageRequest(0,1, Sort.Direction.DESC,"timestamp");
-            List<JobInstance> scheduleStateList=new ArrayList<JobInstance>();
-            JobInstance jobInstance=new JobInstance();
-            jobInstance.setGroupName("BA");
-            jobInstance.setJobName("job1");
-            jobInstance.setSessionId(1);
-            jobInstance.setState(LivySessionStates.State.starting);
-            jobInstance.setAppId("ttt");
-            jobInstance.setTimestamp(System.currentTimeMillis());
-            scheduleStateList.add(jobInstance);
-            given(jobInstanceRepo.findByGroupNameAndJobName(jobKey.getGroup(),jobKey.getName(),pageRequest)).willReturn(scheduleStateList);
-            JobHealth tmp = service.getHealthInfo();
-            assertTrue(true);
-
-            scheduleStateList.remove(0);
-            jobInstance.setState(LivySessionStates.State.success);
-            scheduleStateList.add(jobInstance);
-            given(jobInstanceRepo.findByGroupNameAndJobName(jobKey.getGroup(),jobKey.getName(),pageRequest)).willReturn(scheduleStateList);
-            JobHealth tmp1 = service.getHealthInfo();
-        }catch (Throwable t){
-            fail("Cannot get Health info "+t);
-        }
+    public void testGetHealthInfo() throws SchedulerException {
+        Scheduler scheduler = Mockito.mock(Scheduler.class);
+        given(factory.getObject()).willReturn(scheduler);
+        given(scheduler.getJobGroupNames()).willReturn(Arrays.asList("BA"));
+        JobKey jobKey = new JobKey("test");
+        Set<JobKey> jobKeySet = new HashSet<>();
+        jobKeySet.add(jobKey);
+        given(scheduler.getJobKeys(GroupMatcher.jobGroupEquals("BA"))).willReturn((jobKeySet));
+
+        Pageable pageRequest = new PageRequest(0, 1, Sort.Direction.DESC, "timestamp");
+        List<JobInstance> scheduleStateList = new ArrayList<>();
+        JobInstance jobInstance = new JobInstance();
+        jobInstance.setGroupName("BA");
+        jobInstance.setJobName("job1");
+        jobInstance.setSessionId(1);
+        jobInstance.setState(LivySessionStates.State.starting);
+        jobInstance.setAppId("app_id");
+        jobInstance.setTimestamp(System.currentTimeMillis());
+        scheduleStateList.add(jobInstance);
+        given(jobInstanceRepo.findByGroupNameAndJobName(jobKey.getGroup(), jobKey.getName(), pageRequest)).willReturn(scheduleStateList);
+        JobHealth health1 = service.getHealthInfo();
+        assertEquals(health1.getHealthyJobCount(),1);
+
+        scheduleStateList.remove(0);
+        jobInstance.setState(LivySessionStates.State.error);
+        scheduleStateList.add(jobInstance);
+        given(jobInstanceRepo.findByGroupNameAndJobName(jobKey.getGroup(), jobKey.getName(), pageRequest)).willReturn(scheduleStateList);
+        JobHealth health2 = service.getHealthInfo();
+        assertEquals(health2.getHealthyJobCount(),0);
     }
 
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/job/SparkSubmitJobTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/job/SparkSubmitJobTest.java b/service/src/test/java/org/apache/griffin/core/job/SparkSubmitJobTest.java
index d748b01..60ddf3b 100644
--- a/service/src/test/java/org/apache/griffin/core/job/SparkSubmitJobTest.java
+++ b/service/src/test/java/org/apache/griffin/core/job/SparkSubmitJobTest.java
@@ -7,7 +7,7 @@ to you under the Apache License, Version 2.0 (the
 "License"); you may not use this file except in compliance
 with the License.  You may obtain a copy of the License at
 
-  http:www.apache.org/licenses/LICENSE-2.0
+  http://www.apache.org/licenses/LICENSE-2.0
 
 Unless required by applicable law or agreed to in writing,
 software distributed under the License is distributed on an
@@ -19,209 +19,80 @@ under the License.
 
 package org.apache.griffin.core.job;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.griffin.core.job.entity.JobInstance;
 import org.apache.griffin.core.job.entity.SparkJobDO;
-import org.apache.griffin.core.measure.entity.DataConnector;
-import org.apache.griffin.core.measure.entity.EvaluateRule;
-import org.apache.griffin.core.measure.entity.Measure;
+import org.apache.griffin.core.job.repo.JobInstanceRepo;
 import org.apache.griffin.core.measure.repo.MeasureRepo;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
+import org.apache.griffin.core.util.GriffinUtil;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-import org.quartz.JobDataMap;
 import org.quartz.JobDetail;
 import org.quartz.JobExecutionContext;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.SpringBootTest;
 import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.boot.test.mock.mockito.MockBean;
 import org.springframework.context.annotation.Bean;
-import org.springframework.core.io.ClassPathResource;
-import org.springframework.core.io.ResourceLoader;
-import org.springframework.test.context.jdbc.Sql;
 import org.springframework.test.context.junit4.SpringRunner;
 import org.springframework.web.client.RestTemplate;
 
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.net.URL;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
 import java.util.Properties;
 
-import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.*;
+import static org.apache.griffin.core.measure.MeasureTestHelper.createATestMeasure;
+import static org.apache.griffin.core.measure.MeasureTestHelper.createJobDetail;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.BDDMockito.given;
+import static org.mockito.Mockito.mock;
 
 
 @RunWith(SpringRunner.class)
-public class SparkSubmitJobTest{
+public class SparkSubmitJobTest {
 
+    @TestConfiguration
+    public static class SchedulerServiceConfiguration {
+        @Bean
+        public SparkSubmitJob sparkSubmitJobBean() {
+            return new SparkSubmitJob();
+        }
 
-    @InjectMocks
-    private SparkSubmitJob ssj=new SparkSubmitJob();
-
-    @Mock
-    private MeasureRepo measureRepo;
-
-    @Mock
-    private Properties sparkJobProps;
-    @Mock
-    private RestTemplate restTemplate;
-   /* @Before
-    public void setUp() throws IOException {
-        ssj=new SparkSubmitJob();
-        ssj.measureRepo=mock(MeasureRepo.class);
-        ssj.restTemplate= mock(RestTemplate.class);
-    }*/
-
-/*    @Test
-    public void test_execute() throws Exception {
-        JobExecutionContext context=mock(JobExecutionContext.class);
-        JobDetail jd = mock(JobDetail.class);
-        when(context.getJobDetail()).thenReturn(jd);
-
-        JobDataMap jdmap = mock(JobDataMap.class);
-        when(jd.getJobDataMap()).thenReturn(jdmap);
-
-        when(jdmap.getString("measure")).thenReturn("bevssoj");
-        when(jdmap.getString("sourcePat")).thenReturn("YYYYMMDD-HH");
-        when(jdmap.getString("targetPat")).thenReturn("YYYYMMDD-HH");
-        when(jdmap.getString("dataStartTimestamp")).thenReturn("1460174400000");
-        when(jdmap.getString("lastTime")).thenReturn("");
-        when(jdmap.getString("periodTime")).thenReturn("10");
-        Measure measure = createATestMeasure("viewitem_hourly","bullseye");
-        when(ssj.measureRepo.findByName("bevssoj")).thenReturn(measure);
-
-        RestTemplate restTemplate =Mockito.mock(RestTemplate.class);
-        PowerMockito.whenNew(RestTemplate.class).withAnyArguments().thenReturn(restTemplate);
-        String uri=ssj.uri;
-        SparkJobDO sparkJobDO= Mockito.mock(SparkJobDO.class);
-        PowerMockito.when(restTemplate.postForObject(uri, sparkJobDO, String.class)).thenReturn(null);
-        when(restTemplate.postForObject(uri, sparkJobDO, String.class)).thenReturn(null);
-        ssj.execute(context);
-
-        long currentSystemTimestamp=System.currentTimeMillis();
-        long currentTimstamp = ssj.setCurrentTimestamp(currentSystemTimestamp);
-
-        verify(ssj.measureRepo).findByName("bevssoj");
-        verify(jdmap,atLeast(2)).put("lastTime",currentTimstamp+"");
-
-        when(ssj.measureRepo.findByName("bevssoj")).thenReturn(null);
-        ssj.execute(context);
-
-        when(ssj.measureRepo.findByName("bevssoj")).thenReturn(measure);
-        String result="{\"id\":8718,\"state\":\"starting\",\"appId\":null,\"appInfo\":{\"driverLogUrl\":null,\"sparkUiUrl\":null},\"log\":[]}";
-        when(restTemplate.postForObject(uri, sparkJobDO, String.class)).thenReturn(result);
-        ssj.execute(context);
-    }*/
-    @Test
-    public void test_execute() throws Exception {
-        JobExecutionContext context=mock(JobExecutionContext.class);
-        JobDetail jd = mock(JobDetail.class);
-        when(context.getJobDetail()).thenReturn(jd);
-        JobDataMap jdmap = mock(JobDataMap.class);
-        when(jd.getJobDataMap()).thenReturn(jdmap);
-        when(jdmap.getString("measureId")).thenReturn("0");
-        when(jdmap.getString("sourcePat")).thenReturn("YYYYMMDD-HH");
-        when(jdmap.getString("targetPat")).thenReturn("YYYYMMDD-HH");
-        when(jdmap.getString("dataStartTimestamp")).thenReturn("1460174400000");
-        when(jdmap.getString("lastTime")).thenReturn("");
-        when(jdmap.getString("periodTime")).thenReturn("10");
-        Measure measure = createATestMeasure("viewitem_hourly","bullseye");
-        when(measureRepo.findOne(Long.valueOf("0"))).thenReturn(measure);
-        Properties sparkJobProperties=new Properties();
-        sparkJobProperties.load(new FileInputStream(new ClassPathResource("sparkJob.properties").getFile()));
-        when(sparkJobProps.getProperty("sparkJob.dateAndHour")).thenReturn(sparkJobProperties.getProperty("sparkJob.dateAndHour"));
-        when(sparkJobProps.getProperty("sparkJob.numExecutors")).thenReturn(sparkJobProperties.getProperty("sparkJob.numExecutors"));
-        when(sparkJobProps.getProperty("sparkJob.executorCores")).thenReturn(sparkJobProperties.getProperty("sparkJob.executorCores"));
-        when(sparkJobProps.getProperty("sparkJob.driverMemory")).thenReturn(sparkJobProperties.getProperty("sparkJob.driverMemory"));
-        when(sparkJobProps.getProperty("sparkJob.executorMemory")).thenReturn(sparkJobProperties.getProperty("sparkJob.executorMemory"));
-        String uri=sparkJobProperties.getProperty("livy.uri");
-        when(sparkJobProps.getProperty("livy.uri")).thenReturn(uri);
-        SparkJobDO sparkJobDO= mock(SparkJobDO.class);
-        when(restTemplate.postForObject(uri, sparkJobDO, String.class)).thenReturn(null);
-        ssj.execute(context);
+        @Bean
+        public Properties sparkJobProps() {
+            return GriffinUtil.getProperties("/sparkJob.properties");
+        }
 
     }
 
-    private Measure createATestMeasure(String name,String org)throws IOException,Exception{
-        HashMap<String,String> configMap1=new HashMap<>();
-        configMap1.put("database","default");
-        configMap1.put("table.name","test_data_src");
-        HashMap<String,String> configMap2=new HashMap<>();
-        configMap2.put("database","default");
-        configMap2.put("table.name","test_data_tgt");
-        String configJson1 = new org.codehaus.jackson.map.ObjectMapper().writeValueAsString(configMap1);
-        String configJson2 = new org.codehaus.jackson.map.ObjectMapper().writeValueAsString(configMap2);
+    @Autowired
+    private SparkSubmitJob sparkSubmitJob;
 
-        DataConnector source = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson1);
-        DataConnector target = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson2);
-
-        String rules = "$source.uage > 100 AND $source.uid = $target.uid AND $source.uage + 12 = $target.uage + 10 + 2 AND $source.udes + 11 = $target.udes + 1 + 1";
+    @MockBean
+    private MeasureRepo measureRepo;
 
-        EvaluateRule eRule = new EvaluateRule(1,rules);
+    @MockBean
+    private RestTemplate restTemplate;
 
-        Measure measure = new Measure(name,"bevssoj description", Measure.MearuseType.accuracy, org, source, target, eRule,"test1");
+    @MockBean
+    private JobInstanceRepo jobInstanceRepo;
 
-        return measure;
+    @Before
+    public void setUp() {
     }
 
     @Test
-    public void test_genPartitions(){
-        String[] patternItemSet={"YYYYMMDD","HH"};
-        String[] partitionItemSet={"date","hour"};
-        long timestamp=System.currentTimeMillis();
-        Map<String,String> par=ssj.genPartitionMap(patternItemSet,partitionItemSet,timestamp);
-        Map<String,String> verifyMap=new HashMap<>();
-        SimpleDateFormat sdf = new SimpleDateFormat("YYYYMMdd");
-        verifyMap.put("date",sdf.format(new Date(timestamp)));
-        SimpleDateFormat sdf1 = new SimpleDateFormat("HH");
-        verifyMap.put("hour",sdf1.format(new Date(timestamp)));
-        assertEquals(verifyMap,par);
-    }
+    public void testExecute() throws Exception {
+        String livyUri = null;
+        String result = "{\"id\":1,\"state\":\"starting\",\"appId\":null,\"appInfo\":{\"driverLogUrl\":null,\"sparkUiUrl\":null},\"log\":[]}";
+        JobExecutionContext context = mock(JobExecutionContext.class);
+        JobDetail jd = createJobDetail();
+        given(context.getJobDetail()).willReturn(jd);
+        given(measureRepo.findOne(Long.valueOf(jd.getJobDataMap().getString("measureId")))).willReturn(createATestMeasure("view_item_hourly", "ebay"));
+        given(restTemplate.postForObject(livyUri, new SparkJobDO(), String.class)).willReturn(result);
+        given(jobInstanceRepo.save(new JobInstance())).willReturn(new JobInstance());
+        sparkSubmitJob.execute(context);
+        assertTrue(true);
 
-  /*  @Test
-    public void test_setDataConnectorPartitions(){
-        DataConnector dc=mock(DataConnector.class);
-        String[] patternItemSet={"YYYYMMDD","HH"};
-        String[] partitionItemSet={"date","hour"};
-        long timestamp=System.currentTimeMillis();
-        ssj.setDataConnectorPartitions(dc,patternItemSet,partitionItemSet,timestamp);
-        Map<String,String> map=new HashMap<>();
-        SimpleDateFormat sdf = new SimpleDateFormat("YYYYMMdd");
-        SimpleDateFormat sdf1 = new SimpleDateFormat("HH");
-        map.put("partitions","date="+sdf.format(new Date(timestamp))+", hour="+sdf1.format(new Date(timestamp)));
-        try {
-            verify(dc).setConfig(map);
-        } catch (JsonProcessingException e) {
-            e.printStackTrace();
-        }
-    }*/
-
-/*    @Test
-    public void test_setCurrentTimestamp(){
-        long timestamp=System.currentTimeMillis();
-        ssj.eachJoblastTimestamp="";
-        System.out.println(ssj.setCurrentTimestamp(timestamp));
-        ssj.eachJoblastTimestamp=(timestamp-1000)+"";
-        ssj.periodTime="1000";
-        System.out.println(ssj.setCurrentTimestamp(timestamp));
-    }*/
 
-    @Test
-    public void test_setSparkJobDO(){
-        ssj=mock(SparkSubmitJob.class);
-        doNothing().when(ssj).setSparkJobDO();
     }
 
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/measure/MeasureControllerTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/measure/MeasureControllerTest.java b/service/src/test/java/org/apache/griffin/core/measure/MeasureControllerTest.java
index e26bf47..66ebb59 100644
--- a/service/src/test/java/org/apache/griffin/core/measure/MeasureControllerTest.java
+++ b/service/src/test/java/org/apache/griffin/core/measure/MeasureControllerTest.java
@@ -19,15 +19,12 @@ under the License.
 
 package org.apache.griffin.core.measure;
 
-import org.apache.griffin.core.measure.entity.DataConnector;
-import org.apache.griffin.core.measure.entity.EvaluateRule;
 import org.apache.griffin.core.measure.entity.Measure;
 import org.apache.griffin.core.util.GriffinOperationMessage;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
-import org.mockito.Mockito;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
 import org.springframework.boot.test.mock.mockito.MockBean;
@@ -35,9 +32,9 @@ import org.springframework.http.MediaType;
 import org.springframework.test.context.junit4.SpringRunner;
 import org.springframework.test.web.servlet.MockMvc;
 
-import java.io.IOException;
 import java.util.*;
 
+import static org.apache.griffin.core.measure.MeasureTestHelper.createATestMeasure;
 import static org.hamcrest.CoreMatchers.is;
 import static org.mockito.BDDMockito.given;
 import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
@@ -45,7 +42,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
 import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
 
 @RunWith(SpringRunner.class)
-@WebMvcTest(value=MeasureController.class,secure = false)
+@WebMvcTest(value = MeasureController.class, secure = false)
 public class MeasureControllerTest {
     @Autowired
     private MockMvc mvc;
@@ -53,125 +50,122 @@ public class MeasureControllerTest {
     @MockBean
     private MeasureService service;
 
+
     @Before
-    public void setup(){
+    public void setup() {
+
     }
 
     @Test
-    public void testGetAllMeasures() throws IOException,Exception{
-        Measure measure = createATestMeasure("viewitem_hourly","bullseye");
-
+    public void testGetAllMeasures() throws Exception {
+        Measure measure = createATestMeasure("view_item_hourly", "ebay");
         given(service.getAllAliveMeasures()).willReturn(Arrays.asList(measure));
-
         mvc.perform(get("/measures").contentType(MediaType.APPLICATION_JSON))
-//                .andDo(print())
                 .andExpect(status().isOk())
-                .andExpect(jsonPath("$.[0].name",is("viewitem_hourly")))
-        ;
+                .andExpect(jsonPath("$.[0].name", is("view_item_hourly")));
     }
 
 
     @Test
-    public void testGetMeasuresById() throws IOException,Exception{
-        Measure measure = createATestMeasure("viewitem_hourly","bullseye");
-
+    public void testGetMeasuresById() throws Exception {
+        Measure measure = createATestMeasure("view_item_hourly", "ebay");
         given(service.getMeasureById(1L)).willReturn(measure);
-
         mvc.perform(get("/measure/1").contentType(MediaType.APPLICATION_JSON))
-//                .andDo(print())
                 .andExpect(status().isOk())
-                .andExpect(jsonPath("$.name",is("viewitem_hourly")))
+                .andExpect(jsonPath("$.name", is("view_item_hourly")))
         ;
     }
 
-
     @Test
-    public void testDeleteMeasuresById() throws Exception{
+    public void testDeleteMeasuresById() throws Exception {
+        Measure measure = createATestMeasure("view_item_hourly", "ebay");
+        // RESOURCE_NOT_FOUND
+        given(service.deleteMeasureById(1L)).willReturn(GriffinOperationMessage.RESOURCE_NOT_FOUND);
+        mvc.perform(delete("/measure/1").contentType(MediaType.APPLICATION_JSON))
+                .andExpect(status().isOk())
+                .andExpect(jsonPath("$.description", is("Resource Not Found")))
+                .andExpect(jsonPath("$.code", is(400)));
+
+        // DELETE_MEASURE_BY_ID_SUCCESS
         given(service.deleteMeasureById(1L)).willReturn(GriffinOperationMessage.DELETE_MEASURE_BY_ID_SUCCESS);
         mvc.perform(delete("/measure/1").contentType(MediaType.APPLICATION_JSON))
                 .andExpect(status().isOk())
-                .andExpect(jsonPath("$.code", is(GriffinOperationMessage.DELETE_MEASURE_BY_ID_SUCCESS.getCode())))
-                .andExpect(jsonPath("$.description", is(GriffinOperationMessage.DELETE_MEASURE_BY_ID_SUCCESS.getDescription())))
-        ;
+                .andExpect(jsonPath("$.description", is("Delete Measures By Name Succeed")))
+                .andExpect(jsonPath("$.code", is(202)));
     }
 
-/*    @Test
-    public void testDeleteMeasuresByName() throws Exception{
-        given(service.deleteMeasureById(0L)).willReturn(GriffinOperationMessage.DELETE_MEASURE_BY_NAME_SUCCESS);
 
-        mvc.perform(delete("/measures/deleteByName/"+measureName).contentType(MediaType.APPLICATION_JSON))
+    @Test
+    public void testUpdateMeasure() throws Exception {
+        Measure measure = createATestMeasure("view_item_hourly", "ebay");
+        ObjectMapper mapper = new ObjectMapper();
+        String measureJson = mapper.writeValueAsString(measure);
+
+        // RESOURCE_NOT_FOUND
+        given(service.updateMeasure(measure)).willReturn(GriffinOperationMessage.RESOURCE_NOT_FOUND);
+        mvc.perform(put("/measure").contentType(MediaType.APPLICATION_JSON).content(measureJson))
                 .andExpect(status().isOk())
-                .andExpect(jsonPath("$",is("DELETE_MEASURE_BY_NAME_SUCCESS")))
-        ;
-    }*/
+                .andExpect(jsonPath("$.description", is("Resource Not Found")))
+                .andExpect(jsonPath("$.code", is(400)));
 
-    @Test
-    public void testUpdateMeasure() throws Exception{
-        String measureName="viewitem_hourly";
-        String org="bullseye";
-        Measure measure=createATestMeasure(measureName,org);
-        ObjectMapper mapper=new ObjectMapper();
-        String measureJson=mapper.writeValueAsString(measure);
-        given(service.updateMeasure(measure)).willReturn(GriffinOperationMessage.UPDATE_MEASURE_SUCCESS);
+        // UPDATE_MEASURE_FAIL
+        given(service.updateMeasure(measure)).willReturn(GriffinOperationMessage.UPDATE_MEASURE_FAIL);
+        mvc.perform(put("/measure").contentType(MediaType.APPLICATION_JSON).content(measureJson))
+                .andExpect(status().isOk())
+                .andExpect(jsonPath("$.description", is("Update Measure Failed")))
+                .andExpect(jsonPath("$.code", is(404)));
 
+        // UPDATE_MEASURE_SUCCESS
+        given(service.updateMeasure(measure)).willReturn(GriffinOperationMessage.UPDATE_MEASURE_SUCCESS);
         mvc.perform(put("/measure").contentType(MediaType.APPLICATION_JSON).content(measureJson))
                 .andExpect(status().isOk())
-                .andExpect(jsonPath("$.description",is(GriffinOperationMessage.UPDATE_MEASURE_SUCCESS.getDescription())))
-                .andExpect(jsonPath("$.code", is(GriffinOperationMessage.UPDATE_MEASURE_SUCCESS.getCode())))
-        ;
+                .andExpect(jsonPath("$.description", is("Update Measure Succeed")))
+                .andExpect(jsonPath("$.code", is(204)));
     }
 
     @Test
-    public void testGetAllAliveMeasureNameIdByOwner() throws Exception{
-        String Owner="test1";
-        List<Map<String, String>> measureList=new LinkedList<>();
+    public void testGetAllMeasuresOfOwner() throws Exception {
+        String owner = "test";
+        List<Map<String, String>> measureList = new LinkedList<>();
         HashMap<String, String> map = new HashMap<>();
-        map.put("name", "viewitem_hourly");
+        map.put("name", "view_item_hourly");
         map.put("id", "0");
         measureList.add(map);
-        given(service.getAllAliveMeasureNameIdByOwner(Owner)).willReturn(measureList);
+        given(service.getAllAliveMeasureNameIdByOwner(owner)).willReturn(measureList);
 
-        mvc.perform(get("/measures/owner/"+Owner).contentType(MediaType.APPLICATION_JSON))
+        mvc.perform(get("/measures/owner/" + owner).contentType(MediaType.APPLICATION_JSON))
                 .andExpect(status().isOk())
-                .andExpect(jsonPath("$.[0].name",is("viewitem_hourly")))
+                .andExpect(jsonPath("$.[0].name", is("view_item_hourly")))
         ;
     }
 
     @Test
-    public void testCreateNewMeasure() throws Exception{
-        String measureName="viewitem_hourly";
-        String org="bullseye";
-        Measure measure=createATestMeasure(measureName,org);
-        ObjectMapper mapper=new ObjectMapper();
-        String measureJson=mapper.writeValueAsString(measure);
-        given(service.createMeasure(measure)).willReturn(GriffinOperationMessage.CREATE_MEASURE_SUCCESS);
-
+    public void testCreateNewMeasure() throws Exception {
+        String measureName = "view_item_hourly";
+        String org = "ebay";
+        Measure measure = createATestMeasure(measureName, org);
+        ObjectMapper mapper = new ObjectMapper();
+        String measureJson = mapper.writeValueAsString(measure);
+        // CREATE_MEASURE_FAIL
+        given(service.createMeasure(measure)).willReturn(GriffinOperationMessage.CREATE_MEASURE_FAIL);
         mvc.perform(post("/measure").contentType(MediaType.APPLICATION_JSON).content(measureJson))
                 .andExpect(status().isOk())
-                .andExpect(jsonPath("$.description",is(GriffinOperationMessage.CREATE_MEASURE_SUCCESS.getDescription())))
-                .andExpect(jsonPath("$.code", is(GriffinOperationMessage.CREATE_MEASURE_SUCCESS.getCode())))
-        ;
-    }
+                .andExpect(jsonPath("$.description", is("Create Measure Failed")))
+                .andExpect(jsonPath("$.code", is(401)));
 
-    private Measure createATestMeasure(String name,String org)throws IOException,Exception{
-        HashMap<String,String> configMap1=new HashMap<>();
-        configMap1.put("database","default");
-        configMap1.put("table.name","test_data_src");
-        HashMap<String,String> configMap2=new HashMap<>();
-        configMap2.put("database","default");
-        configMap2.put("table.name","test_data_tgt");
-        String configJson1 = new ObjectMapper().writeValueAsString(configMap1);
-        String configJson2 = new ObjectMapper().writeValueAsString(configMap2);
-
-        DataConnector source = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson1);
-        DataConnector target = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson2);
-
-        String rules = "$source.uage > 100 AND $source.ue = $target.uid AND $source.uage + 12 = $target.uage + 10 + 2 AND $source.udes + 11 = $target.udes + 1 + 1";
-
-        EvaluateRule eRule = new EvaluateRule(1,rules);
+        // CREATE_MEASURE_FAIL_DUPLICATE
+        given(service.createMeasure(measure)).willReturn(GriffinOperationMessage.CREATE_MEASURE_FAIL_DUPLICATE);
+        mvc.perform(post("/measure").contentType(MediaType.APPLICATION_JSON).content(measureJson))
+                .andExpect(status().isOk())
+                .andExpect(jsonPath("$.description", is("Create Measure Failed, duplicate records")))
+                .andExpect(jsonPath("$.code", is(410)));
 
-        Measure measure = new Measure(name,"bevssoj description", Measure.MearuseType.accuracy, org, source, target, eRule,"test1");
-        return measure;
+        // CREATE_MEASURE_SUCCESS
+        given(service.createMeasure(measure)).willReturn(GriffinOperationMessage.CREATE_MEASURE_SUCCESS);
+        mvc.perform(post("/measure").contentType(MediaType.APPLICATION_JSON).content(measureJson))
+                .andExpect(status().isOk())
+                .andExpect(jsonPath("$.description", is("Create Measure Succeed")))
+                .andExpect(jsonPath("$.code", is(201)));
     }
 
 

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/measure/MeasureRepoTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/measure/MeasureRepoTest.java b/service/src/test/java/org/apache/griffin/core/measure/MeasureRepoTest.java
index 72cf7a4..5f929f3 100644
--- a/service/src/test/java/org/apache/griffin/core/measure/MeasureRepoTest.java
+++ b/service/src/test/java/org/apache/griffin/core/measure/MeasureRepoTest.java
@@ -1,125 +1,106 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
-package org.apache.griffin.core.measure;
-
-import org.apache.griffin.core.measure.entity.DataConnector;
-import org.apache.griffin.core.measure.entity.EvaluateRule;
-import org.apache.griffin.core.measure.entity.Measure;
-import org.apache.griffin.core.measure.repo.DataConnectorRepo;
-import org.apache.griffin.core.measure.repo.EvaluateRuleRepo;
-import org.apache.griffin.core.measure.repo.MeasureRepo;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
-import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
-import org.springframework.context.annotation.PropertySource;
-import org.springframework.test.context.jdbc.Sql;
-import org.springframework.test.context.junit4.SpringRunner;
-import org.springframework.transaction.annotation.Propagation;
-import org.springframework.transaction.annotation.Transactional;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.List;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-@RunWith(SpringRunner.class)
-@PropertySource("classpath:application.properties")
-@DataJpaTest
-//@Sql(value = {"classpath:Init_quartz.sql", "classpath:quartz-test.sql"})
-@Sql("classpath:test.sql")
-public class MeasureRepoTest {
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(MeasureRepoTest.class);
-
-
-    @Autowired
-    private TestEntityManager testEntityManager;
-
-    @Autowired
-    private MeasureRepo measureRepo;
-    @Autowired
-    private DataConnectorRepo dataConnectorRepo;
-    @Autowired
-    private EvaluateRuleRepo evaluateRuleRepo;
-
-    @Test
-    public void testFindOrganizations() {
-        Iterable<String> orgs = measureRepo.findOrganizations();
-        System.out.println(orgs);
-        for (String org : orgs) {
-            assertThat(org).isEqualTo("eBay");
-        }
-
-    }
-
-    @Test
-    public void testFindNameByOrganization() {
-        List<String> names = measureRepo.findNameByOrganization("eBay");
-        assertThat(names.get(0)).isEqualTo("buy_rates_hourly");
-        assertThat(names.get(1)).isEqualTo("griffin_aver");
-    }
-
-    @Test
-    public void testFindOrgByName() {
-        assertThat(measureRepo.findOrgByName("buy_rates_hourly")).isEqualTo("eBay");
-        assertThat(measureRepo.findOrgByName("griffin_aver")).isEqualTo("eBay");
-    }
-
-   /* @Test
-    @Transactional(propagation = Propagation.NOT_SUPPORTED)
-    public void testUpdateMeasure() {
-        HashMap<String, String> sourceMap = new HashMap<>();
-        sourceMap.put("database", "griffin");
-        sourceMap.put("table.name", "count");
-        DataConnector source = new DataConnector(DataConnector.ConnectorType.HIVE, "1.3", sourceMap);
-        HashMap<String, String> targetMap = new HashMap<>();
-        targetMap.put("database", "default");
-        targetMap.put("table.name", "avr_in");
-        DataConnector target = null;
-        try {
-            target = new DataConnector(DataConnector.ConnectorType.HIVE, "1.4", new ObjectMapper().writeValueAsString(targetMap));
-        } catch (IOException e) {
-            LOGGER.error("Fail to convert map to string using ObjectMapper.");
-        }
-
-        EvaluateRule rule = new EvaluateRule(0, "$source['uid'] == $target['url'] AND $source['uage'] == $target['createdts']");
-        //save before flushing
-        dataConnectorRepo.save(source);
-        dataConnectorRepo.save(target);
-        evaluateRuleRepo.save(rule);
-        measureRepo.updateMeasure((long) 1, "new desc2", "Paypal", source, target, rule);
-        for (Measure measure : measureRepo.findAll()) {
-            if (measure.getId().equals((long) 1)) {
-                assertThat(measure.getDescription()).isEqualTo("new desc2");
-                assertThat(measure.getOrganization()).isEqualTo("Paypal");
-                assertThat(measure.getSource()).isEqualTo(source);
-                assertThat(measure.getTarget()).isEqualTo(target);
-                assertThat(measure.getEvaluateRule()).isEqualTo(rule);
-            }
-        }
-
-    }*/
-}
+package org.apache.griffin.core.measure;//package org.apache.griffin.core.measure;
+//
+//import org.apache.griffin.core.measure.entity.DataConnector;
+//import org.apache.griffin.core.measure.entity.EvaluateRule;
+//import org.apache.griffin.core.measure.entity.Measure;
+//import org.apache.griffin.core.measure.repo.DataConnectorRepo;
+//import org.apache.griffin.core.measure.repo.EvaluateRuleRepo;
+//import org.apache.griffin.core.measure.repo.MeasureRepo;
+//import org.codehaus.jackson.map.ObjectMapper;
+//import org.junit.Test;
+//import org.junit.runner.RunWith;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//import org.springframework.beans.factory.annotation.Autowired;
+//import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
+//import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
+//import org.springframework.context.annotation.PropertySource;
+//import org.springframework.test.context.jdbc.Sql;
+//import org.springframework.test.context.junit4.SpringRunner;
+//import org.springframework.transaction.annotation.Propagation;
+//import org.springframework.transaction.annotation.Transactional;
+//
+//import java.io.IOException;
+//import java.util.HashMap;
+//import java.util.List;
+//
+//import static org.assertj.core.api.Assertions.assertThat;
+//
+//@RunWith(SpringRunner.class)
+//@PropertySource("classpath:application.properties")
+//@DataJpaTest
+////@Sql(value = {"classpath:Init_quartz.sql", "classpath:quartz-test.sql"})
+//@Sql("classpath:test.sql")
+//public class MeasureRepoTest {
+//
+//    private static final Logger LOGGER = LoggerFactory.getLogger(MeasureRepoTest.class);
+//
+//
+//    @Autowired
+//    private TestEntityManager testEntityManager;
+//
+//    @Autowired
+//    private MeasureRepo measureRepo;
+//    @Autowired
+//    private DataConnectorRepo dataConnectorRepo;
+//    @Autowired
+//    private EvaluateRuleRepo evaluateRuleRepo;
+//
+//    @Test
+//    public void testFindOrganizations() {
+//        Iterable<String> orgs = measureRepo.findOrganizations();
+//        System.out.println(orgs);
+//        for (String org : orgs) {
+//            assertThat(org).isEqualTo("eBay");
+//        }
+//
+//    }
+//
+//    @Test
+//    public void testFindNameByOrganization() {
+//        List<String> names = measureRepo.findNameByOrganization("eBay");
+//        assertThat(names.get(0)).isEqualTo("buy_rates_hourly");
+//        assertThat(names.get(1)).isEqualTo("griffin_aver");
+//    }
+//
+//    @Test
+//    public void testFindOrgByName() {
+//        assertThat(measureRepo.findOrgByName("buy_rates_hourly")).isEqualTo("eBay");
+//        assertThat(measureRepo.findOrgByName("griffin_aver")).isEqualTo("eBay");
+//    }
+//
+//   /* @Test
+//    @Transactional(propagation = Propagation.NOT_SUPPORTED)
+//    public void testUpdateMeasure() {
+//        HashMap<String, String> sourceMap = new HashMap<>();
+//        sourceMap.put("database", "griffin");
+//        sourceMap.put("table.name", "count");
+//        DataConnector source = new DataConnector(DataConnector.ConnectorType.HIVE, "1.3", sourceMap);
+//        HashMap<String, String> targetMap = new HashMap<>();
+//        targetMap.put("database", "default");
+//        targetMap.put("table.name", "avr_in");
+//        DataConnector target = null;
+//        try {
+//            target = new DataConnector(DataConnector.ConnectorType.HIVE, "1.4", new ObjectMapper().writeValueAsString(targetMap));
+//        } catch (IOException e) {
+//            LOGGER.error("Fail to convert map to string using ObjectMapper.");
+//        }
+//
+//        EvaluateRule rule = new EvaluateRule(0, "$source['uid'] == $target['url'] AND $source['uage'] == $target['createdts']");
+//        //save before flushing
+//        dataConnectorRepo.save(source);
+//        dataConnectorRepo.save(target);
+//        evaluateRuleRepo.save(rule);
+//        measureRepo.updateMeasure((long) 1, "new desc2", "Paypal", source, target, rule);
+//        for (Measure measure : measureRepo.findAll()) {
+//            if (measure.getId().equals((long) 1)) {
+//                assertThat(measure.getDescription()).isEqualTo("new desc2");
+//                assertThat(measure.getOrganization()).isEqualTo("Paypal");
+//                assertThat(measure.getSource()).isEqualTo(source);
+//                assertThat(measure.getTarget()).isEqualTo(target);
+//                assertThat(measure.getEvaluateRule()).isEqualTo(rule);
+//            }
+//        }
+//
+//    }*/
+//}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/measure/MeasureServiceImplTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/measure/MeasureServiceImplTest.java b/service/src/test/java/org/apache/griffin/core/measure/MeasureServiceImplTest.java
index 00fad9c..bc6b2ae 100644
--- a/service/src/test/java/org/apache/griffin/core/measure/MeasureServiceImplTest.java
+++ b/service/src/test/java/org/apache/griffin/core/measure/MeasureServiceImplTest.java
@@ -20,10 +20,7 @@ under the License.
 package org.apache.griffin.core.measure;
 
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.griffin.core.job.JobServiceImpl;
-import org.apache.griffin.core.measure.entity.DataConnector;
-import org.apache.griffin.core.measure.entity.EvaluateRule;
 import org.apache.griffin.core.measure.entity.Measure;
 import org.apache.griffin.core.measure.repo.MeasureRepo;
 import org.apache.griffin.core.util.GriffinOperationMessage;
@@ -32,180 +29,117 @@ import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.TestConfiguration;
-import org.springframework.boot.test.mock.mockito.MockBean;
-import org.springframework.context.annotation.Bean;
 import org.springframework.test.context.junit4.SpringRunner;
 
-import java.io.IOException;
-import java.util.*;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
 
-import static org.assertj.core.api.Assertions.fail;
+import static org.apache.griffin.core.measure.MeasureTestHelper.createATestMeasure;
+import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 import static org.mockito.BDDMockito.given;
+import static org.mockito.Mockito.doNothing;
 
 @RunWith(SpringRunner.class)
 public class MeasureServiceImplTest {
 
-    /*@TestConfiguration
-    public static class MeasureServiceImplConfiguration{
-        @Bean
-        public MeasureServiceImpl service(){
-            return new MeasureServiceImpl();
-        }
-
-        @Bean
-        public JobServiceImpl JobService(){
-            return new JobServiceImpl();
-        }
-
-    }*/
-
 
     @InjectMocks
     private MeasureServiceImpl service;
     @Mock
     private MeasureRepo measureRepo;
+    @Mock
+    private JobServiceImpl jobService;
 
     @Before
-    public void setup(){
+    public void setup() {
     }
 
     @Test
-    public void testGetAllMeasures(){
-        try {
-            Iterable<Measure> tmp = service.getAllAliveMeasures();
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get all Measure from dbs");
-        }
+    public void testGetAllMeasures() throws Exception {
+        Measure measure = createATestMeasure("view_item_hourly", "ebay");
+        given(measureRepo.findByDeleted(false)).willReturn(Arrays.asList(measure));
+        List<Measure> measures = (List<Measure>) service.getAllAliveMeasures();
+        assertThat(measures.size()).isEqualTo(1);
+        assertThat(measures.get(0).getName()).isEqualTo("view_item_hourly");
     }
 
     @Test
-    public void testGetMeasuresById(){
-        try {
-            Measure tmp = service.getMeasureById(1);
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get Measure in db By Id: 1");
-        }
+    public void testGetMeasuresById() throws Exception {
+        Measure measure = createATestMeasure("view_item_hourly", "ebay");
+        given(measureRepo.findOne(1L)).willReturn(measure);
+        Measure m = service.getMeasureById(1);
+        assertEquals(m.getName(), measure.getName());
     }
 
-  /*  @Test
-    public void testGetMeasuresByName(){
-        try {
-            Measure tmp = service.getMeasureByName("viewitem_hourly");
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get Measure in db By name: viewitem_hourly");
-        }
-    }*/
 
     @Test
-    public void testDeleteMeasuresById(){
-        try {
-            service.deleteMeasureById(1L);
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot delete Measure in db By Id: 1");
-        }
+    public void testDeleteMeasuresById() throws Exception {
+        Measure measure = createATestMeasure("view_item_hourly", "ebay");
+        // RESOURCE_NOT_FOUND
+        given(measureRepo.exists(1L)).willReturn(false);
+        GriffinOperationMessage message1 = service.deleteMeasureById(1L);
+        assertEquals(message1, GriffinOperationMessage.RESOURCE_NOT_FOUND);
+
+        //DELETE_MEASURE_BY_ID_SUCCESS
+        given(measureRepo.exists(1L)).willReturn(true);
+        given(measureRepo.findOne(1L)).willReturn(measure);
+        doNothing().when(jobService).deleteJobsRelateToMeasure(measure);
+        given(measureRepo.save(measure)).willReturn(measure);
+        GriffinOperationMessage message = service.deleteMeasureById(1L);
+        assertEquals(message, GriffinOperationMessage.DELETE_MEASURE_BY_ID_SUCCESS);
     }
 
-    /*@Test
-    public void testDeleteMeasuresByName(){
-        try {
-            String measureName="viewitem_hourly";
-            given(measureRepo.findByName(measureName)).willReturn(null);
-            GriffinOperationMessage message=service.deleteMeasureByName("viewitem_hourly");
-            assertEquals(message,GriffinOperationMessage.RESOURCE_NOT_FOUND);
-            assertTrue(true);
-
-            String org="bullseye";
-            Measure measure=createATestMeasure(measureName,org);
-            given(measureRepo.findByName(measureName)).willReturn(measure);
-            GriffinOperationMessage message1=service.deleteMeasureByName("viewitem_hourly");
-            assertEquals(message1,GriffinOperationMessage.DELETE_MEASURE_BY_NAME_SUCCESS);
-        }catch (Throwable t){
-            fail("Cannot delete Measure in db By name: viewitem_hourly");
-        }
-    }*/
     @Test
-    public void testCreateNewMeasure()  {
-        try {
-            // CREATE_MEASURE_SUCCESS
-            String measureName = "viewitem_hourly";
-            String org = "bullseye";
-            Measure measure = createATestMeasure(measureName, org);
-            given(measureRepo.findByNameAndDeleted(measureName, false)).willReturn(new LinkedList<>());
-            given(measureRepo.save(measure)).willReturn(measure);
-            GriffinOperationMessage message = service.createMeasure(measure);
-            assertEquals(message, GriffinOperationMessage.CREATE_MEASURE_SUCCESS);
-            assertTrue(true);
-            // CREATE_MEASURE_FAIL_DUPLICATE
-            Measure measure1 = createATestMeasure(measureName, "bullseye1");
-            LinkedList<Measure> list = new LinkedList<>();
-            list.add(measure);
-            given(measureRepo.findByNameAndDeleted(measureName, false)).willReturn(list);
-            GriffinOperationMessage message1 = service.createMeasure(measure);
-            assertEquals(message1, GriffinOperationMessage.CREATE_MEASURE_FAIL_DUPLICATE);
-            // CREATE_MEASURE_FAIL
-            given(measureRepo.findByNameAndDeleted(measureName, false)).willReturn(new LinkedList<>());
-            given(measureRepo.save(measure)).willReturn(null);
-            GriffinOperationMessage message2 = service.createMeasure(measure);
-            assertEquals(message2, GriffinOperationMessage.CREATE_MEASURE_FAIL);
-            }catch (Throwable t){
-                fail("Cannot create new measure viewitem_hourly");
-            }
+    public void testCreateNewMeasure() throws Exception {
+        Measure measure = createATestMeasure("view_item_hourly", "ebay");
+        // CREATE_MEASURE_SUCCESS
+        String measureName = "view_item_hourly";
+        given(measureRepo.findByNameAndDeleted(measureName, false)).willReturn(new LinkedList<>());
+        given(measureRepo.save(measure)).willReturn(measure);
+        GriffinOperationMessage message = service.createMeasure(measure);
+        assertEquals(message, GriffinOperationMessage.CREATE_MEASURE_SUCCESS);
+
+        // CREATE_MEASURE_FAIL_DUPLICATE
+        LinkedList<Measure> list = new LinkedList<>();
+        list.add(measure);
+        given(measureRepo.findByNameAndDeleted(measureName, false)).willReturn(list);
+        GriffinOperationMessage message1 = service.createMeasure(measure);
+        assertEquals(message1, GriffinOperationMessage.CREATE_MEASURE_FAIL_DUPLICATE);
+
+        // CREATE_MEASURE_FAIL
+        given(measureRepo.findByNameAndDeleted(measureName, false)).willReturn(new LinkedList<>());
+        given(measureRepo.save(measure)).willReturn(null);
+        GriffinOperationMessage message2 = service.createMeasure(measure);
+        assertEquals(message2, GriffinOperationMessage.CREATE_MEASURE_FAIL);
     }
 
     @Test
-    public void testGetAllMeasureByOwner(){
-        try {
-            String measureName="viewitem_hourly";
-            String org="bullseye";
-            Measure measure=createATestMeasure(measureName,org);
-            String owner="test1";
-            given(measureRepo.findAll()).willReturn(Arrays.asList(measure));
-            List<Map<String, String>> namelist=service.getAllAliveMeasureNameIdByOwner(owner);
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot get all measure name by owner test1");
-        }
+    public void testGetAllMeasureByOwner() throws Exception {
+        String owner = "test";
+        Measure measure = createATestMeasure("view_item_hourly", "ebay");
+        measure.setId(1L);
+        given(measureRepo.findByOwnerAndDeleted(owner, false)).willReturn(Arrays.asList(measure));
+        List<Map<String, String>> list = service.getAllAliveMeasureNameIdByOwner(owner);
+        assertEquals(list.get(0).get("name"), measure.getName());
     }
 
     @Test
-    public void testUpdateMeasure(){
-        try {
-            String measureName="viewitem_hourly";
-            String org="bullseye";
-            Measure measure=createATestMeasure(measureName,org);
-            GriffinOperationMessage message=service.updateMeasure(measure);
-            assertTrue(true);
-        }catch (Throwable t){
-            fail("Cannot create new measure viewitem_hourly");
-        }
+    public void testUpdateMeasure() throws Exception {
+        Measure measure = createATestMeasure("view_item_hourly", "ebay");
+        // RESOURCE_NOT_FOUND
+        given(measureRepo.exists(measure.getId())).willReturn(false);
+        GriffinOperationMessage message1 = service.updateMeasure(measure);
+        assertEquals(message1, GriffinOperationMessage.RESOURCE_NOT_FOUND);
+
+        //UPDATE_MEASURE_SUCCESS
+        given(measureRepo.exists(measure.getId())).willReturn(true);
+        given(measureRepo.save(measure)).willReturn(measure);
+        GriffinOperationMessage message2 = service.updateMeasure(measure);
+        assertEquals(message2, GriffinOperationMessage.UPDATE_MEASURE_SUCCESS);
     }
 
-    private Measure createATestMeasure(String name,String org)throws IOException,Exception{
-        HashMap<String,String> configMap1;
-        configMap1 = new HashMap<>();
-        configMap1.put("database","default");
-        configMap1.put("table.name","test_data_src");
-        HashMap<String,String> configMap2=new HashMap<>();
-        configMap2.put("database","default");
-        configMap2.put("table.name","test_data_tgt");
-        String configJson1 = new ObjectMapper().writeValueAsString(configMap1);
-        String configJson2 = new ObjectMapper().writeValueAsString(configMap2);
-        DataConnector source = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson1);
-        DataConnector target = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson2);
-
-        String rules = "$source.uage > 100 AND $source.uid = $target.uid AND $source.uage + 12 = $target.uage + 10 + 2 AND $source.udes + 11 = $target.udes + 1 + 1";
-        EvaluateRule eRule = new EvaluateRule(1,rules);
-        Measure measure = new Measure(name,"bevssoj description", Measure.MearuseType.accuracy, org, source, target, eRule,"test1");
-        measure.setId(0L);
-        return measure;
-    }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/measure/MeasureTestHelper.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/measure/MeasureTestHelper.java b/service/src/test/java/org/apache/griffin/core/measure/MeasureTestHelper.java
new file mode 100644
index 0000000..7d48f5d
--- /dev/null
+++ b/service/src/test/java/org/apache/griffin/core/measure/MeasureTestHelper.java
@@ -0,0 +1,55 @@
+package org.apache.griffin.core.measure;
+
+
+import org.apache.griffin.core.measure.entity.*;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.quartz.JobDataMap;
+import org.quartz.Trigger;
+import org.quartz.impl.JobDetailImpl;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+
+public class MeasureTestHelper {
+    public static Measure createATestMeasure(String name, String org) throws Exception{
+        HashMap<String, String> configMap1 = new HashMap<>();
+        configMap1.put("database", "default");
+        configMap1.put("table.name", "test_data_src");
+        HashMap<String, String> configMap2 = new HashMap<>();
+        configMap2.put("database", "default");
+        configMap2.put("table.name", "test_data_tgt");
+        String configJson1 = new ObjectMapper().writeValueAsString(configMap1);
+        String configJson2 = new ObjectMapper().writeValueAsString(configMap2);
+
+        DataSource dataSource = new DataSource("source", Arrays.asList(new DataConnector("HIVE", "1.2", configJson1)));
+        DataSource targetSource = new DataSource("target", Arrays.asList(new DataConnector("HIVE", "1.2", configJson2)));
+
+        List<DataSource> dataSources = new ArrayList<>();
+        dataSources.add(dataSource);
+        dataSources.add(targetSource);
+        String rules = "source.id=target.id AND source.name=target.name AND source.age=target.age";
+        Rule rule = new Rule("griffin-dsl", "accuracy", rules);
+        EvaluateRule evaluateRule = new EvaluateRule(Arrays.asList(rule));
+        return new Measure(name, "description", org, "batch", "test", dataSources, evaluateRule);
+    }
+
+    public static JobDetailImpl createJobDetail() {
+        JobDetailImpl jobDetail = new JobDetailImpl();
+        JobDataMap jobInfoMap = new JobDataMap();
+        jobInfoMap.put("triggerState", Trigger.TriggerState.NORMAL);
+        jobInfoMap.put("measureId", "1");
+        jobInfoMap.put("sourcePattern", "YYYYMMdd-HH");
+        jobInfoMap.put("targetPattern", "YYYYMMdd-HH");
+        jobInfoMap.put("jobStartTime", "1506356105876");
+        jobInfoMap.put("interval", "3000");
+        jobInfoMap.put("deleted", "false");
+        jobInfoMap.put("blockStartTimestamp","1506634804254");
+        jobInfoMap.put("lastBlockStartTimestamp","1506634804254");
+        jobInfoMap.put("groupName","BA");
+        jobInfoMap.put("jobName","jobName");
+        jobDetail.setJobDataMap(jobInfoMap);
+        return jobDetail;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/measure/repo/MeasureRepoTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/measure/repo/MeasureRepoTest.java b/service/src/test/java/org/apache/griffin/core/measure/repo/MeasureRepoTest.java
index 6560e51..cd9e00e 100644
--- a/service/src/test/java/org/apache/griffin/core/measure/repo/MeasureRepoTest.java
+++ b/service/src/test/java/org/apache/griffin/core/measure/repo/MeasureRepoTest.java
@@ -19,8 +19,6 @@ under the License.
 
 package org.apache.griffin.core.measure.repo;
 
-import org.apache.griffin.core.measure.entity.DataConnector;
-import org.apache.griffin.core.measure.entity.EvaluateRule;
 import org.apache.griffin.core.measure.entity.Measure;
 import org.junit.Before;
 import org.junit.Test;
@@ -30,9 +28,9 @@ import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
 import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
 import org.springframework.test.context.junit4.SpringRunner;
 
-import java.util.HashMap;
 import java.util.List;
 
+import static org.apache.griffin.core.measure.MeasureTestHelper.createATestMeasure;
 import static org.assertj.core.api.Assertions.assertThat;
 
 @RunWith(SpringRunner.class)
@@ -53,56 +51,35 @@ public class MeasureRepoTest {
     }
 
     @Test
-    public void testFindAllOrganizations() throws Exception {
+    public void testFindAllOrganizations() {
         List<String> orgs = measureRepo.findOrganizations();
         assertThat(orgs.size()).isEqualTo(3);
     }
 
 
     @Test
-    public void testFindNameByOrganization() throws Exception {
+    public void testFindNameByOrganization() {
         List<String> orgs = measureRepo.findNameByOrganization("org1");
         assertThat(orgs.size()).isEqualTo(1);
-        assertThat(orgs.get(0)).isEqualToIgnoringCase("m2");
+        assertThat(orgs.get(0)).isEqualToIgnoringCase("m1");
 
     }
 
     @Test
-    public void testFindOrgByName() throws Exception {
-        String org = measureRepo.findOrgByName("m3");
+    public void testFindOrgByName() {
+        String org = measureRepo.findOrgByName("m2");
         assertThat(org).isEqualTo("org2");
     }
 
-    private Measure createATestMeasure(String name,String org)throws Exception{
-        HashMap<String,String> configMap1=new HashMap<>();
-        configMap1.put("database","default");
-        configMap1.put("table.name","test_data_src");
-        HashMap<String,String> configMap2=new HashMap<>();
-        configMap2.put("database","default");
-        configMap2.put("table.name","test_data_tgt");
-        String configJson1 = new org.codehaus.jackson.map.ObjectMapper().writeValueAsString(configMap1);
-        String configJson2 = new org.codehaus.jackson.map.ObjectMapper().writeValueAsString(configMap2);
-
-        DataConnector source = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson1);
-        DataConnector target = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson2);
-
-        String rules = "$source.uage > 100 AND $source.uid = $target.uid AND $source.uage + 12 = $target.uage + 10 + 2 AND $source.udes + 11 = $target.udes + 1 + 1";
-
-        EvaluateRule eRule = new EvaluateRule(1,rules);
-
-        Measure measure = new Measure(name,"bevssoj description", Measure.MearuseType.accuracy, org, source, target, eRule,"test1");
-
-        return measure;
-    }
 
     public void setEntityManager() throws Exception {
-        Measure measure=createATestMeasure("m1","bullseye");
+        Measure measure = createATestMeasure("m1", "org1");
         entityManager.persistAndFlush(measure);
 
-        Measure measure2=createATestMeasure("m2","org1");
+        Measure measure2 = createATestMeasure("m2", "org2");
         entityManager.persistAndFlush(measure2);
 
-        Measure measure3=createATestMeasure("m3","org2");
+        Measure measure3 = createATestMeasure("m3", "org3");
         entityManager.persistAndFlush(measure3);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java
new file mode 100644
index 0000000..d1bbdb6
--- /dev/null
+++ b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java
@@ -0,0 +1,128 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.metastore.hive;
+
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
+import org.springframework.boot.test.mock.mockito.MockBean;
+import org.springframework.test.context.junit4.SpringRunner;
+import org.springframework.test.web.servlet.MockMvc;
+
+import java.util.*;
+
+import static org.hamcrest.Matchers.*;
+import static org.mockito.BDDMockito.given;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+
+@RunWith(SpringRunner.class)
+@WebMvcTest(value = HiveMetaStoreController.class, secure = false)
+public class HiveMetaStoreControllerTest {
+
+    @Autowired
+    private MockMvc mockMvc;
+
+    @MockBean
+    private HiveMetaStoreService hiveMetaStoreService;
+
+
+    @Before
+    public void setup() {
+    }
+
+    @Test
+    public void testGetAllDatabases() throws Exception {
+        String dbName = "default";
+        given(hiveMetaStoreService.getAllDatabases()).willReturn(Arrays.asList(dbName));
+
+        mockMvc.perform(get("/metadata/hive/db"))
+                .andExpect(status().isOk())
+                .andExpect(jsonPath("$.[0]", is(dbName)));
+    }
+
+    @Test
+    public void testGetDefAllTables() throws Exception {
+        String tableName = "table";
+        given(hiveMetaStoreService.getAllTableNames("")).willReturn(Arrays.asList(tableName));
+
+        mockMvc.perform(get("/metadata/hive/table"))
+                .andExpect(status().isOk())
+                .andExpect(jsonPath("$.[0]", is(tableName)));
+    }
+
+    @Test
+    public void testGetAllTableNames() throws Exception {
+        String dbName = "default";
+        String tableName = "table";
+        given(hiveMetaStoreService.getAllTableNames(dbName)).willReturn(Arrays.asList(tableName));
+
+        mockMvc.perform(get("/metadata/hive/allTableNames").param("db", dbName))
+                .andExpect(status().isOk())
+                .andExpect(jsonPath("$.[0]", is(tableName)));
+    }
+
+    @Test
+    public void testGetAllTablesWithDb() throws Exception {
+        String dbName = "default";
+        given(hiveMetaStoreService.getAllTable(dbName)).willReturn(Arrays.asList(new Table()));
+
+        mockMvc.perform(get("/metadata/hive/db/allTables").param("db", dbName))
+                .andExpect(status().isOk())
+                .andExpect(jsonPath("$.[0].tableName", is(nullValue())));
+    }
+
+    @Test
+    public void testGetAllTables() throws Exception {
+        Map<String, List<Table>> results = new HashMap<>();
+        results.put("table", new ArrayList<>());
+        given(hiveMetaStoreService.getAllTable()).willReturn(results);
+
+        mockMvc.perform(get("/metadata/hive/allTables"))
+                .andExpect(status().isOk())
+                .andExpect(jsonPath("$.table", hasSize(0)));
+    }
+
+    @Test
+    public void testGetDefTable() throws Exception {
+        String dbName = "";
+        String tableName = "table";
+        given(hiveMetaStoreService.getTable(dbName, tableName)).willReturn(new Table(tableName, null, null, 0, 0, 0, null, null, null, null, null, null));
+
+        mockMvc.perform(get("/metadata/hive/default/{table}", tableName))
+                .andExpect(status().isOk())
+                .andExpect(jsonPath("$.tableName", is(tableName)));
+    }
+
+    @Test
+    public void testGetTable() throws Exception {
+        String dbName = "default";
+        String tableName = "table";
+        given(hiveMetaStoreService.getTable(dbName, tableName)).willReturn(new Table(tableName, null, null, 0, 0, 0, null, null, null, null, null, null));
+
+        mockMvc.perform(get("/metadata/hive").param("db", dbName).param("table", tableName))
+                .andExpect(status().isOk())
+                .andExpect(jsonPath("$.tableName", is(tableName)));
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java
new file mode 100644
index 0000000..d732e34
--- /dev/null
+++ b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java
@@ -0,0 +1,145 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.metastore.hive;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.thrift.TException;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.boot.test.mock.mockito.MockBean;
+import org.springframework.context.annotation.Bean;
+import org.springframework.test.context.junit4.SpringRunner;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.BDDMockito.given;
+import static org.mockito.Mockito.doNothing;
+
+
+@RunWith(SpringRunner.class)
+public class HiveMetaStoreServiceImplTest {
+
+    @TestConfiguration
+    public static class HiveMetaStoreServiceConfiguration {
+        @Bean
+        public HiveMetaStoreService service() {
+            return new HiveMetaStoreServiceImpl();
+        }
+    }
+
+    @MockBean
+    private HiveMetaStoreClient client;
+
+    @Autowired
+    private HiveMetaStoreService service;
+
+    @Before
+    public void setup() {
+
+    }
+
+    @Test
+    public void testGetAllDatabases() throws MetaException {
+        given(client.getAllDatabases()).willReturn(Arrays.asList("default"));
+        assertEquals(service.getAllDatabases().iterator().hasNext(), true);
+
+        // MetaException
+        given(client.getAllDatabases()).willThrow(MetaException.class);
+        doNothing().when(client).reconnect();
+        service.getAllDatabases();
+        assertTrue(service.getAllDatabases() == null);
+
+    }
+
+
+    @Test
+    public void testGetAllTableNames() throws MetaException {
+        String dbName = "default";
+        given(client.getAllTables(dbName)).willReturn(Arrays.asList(dbName));
+        assertEquals(service.getAllTableNames(dbName).iterator().hasNext(), true);
+
+        // MetaException
+        given(client.getAllTables(dbName)).willThrow(MetaException.class);
+        doNothing().when(client).reconnect();
+        assertTrue(service.getAllTableNames(dbName) == null);
+
+    }
+
+    @Test
+    public void testGetAllTableByDBName() throws TException {
+        String useDbName = "default";
+        String tableName = "table";
+        given(client.getAllTables(useDbName)).willReturn(Arrays.asList(tableName));
+        given(client.getTable(useDbName, tableName)).willReturn(new Table());
+        assertEquals(service.getAllTable(useDbName).size(), 1);
+
+        // MetaException
+        given(client.getAllTables(useDbName)).willThrow(MetaException.class);
+        doNothing().when(client).reconnect();
+        assertEquals(service.getAllTable(useDbName).size(), 0);
+    }
+
+    @Test
+    public void testGetAllTable() throws TException {
+        String useDbName = "default";
+        String tableName = "table";
+        List<String> databases = Arrays.asList(useDbName);
+        given(client.getAllDatabases()).willReturn(databases);
+        given(client.getAllTables(databases.iterator().next())).willReturn(Arrays.asList(tableName));
+        given(client.getTable(useDbName, tableName)).willReturn(new Table());
+        assertEquals(service.getAllTable().size(), 1);
+
+        // NOTE: do not change the order of the following two MetaException tests;
+        // the stubs throw exceptions, so the tests depend on running in this sequence.
+        // MetaException1
+        given(client.getAllDatabases()).willReturn(databases);
+        given(client.getAllTables(useDbName)).willThrow(MetaException.class);
+        doNothing().when(client).reconnect();
+        assertEquals(service.getAllTable().get(useDbName).size(), 0);
+
+        // MetaException2
+        given(client.getAllDatabases()).willThrow(MetaException.class);
+        doNothing().when(client).reconnect();
+        assertEquals(service.getAllTable().size(), 0);
+
+
+    }
+
+    @Test
+    public void testGetTable() throws Exception {
+        String dbName = "default";
+        String tableName = "tableName";
+        given(client.getTable(dbName, tableName)).willReturn(new Table());
+        assertTrue(service.getTable(dbName, tableName) != null);
+
+        //getTable throws Exception
+        given(client.getTable(dbName, tableName)).willThrow(Exception.class);
+        doNothing().when(client).reconnect();
+        assertTrue(service.getTable(dbName, tableName) == null);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreControllerTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreControllerTest.java b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreControllerTest.java
deleted file mode 100644
index 50ece98..0000000
--- a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreControllerTest.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
-package org.apache.griffin.core.metastore.hive;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.springframework.test.context.junit4.SpringRunner;
-import org.springframework.test.web.servlet.MockMvc;
-import org.springframework.test.web.servlet.setup.MockMvcBuilders;
-
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-
-@RunWith(SpringRunner.class)
-public class HiveMetastoreControllerTest {
-    private MockMvc mockMvc;
-
-    @Mock
-    HiveMetastoreServiceImpl hiveMetastoreService;
-
-    @InjectMocks
-    private HiveMetastoreController hiveMetastoreController;
-
-    @Before
-    public void setup(){
-        MockitoAnnotations.initMocks(this);
-        this.mockMvc = MockMvcBuilders.standaloneSetup(hiveMetastoreController).build();
-    }
-
-    @Test
-    public void test_getAllDatabases() throws Exception {
-        when(hiveMetastoreService.getAllDatabases()).thenReturn(null);
-        mockMvc.perform(get("/metadata/hive/db"))
-                .andExpect(status().isOk());
-        verify(hiveMetastoreService).getAllDatabases();
-    }
-    
-
-    @Test
-    public void test_getAllTableNames() throws Exception {
-        String db="default";
-        when(hiveMetastoreService.getAllTableNames(db)).thenReturn(null);
-        mockMvc.perform(get("/metadata/hive/allTableNames?db="+db))
-                .andExpect(status().isOk());
-        verify(hiveMetastoreService).getAllTableNames(db);
-    }
-
-    @Test
-    public void test_getAllTables() throws Exception {
-        String db="default";
-        when(hiveMetastoreService.getAllTablesByDbName(db)).thenReturn(null);
-        mockMvc.perform(get("/metadata/hive/db/allTables?db={db}",db))
-                .andExpect(status().isOk());
-        verify(hiveMetastoreService).getAllTablesByDbName(db);
-    }
-
-    /**
-     */
-    @Test
-    public void test_getAllTables2() throws Exception {
-        when(hiveMetastoreService.getAllTable()).thenReturn(null);
-        mockMvc.perform(get("/metadata/hive/allTables"))
-                .andExpect(status().isOk());
-        verify(hiveMetastoreService).getAllTable();
-    }
-
-
-
-    @Test
-    public void test_getTable() throws Exception{
-        String db="default";
-        String table="cout";
-        when(hiveMetastoreService.getTable(db,table)).thenReturn(null);
-        mockMvc.perform(get("/metadata/hive/table?db={db}&table={table}",db,table))
-                .andExpect(status().isOk());
-        verify(hiveMetastoreService).getTable(db,table);
-    }
-}


[4/4] incubator-griffin git commit: change json format, update unit test and fix hive connect

Posted by gu...@apache.org.
change json format, update unit test, and fix hive connect

Author: Lionel Liu <bh...@163.com>
Author: Yao <de...@ebay.com>

Closes #126 from ahutsunshine/master.


Project: http://git-wip-us.apache.org/repos/asf/incubator-griffin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-griffin/commit/43f9dbf7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-griffin/tree/43f9dbf7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-griffin/diff/43f9dbf7

Branch: refs/heads/master
Commit: 43f9dbf7bb41a35d94e695ca23748eb67689fc80
Parents: 0a3de75
Author: Lionel Liu <bh...@163.com>
Authored: Mon Oct 9 15:12:30 2017 +0800
Committer: Lionel Liu <bh...@163.com>
Committed: Mon Oct 9 15:12:30 2017 +0800

----------------------------------------------------------------------
 .../griffin/core/GriffinWebApplication.java     |  44 +---
 .../griffin/core/common/SimpleCORSFilter.java   |   7 +-
 .../griffin/core/config/SwaggerConfig.java      |   1 +
 .../AutowiringSpringBeanJobFactory.java         |  34 +--
 .../core/config/jobConfig/SparkJobConfig.java   |   3 -
 .../core/error/exception/GriffinException.java  |  32 +--
 .../exception/RuntimeExceptionHandler.java      |  50 ++--
 .../apache/griffin/core/job/JobController.java  |  20 +-
 .../org/apache/griffin/core/job/JobService.java |   2 +-
 .../apache/griffin/core/job/JobServiceImpl.java | 209 ++++++++-------
 .../apache/griffin/core/job/SparkSubmitJob.java | 157 ++++++-----
 .../griffin/core/job/entity/JobInstance.java    |  11 +-
 .../griffin/core/job/entity/JobRequestBody.java |   3 -
 .../core/job/entity/LivySessionStates.java      |  21 +-
 .../griffin/core/job/entity/SparkJobDO.java     |  60 ++---
 .../griffin/core/job/repo/JobInstanceRepo.java  |  16 +-
 .../griffin/core/login/LoginController.java     |   7 +-
 .../apache/griffin/core/login/LoginService.java |   8 +-
 .../griffin/core/login/LoginServiceImpl.java    |  44 ++--
 .../griffin/core/measure/MeasureController.java |  14 +-
 .../core/measure/MeasureServiceImpl.java        |  38 +--
 .../core/measure/entity/AuditableEntity.java    |  20 +-
 .../core/measure/entity/DataConnector.java      |  70 ++---
 .../griffin/core/measure/entity/DataSource.java |  59 +++++
 .../core/measure/entity/EvaluateRule.java       |  41 ++-
 .../griffin/core/measure/entity/Measure.java    |  76 ++----
 .../griffin/core/measure/entity/Rule.java       |  75 ++++++
 .../core/measure/repo/DataConnectorRepo.java    |   2 -
 .../core/measure/repo/DataSourceRepo.java       |  26 ++
 .../core/measure/repo/EvaluateRuleRepo.java     |   2 -
 .../griffin/core/measure/repo/MeasureRepo.java  |  19 +-
 .../griffin/core/measure/repo/RuleRepo.java     |  26 ++
 .../metastore/hive/HiveMetaStoreController.java |  73 ++++++
 .../core/metastore/hive/HiveMetaStoreProxy.java |  78 ++++++
 .../metastore/hive/HiveMetaStoreService.java    |  39 +++
 .../hive/HiveMetaStoreServiceImpl.java          | 162 ++++++++++++
 .../metastore/hive/HiveMetastoreController.java |  60 -----
 .../core/metastore/hive/HiveMetastoreProxy.java |  78 ------
 .../metastore/hive/HiveMetastoreService.java    |  39 ---
 .../hive/HiveMetastoreServiceImpl.java          | 154 -----------
 .../metastore/kafka/KafkaSchemaController.java  |  14 +-
 .../metastore/kafka/KafkaSchemaServiceImpl.java |   2 +-
 .../griffin/core/metric/MetricController.java   |   6 +-
 .../griffin/core/metric/MetricServiceImpl.java  |   5 +-
 .../griffin/core/service/GriffinController.java |  57 ++--
 .../core/util/GriffinOperationMessage.java      |  27 +-
 .../apache/griffin/core/util/GriffinUtil.java   |  39 ++-
 .../griffin/core/job/JobControllerTest.java     |  75 +++---
 .../griffin/core/job/JobInstanceRepoTest.java   |  97 ++++---
 .../griffin/core/job/JobServiceImplTest.java    | 227 ++++++++--------
 .../griffin/core/job/SparkSubmitJobTest.java    | 213 +++------------
 .../core/measure/MeasureControllerTest.java     | 152 ++++++-----
 .../griffin/core/measure/MeasureRepoTest.java   | 231 ++++++++---------
 .../core/measure/MeasureServiceImplTest.java    | 216 ++++++----------
 .../griffin/core/measure/MeasureTestHelper.java |  55 ++++
 .../core/measure/repo/MeasureRepoTest.java      |  41 +--
 .../hive/HiveMetaStoreControllerTest.java       | 128 +++++++++
 .../hive/HiveMetaStoreServiceImplTest.java      | 145 +++++++++++
 .../hive/HiveMetastoreControllerTest.java       | 101 --------
 .../hive/HiveMetastoreServiceImplTest.java      | 167 ------------
 .../kafka/KafkaSchemaControllerTest.java        | 220 ++++++++--------
 .../kafka/KafkaSchemaServiceImplTest.java       | 257 +++++++++----------
 .../core/metric/MetricControllerTest.java       |  22 +-
 .../core/metric/MetricServiceImplTest.java      |  23 +-
 .../core/service/GriffinControllerTest.java     | 104 ++------
 .../griffin/core/util/GriffinUtilTest.java      |   6 +-
 service/src/test/resources/Init_quartz-h2.sql   |   1 -
 service/src/test/resources/sparkJob.properties  |  52 ----
 service/src/test/resources/test.sql             |  88 +++----
 69 files changed, 2218 insertions(+), 2433 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java b/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java
index 69d7d21..e5f877b 100644
--- a/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java
+++ b/service/src/main/java/org/apache/griffin/core/GriffinWebApplication.java
@@ -27,57 +27,17 @@ import org.springframework.boot.autoconfigure.SpringBootApplication;
 import org.springframework.context.annotation.Bean;
 import org.springframework.scheduling.annotation.EnableScheduling;
 
-//import org.apache.griffin.core.measure.repo.ConnectorConfigRepo;
 
 @SpringBootApplication
 @EnableScheduling
-public class GriffinWebApplication/* implements CommandLineRunner*/{
+public class GriffinWebApplication {
     private static final Logger LOGGER = LoggerFactory.getLogger(GriffinWebApplication.class);
+
     public static void main(String[] args) {
         LOGGER.info("application start");
         SpringApplication.run(GriffinWebApplication.class, args);
     }
 
-//    @Autowired
-//    MeasureRepo measureRepo;
-//    @Autowired
-//    EvaluateRuleRepo evaluateRuleRepo;
-//    @Autowired
-//    DataConnectorRepo connectorRepo;
-//
-//    public void run(String... strings) throws Exception {
-//        HashMap<String,String> configMap1=new HashMap<>();
-//        configMap1.put("database","default");
-//        configMap1.put("table.name","test_data_src");
-//        HashMap<String,String> configMap2=new HashMap<>();
-//        configMap2.put("database","default");
-//        configMap2.put("table.name","test_data_tgt");
-//        String configJson1 = new ObjectMapper().writeValueAsString(configMap1);
-//        String configJson2 = new ObjectMapper().writeValueAsString(configMap2);
-//
-//        DataConnector source = new DataConnector(ConnectorType.HIVE, "1.2", configJson1);
-//        DataConnector target = new DataConnector(ConnectorType.HIVE, "1.2", configJson2);
-//
-//        String rules = "$source.uage > 100 AND $source.uid = $target.uid AND $source.uage + 12 = $target.uage + 10 + 2 AND $source.udes + 11 = $target.udes + 1 + 1";
-//
-//        EvaluateRule eRule = new EvaluateRule(1,rules);
-//
-//        Measure measure = new Measure("viewitem_hourly","bevssoj description", Measure.MearuseType.accuracy, "bullseye", source, target, eRule,"test1");
-//        measureRepo.save(measure);
-//
-//        DataConnector source2 = new DataConnector(ConnectorType.HIVE, "1.2", configJson1);
-//        DataConnector target2 = new DataConnector(ConnectorType.HIVE, "1.2", configJson2);
-//        EvaluateRule eRule2 = new EvaluateRule(1,rules);
-//        Measure measure2 = new Measure("search_hourly","test description", Measure.MearuseType.accuracy, "bullseye", source2, target2, eRule2,"test1");
-//        measureRepo.save(measure2);
-//
-//        DataConnector source3 = new DataConnector(ConnectorType.HIVE, "1.2", configJson1);
-//        DataConnector target3 = new DataConnector(ConnectorType.HIVE, "1.2", configJson2);
-//        EvaluateRule eRule3 = new EvaluateRule(1,rules);
-//        Measure measure3 = new Measure("buy_hourly","test_just_inthere description", Measure.MearuseType.accuracy, "bullseye", source3, target3, eRule3,"test1");
-//        measureRepo.save(measure3);
-//    }
-
     @Bean
     public SimpleCORSFilter simpleFilter() {
         return new SimpleCORSFilter();

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/common/SimpleCORSFilter.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/common/SimpleCORSFilter.java b/service/src/main/java/org/apache/griffin/core/common/SimpleCORSFilter.java
index b7946a3..a5cd687 100644
--- a/service/src/main/java/org/apache/griffin/core/common/SimpleCORSFilter.java
+++ b/service/src/main/java/org/apache/griffin/core/common/SimpleCORSFilter.java
@@ -17,6 +17,7 @@ specific language governing permissions and limitations
 under the License.
 */
 package org.apache.griffin.core.common;
+
 import javax.servlet.*;
 import javax.servlet.http.HttpServletResponse;
 import java.io.IOException;
@@ -33,10 +34,12 @@ public class SimpleCORSFilter implements Filter {
     }
 
     @Override
-    public void init(FilterConfig filterConfig) {}
+    public void init(FilterConfig filterConfig) {
+    }
 
     @Override
-    public void destroy() {}
+    public void destroy() {
+    }
 
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/config/SwaggerConfig.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/config/SwaggerConfig.java b/service/src/main/java/org/apache/griffin/core/config/SwaggerConfig.java
index bc388e2..15dce47 100644
--- a/service/src/main/java/org/apache/griffin/core/config/SwaggerConfig.java
+++ b/service/src/main/java/org/apache/griffin/core/config/SwaggerConfig.java
@@ -16,6 +16,7 @@ KIND, either express or implied.  See the License for the
 specific language governing permissions and limitations
 under the License.
 */
+
 package org.apache.griffin.core.config;
 
 import org.springframework.context.annotation.Configuration;

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/config/jobConfig/AutowiringSpringBeanJobFactory.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/config/jobConfig/AutowiringSpringBeanJobFactory.java b/service/src/main/java/org/apache/griffin/core/config/jobConfig/AutowiringSpringBeanJobFactory.java
index 3cbfc7d..be2c02d 100644
--- a/service/src/main/java/org/apache/griffin/core/config/jobConfig/AutowiringSpringBeanJobFactory.java
+++ b/service/src/main/java/org/apache/griffin/core/config/jobConfig/AutowiringSpringBeanJobFactory.java
@@ -28,27 +28,27 @@ import org.springframework.context.ApplicationContextAware;
 import org.springframework.scheduling.quartz.SpringBeanJobFactory;
 
 public final class AutowiringSpringBeanJobFactory extends SpringBeanJobFactory
-		implements ApplicationContextAware {
-  private static final Logger LOGGER = LoggerFactory.getLogger(AutowiringSpringBeanJobFactory.class);
+        implements ApplicationContextAware {
+    private static final Logger LOGGER = LoggerFactory.getLogger(AutowiringSpringBeanJobFactory.class);
 
-	private transient AutowireCapableBeanFactory beanFactory;
+    private transient AutowireCapableBeanFactory beanFactory;
 
-	@Override
-	public void setApplicationContext(final ApplicationContext context) {
-		beanFactory = context.getAutowireCapableBeanFactory();
-	}
+    @Override
+    public void setApplicationContext(final ApplicationContext context) {
+        beanFactory = context.getAutowireCapableBeanFactory();
+    }
 
-	@Override
-	protected Object createJobInstance(final TriggerFiredBundle bundle) {
+    @Override
+    protected Object createJobInstance(final TriggerFiredBundle bundle) {
 
-    try {
-      final Object job = super.createJobInstance(bundle);
-      beanFactory.autowireBean(job);
-      return job;
+        try {
+            final Object job = super.createJobInstance(bundle);
+            beanFactory.autowireBean(job);
+            return job;
 
-    } catch (Exception e) {
-      LOGGER.error("fail to create job instance. "+e);
+        } catch (Exception e) {
+            LOGGER.error("fail to create job instance. {}", e.getMessage());
+        }
+        return null;
     }
-    return null;
-	}
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/config/jobConfig/SparkJobConfig.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/config/jobConfig/SparkJobConfig.java b/service/src/main/java/org/apache/griffin/core/config/jobConfig/SparkJobConfig.java
index 2089ca2..4e41194 100644
--- a/service/src/main/java/org/apache/griffin/core/config/jobConfig/SparkJobConfig.java
+++ b/service/src/main/java/org/apache/griffin/core/config/jobConfig/SparkJobConfig.java
@@ -25,9 +25,6 @@ import org.springframework.context.annotation.Configuration;
 
 import java.util.Properties;
 
-/**
- * Created by xiangrchen on 7/26/17.
- */
 @Configuration
 public class SparkJobConfig {
     @Bean(name = "sparkJobProps")

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/error/exception/GriffinException.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/error/exception/GriffinException.java b/service/src/main/java/org/apache/griffin/core/error/exception/GriffinException.java
index 8d9d8a7..67fa8ae 100644
--- a/service/src/main/java/org/apache/griffin/core/error/exception/GriffinException.java
+++ b/service/src/main/java/org/apache/griffin/core/error/exception/GriffinException.java
@@ -22,20 +22,20 @@ package org.apache.griffin.core.error.exception;
 import org.springframework.http.HttpStatus;
 import org.springframework.web.bind.annotation.ResponseStatus;
 
-/**
- * Created by xiangrchen on 7/27/17.
- */
-public abstract class GriffinException extends RuntimeException{
-  @ResponseStatus(value = HttpStatus.GATEWAY_TIMEOUT,reason="Fail to Connect Kafka")
-  public static class KafkaConnectionException extends GriffinException{
-  }
-  @ResponseStatus(value = HttpStatus.GATEWAY_TIMEOUT,reason="Fail to Connect Hive")
-  public static class HiveConnectionException extends GriffinException{
-  }
-  @ResponseStatus(value = HttpStatus.NOT_FOUND,reason="Fail to Get HealthInfo")
-  public static class GetHealthInfoFailureException extends GriffinException {
-  }
-  @ResponseStatus(value = HttpStatus.NOT_FOUND,reason="Fail to Get Jobs")
-  public static class GetJobsFailureException extends GriffinException {
-  }
+public abstract class GriffinException extends RuntimeException {
+    @ResponseStatus(value = HttpStatus.GATEWAY_TIMEOUT, reason = "Fail to Connect Kafka")
+    public static class KafkaConnectionException extends GriffinException {
+    }
+
+    @ResponseStatus(value = HttpStatus.GATEWAY_TIMEOUT, reason = "Fail to Connect Hive")
+    public static class HiveConnectionException extends GriffinException {
+    }
+
+    @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "Fail to Get HealthInfo")
+    public static class GetHealthInfoFailureException extends GriffinException {
+    }
+
+    @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "Fail to Get Jobs")
+    public static class GetJobsFailureException extends GriffinException {
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/error/exception/RuntimeExceptionHandler.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/error/exception/RuntimeExceptionHandler.java b/service/src/main/java/org/apache/griffin/core/error/exception/RuntimeExceptionHandler.java
index ba1d8ed..d35fc44 100644
--- a/service/src/main/java/org/apache/griffin/core/error/exception/RuntimeExceptionHandler.java
+++ b/service/src/main/java/org/apache/griffin/core/error/exception/RuntimeExceptionHandler.java
@@ -16,6 +16,7 @@ KIND, either express or implied.  See the License for the
 specific language governing permissions and limitations
 under the License.
 */
+
 package org.apache.griffin.core.error.exception;
 
 import org.apache.griffin.core.util.GriffinOperationMessage;
@@ -32,33 +33,30 @@ import org.springframework.web.context.request.ServletWebRequest;
 import javax.servlet.http.HttpServletRequest;
 import java.util.Map;
 
-/**
- * Created by xiangrchen on 7/24/17.
- */
 @ControllerAdvice
 @ResponseBody
 public class RuntimeExceptionHandler {
-  private static final Logger LOGGER = LoggerFactory.getLogger(RuntimeExceptionHandler.class);
-
-  @ExceptionHandler(RuntimeException.class)
-  public ResponseEntity<Map<String, Object>> handleUnexpectedRuntimeException(RuntimeException e, HttpServletRequest request){
-    LOGGER.error("Unexpected RuntimeException. "+e);
-    return setExceptionResponse(request, HttpStatus.INTERNAL_SERVER_ERROR, GriffinOperationMessage.UNEXPECTED_RUNTIME_EXCEPTION);
-  }
-
-  @ExceptionHandler(value = GriffinException.class)
-  public void handleCustomException(GriffinException e) throws GriffinException {
-    throw e;
-  }
-
-  private ResponseEntity<Map<String, Object>> setExceptionResponse(HttpServletRequest request, HttpStatus status,
-      GriffinOperationMessage message) {
-    request.setAttribute("javax.servlet.error.status_code", status.value());
-    request.setAttribute("javax.servlet.error.message", message.getDescription());
-    request.setAttribute("javax.servlet.error.error", status.toString());
-    request.setAttribute("javax.servlet.error.request_uri", request.getRequestURI());
-    Map<String, Object> map=(new DefaultErrorAttributes())
-        .getErrorAttributes(new ServletWebRequest(request), false);
-    return new ResponseEntity(map, status);
-  }
+    private static final Logger LOGGER = LoggerFactory.getLogger(RuntimeExceptionHandler.class);
+
+    @ExceptionHandler(RuntimeException.class)
+    public ResponseEntity<Map<String, Object>> handleUnexpectedRuntimeException(RuntimeException e, HttpServletRequest request) {
+        LOGGER.error("Unexpected RuntimeException. " + e);
+        return setExceptionResponse(request, HttpStatus.INTERNAL_SERVER_ERROR, GriffinOperationMessage.UNEXPECTED_RUNTIME_EXCEPTION);
+    }
+
+    @ExceptionHandler(value = GriffinException.class)
+    public void handleCustomException(GriffinException e) throws GriffinException {
+        throw e;
+    }
+
+    private ResponseEntity<Map<String, Object>> setExceptionResponse(HttpServletRequest request, HttpStatus status,
+                                                                     GriffinOperationMessage message) {
+        request.setAttribute("javax.servlet.error.status_code", status.value());
+        request.setAttribute("javax.servlet.error.message", message.getDescription());
+        request.setAttribute("javax.servlet.error.error", status.toString());
+        request.setAttribute("javax.servlet.error.request_uri", request.getRequestURI());
+        Map<String, Object> map = (new DefaultErrorAttributes())
+                .getErrorAttributes(new ServletWebRequest(request), false);
+        return new ResponseEntity(map, status);
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/job/JobController.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/job/JobController.java b/service/src/main/java/org/apache/griffin/core/job/JobController.java
index ec515b2..3254530 100644
--- a/service/src/main/java/org/apache/griffin/core/job/JobController.java
+++ b/service/src/main/java/org/apache/griffin/core/job/JobController.java
@@ -40,32 +40,32 @@ public class JobController {
     @Autowired
     private JobService jobService;
 
-    @RequestMapping(value = "/",method = RequestMethod.GET)
+    @RequestMapping(value = "/", method = RequestMethod.GET)
     public List<Map<String, Serializable>> getJobs() {
         return jobService.getAliveJobs();
     }
 
     @RequestMapping(value = "", method = RequestMethod.POST)
     public GriffinOperationMessage addJob(@RequestParam("group") String groupName,
-                                           @RequestParam("jobName") String jobName,
-                                           @RequestParam("measureId") Long measureId,
-                                           @RequestBody JobRequestBody jobRequestBody) {
-        return jobService.addJob(groupName,jobName, measureId, jobRequestBody);
+                                          @RequestParam("jobName") String jobName,
+                                          @RequestParam("measureId") Long measureId,
+                                          @RequestBody JobRequestBody jobRequestBody) {
+        return jobService.addJob(groupName, jobName, measureId, jobRequestBody);
     }
 
     @RequestMapping(value = "", method = RequestMethod.DELETE)
     public GriffinOperationMessage deleteJob(@RequestParam("group") String group, @RequestParam("jobName") String jobName) {
-        return jobService.deleteJob(group,jobName);
+        return jobService.deleteJob(group, jobName);
     }
 
-    @RequestMapping(value = "/instances",method = RequestMethod.GET)
+    @RequestMapping(value = "/instances", method = RequestMethod.GET)
     public List<JobInstance> findInstancesOfJob(@RequestParam("group") String group, @RequestParam("jobName") String jobName,
                                                 @RequestParam("page") int page, @RequestParam("size") int size) {
-        return jobService.findInstancesOfJob(group,jobName,page,size);
+        return jobService.findInstancesOfJob(group, jobName, page, size);
     }
 
-    @RequestMapping(value = "/health",method = RequestMethod.GET)
-    public JobHealth getHealthInfo()  {
+    @RequestMapping(value = "/health", method = RequestMethod.GET)
+    public JobHealth getHealthInfo() {
         return jobService.getHealthInfo();
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/job/JobService.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/job/JobService.java b/service/src/main/java/org/apache/griffin/core/job/JobService.java
index 4482da5..23f8a82 100644
--- a/service/src/main/java/org/apache/griffin/core/job/JobService.java
+++ b/service/src/main/java/org/apache/griffin/core/job/JobService.java
@@ -36,7 +36,7 @@ public interface JobService {
 
     GriffinOperationMessage pauseJob(String group, String name);
 
-    GriffinOperationMessage deleteJob(String groupName,String jobName);
+    GriffinOperationMessage deleteJob(String groupName, String jobName);
 
     List<JobInstance> findInstancesOfJob(String group, String name, int page, int size);
 

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/job/JobServiceImpl.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/job/JobServiceImpl.java b/service/src/main/java/org/apache/griffin/core/job/JobServiceImpl.java
index 41bd399..5be3f8d 100644
--- a/service/src/main/java/org/apache/griffin/core/job/JobServiceImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/job/JobServiceImpl.java
@@ -48,9 +48,7 @@ import java.io.IOException;
 import java.io.Serializable;
 import java.util.*;
 
-import static org.apache.griffin.core.util.GriffinOperationMessage.CREATE_JOB_FAIL;
-import static org.apache.griffin.core.util.GriffinOperationMessage.PAUSE_JOB_SUCCESS;
-import static org.apache.griffin.core.util.GriffinOperationMessage.SET_JOB_DELETED_STATUS_SUCCESS;
+import static org.apache.griffin.core.util.GriffinOperationMessage.*;
 import static org.quartz.JobBuilder.newJob;
 import static org.quartz.JobKey.jobKey;
 import static org.quartz.TriggerBuilder.newTrigger;
@@ -61,13 +59,13 @@ public class JobServiceImpl implements JobService {
     private static final Logger LOGGER = LoggerFactory.getLogger(JobServiceImpl.class);
 
     @Autowired
-    SchedulerFactoryBean factory;
+    private SchedulerFactoryBean factory;
     @Autowired
-    JobInstanceRepo jobInstanceRepo;
+    private JobInstanceRepo jobInstanceRepo;
     @Autowired
-    Properties sparkJobProps;
+    private Properties sparkJobProps;
 
-    public JobServiceImpl(){
+    public JobServiceImpl() {
     }
 
     @Override
@@ -77,14 +75,14 @@ public class JobServiceImpl implements JobService {
         try {
             for (String groupName : scheduler.getJobGroupNames()) {
                 for (JobKey jobKey : scheduler.getJobKeys(GroupMatcher.jobGroupEquals(groupName))) {
-                    Map jobInfoMap = genJobInfoMap(scheduler, jobKey);
-                    if(jobInfoMap.size()!=0 && isJobDeleted(scheduler, jobKey) == false){
+                    Map jobInfoMap = getJobInfoMap(scheduler, jobKey);
+                    if (jobInfoMap.size() != 0 && !isJobDeleted(scheduler, jobKey)) {
                         list.add(jobInfoMap);
                     }
                 }
             }
         } catch (SchedulerException e) {
-            LOGGER.error("failed to get running jobs."+e);
+            LOGGER.error("failed to get running jobs.{}", e.getMessage());
             throw new GetJobsFailureException();
         }
         return list;
@@ -92,44 +90,41 @@ public class JobServiceImpl implements JobService {
 
     private boolean isJobDeleted(Scheduler scheduler, JobKey jobKey) throws SchedulerException {
         JobDataMap jobDataMap = scheduler.getJobDetail(jobKey).getJobDataMap();
-        boolean status=jobDataMap.getBooleanFromString("deleted");
-        return status;
+        return jobDataMap.getBooleanFromString("deleted");
     }
 
-    public Map genJobInfoMap(Scheduler scheduler,JobKey jobKey) throws SchedulerException {
+    private Map getJobInfoMap(Scheduler scheduler, JobKey jobKey) throws SchedulerException {
         List<Trigger> triggers = (List<Trigger>) scheduler.getTriggersOfJob(jobKey);
         Map<String, Serializable> jobInfoMap = new HashMap<>();
-        if (triggers==null || triggers.size() == 0){
+        if (triggers == null || triggers.size() == 0) {
             return jobInfoMap;
         }
         JobDetail jd = scheduler.getJobDetail(jobKey);
         Date nextFireTime = triggers.get(0).getNextFireTime();
-        Date previousFireTime=triggers.get(0).getPreviousFireTime();
-        Trigger.TriggerState triggerState=scheduler.getTriggerState(triggers.get(0).getKey());
+        Date previousFireTime = triggers.get(0).getPreviousFireTime();
+        Trigger.TriggerState triggerState = scheduler.getTriggerState(triggers.get(0).getKey());
 
         jobInfoMap.put("jobName", jobKey.getName());
         jobInfoMap.put("groupName", jobKey.getGroup());
-        if (nextFireTime!=null){
+        if (nextFireTime != null) {
             jobInfoMap.put("nextFireTime", nextFireTime.getTime());
-        }
-        else {
+        } else {
             jobInfoMap.put("nextFireTime", -1);
         }
-        if (previousFireTime!=null) {
+        if (previousFireTime != null) {
             jobInfoMap.put("previousFireTime", previousFireTime.getTime());
-        }
-        else {
+        } else {
             jobInfoMap.put("previousFireTime", -1);
         }
-        jobInfoMap.put("triggerState",triggerState);
+        jobInfoMap.put("triggerState", triggerState);
         jobInfoMap.put("measureId", jd.getJobDataMap().getString("measureId"));
-        jobInfoMap.put("sourcePattern",jd.getJobDataMap().getString("sourcePattern"));
-        jobInfoMap.put("targetPattern",jd.getJobDataMap().getString("targetPattern"));
-        if(StringUtils.isNotEmpty(jd.getJobDataMap().getString("blockStartTimestamp"))) {
+        jobInfoMap.put("sourcePattern", jd.getJobDataMap().getString("sourcePattern"));
+        jobInfoMap.put("targetPattern", jd.getJobDataMap().getString("targetPattern"));
+        if (StringUtils.isNotEmpty(jd.getJobDataMap().getString("blockStartTimestamp"))) {
             jobInfoMap.put("blockStartTimestamp", jd.getJobDataMap().getString("blockStartTimestamp"));
         }
-        jobInfoMap.put("jobStartTime",jd.getJobDataMap().getString("jobStartTime"));
-        jobInfoMap.put("interval",jd.getJobDataMap().getString("interval"));
+        jobInfoMap.put("jobStartTime", jd.getJobDataMap().getString("jobStartTime"));
+        jobInfoMap.put("interval", jd.getJobDataMap().getString("interval"));
         return jobInfoMap;
     }
 
@@ -137,21 +132,20 @@ public class JobServiceImpl implements JobService {
     public GriffinOperationMessage addJob(String groupName, String jobName, Long measureId, JobRequestBody jobRequestBody) {
         int interval;
         Date jobStartTime;
-        try{
+        try {
             interval = Integer.parseInt(jobRequestBody.getInterval());
-            jobStartTime=new Date(Long.parseLong(jobRequestBody.getJobStartTime()));
-            setJobStartTime(jobStartTime,interval);
-        }catch (Exception e){
-            LOGGER.info("jobStartTime or interval format error! "+e);
+            jobStartTime = new Date(Long.parseLong(jobRequestBody.getJobStartTime()));
+            setJobStartTime(jobStartTime, interval);
+        } catch (Exception e) {
+            LOGGER.info("jobStartTime or interval format error! {}", e.getMessage());
             return CREATE_JOB_FAIL;
         }
         try {
             Scheduler scheduler = factory.getObject();
             TriggerKey triggerKey = triggerKey(jobName, groupName);
             if (scheduler.checkExists(triggerKey)) {
-                LOGGER.error("the triggerKey(jobName,groupName) "+jobName+" has been used.");
+                LOGGER.error("the triggerKey(jobName,groupName) {} has been used.", jobName);
                 return CREATE_JOB_FAIL;
-                //scheduler.unscheduleJob(triggerKey);
             }
             JobKey jobKey = jobKey(jobName, groupName);
             JobDetail jobDetail;
@@ -179,25 +173,25 @@ public class JobServiceImpl implements JobService {
             scheduler.scheduleJob(trigger);
             return GriffinOperationMessage.CREATE_JOB_SUCCESS;
         } catch (SchedulerException e) {
-            LOGGER.error("SchedulerException when add job.", e);
+            LOGGER.error("SchedulerException when add job. {}", e.getMessage());
             return CREATE_JOB_FAIL;
         }
     }
 
-    public void setJobStartTime(Date jobStartTime,int interval){
-        long currentTimestamp=System.currentTimeMillis();
-        long jobstartTimestamp=jobStartTime.getTime();
+    private void setJobStartTime(Date jobStartTime, int interval) {
+        long currentTimestamp = System.currentTimeMillis();
+        long jobStartTimestamp = jobStartTime.getTime();
         //if jobStartTime is before currentTimestamp, reset it with a future time
-        if(jobStartTime.before(new Date(currentTimestamp))){
-            long n=(currentTimestamp-jobstartTimestamp)/(long)(interval*1000);
-            jobstartTimestamp=jobstartTimestamp+(n+1)*(long)(interval*1000);
-            jobStartTime.setTime(jobstartTimestamp);
+        if (jobStartTime.before(new Date(currentTimestamp))) {
+            long n = (currentTimestamp - jobStartTimestamp) / (long) (interval * 1000);
+            jobStartTimestamp = jobStartTimestamp + (n + 1) * (long) (interval * 1000);
+            jobStartTime.setTime(jobStartTimestamp);
         }
     }
 
-    public void setJobData(JobDetail jobDetail, JobRequestBody jobRequestBody, Long measureId, String groupName, String jobName){
-        jobDetail.getJobDataMap().put("groupName",groupName);
-        jobDetail.getJobDataMap().put("jobName",jobName);
+    private void setJobData(JobDetail jobDetail, JobRequestBody jobRequestBody, Long measureId, String groupName, String jobName) {
+        jobDetail.getJobDataMap().put("groupName", groupName);
+        jobDetail.getJobDataMap().put("jobName", jobName);
         jobDetail.getJobDataMap().put("measureId", measureId.toString());
         jobDetail.getJobDataMap().put("sourcePattern", jobRequestBody.getSourcePattern());
         jobDetail.getJobDataMap().put("targetPattern", jobRequestBody.getTargetPattern());
@@ -209,26 +203,26 @@ public class JobServiceImpl implements JobService {
     }
 
     @Override
-    public GriffinOperationMessage pauseJob(String group, String name){
+    public GriffinOperationMessage pauseJob(String group, String name) {
         try {
             Scheduler scheduler = factory.getObject();
             scheduler.pauseJob(new JobKey(name, group));
             return GriffinOperationMessage.PAUSE_JOB_SUCCESS;
         } catch (SchedulerException e) {
-            LOGGER.error(GriffinOperationMessage.PAUSE_JOB_FAIL+""+e);
+            LOGGER.error("{} {}", GriffinOperationMessage.PAUSE_JOB_FAIL, e.getMessage());
             return GriffinOperationMessage.PAUSE_JOB_FAIL;
         }
     }
 
-    private GriffinOperationMessage setJobDeleted(String group, String name){
+    private GriffinOperationMessage setJobDeleted(String group, String name) {
         try {
             Scheduler scheduler = factory.getObject();
-            JobDetail jobDetail=scheduler.getJobDetail(new JobKey(name, group));
+            JobDetail jobDetail = scheduler.getJobDetail(new JobKey(name, group));
             jobDetail.getJobDataMap().putAsString("deleted", true);
             scheduler.addJob(jobDetail, true);
             return GriffinOperationMessage.SET_JOB_DELETED_STATUS_SUCCESS;
         } catch (SchedulerException e) {
-            LOGGER.error(GriffinOperationMessage.PAUSE_JOB_FAIL+""+e);
+            LOGGER.error("{} {}", GriffinOperationMessage.PAUSE_JOB_FAIL, e.getMessage());
             return GriffinOperationMessage.SET_JOB_DELETED_STATUS_FAIL;
         }
     }
@@ -237,6 +231,7 @@ public class JobServiceImpl implements JobService {
      * logically delete
      * 1. pause these jobs
      * 2. set these jobs as deleted status
+     *
      * @param group
      * @param name
      * @return
@@ -244,8 +239,8 @@ public class JobServiceImpl implements JobService {
     @Override
     public GriffinOperationMessage deleteJob(String group, String name) {
         //logically delete
-        if (pauseJob(group,name).equals(PAUSE_JOB_SUCCESS) &&
-                setJobDeleted(group, name).equals(SET_JOB_DELETED_STATUS_SUCCESS)){
+        if (pauseJob(group, name).equals(PAUSE_JOB_SUCCESS) &&
+                setJobDeleted(group, name).equals(SET_JOB_DELETED_STATUS_SUCCESS)) {
             return GriffinOperationMessage.DELETE_JOB_SUCCESS;
         }
         return GriffinOperationMessage.DELETE_JOB_FAIL;
@@ -255,87 +250,91 @@ public class JobServiceImpl implements JobService {
      * deleteJobsRelateToMeasure
      * 1. search jobs related to measure
      * 2. deleteJob
+     *
      * @param measure
      */
     public void deleteJobsRelateToMeasure(Measure measure) {
         Scheduler scheduler = factory.getObject();
         try {
-            for(JobKey jobKey: scheduler.getJobKeys(GroupMatcher.anyGroup())){//get all jobs
+            for (JobKey jobKey : scheduler.getJobKeys(GroupMatcher.anyGroup())) {//get all jobs
                 JobDetail jobDetail = scheduler.getJobDetail(jobKey);
                 JobDataMap jobDataMap = jobDetail.getJobDataMap();
-                if(jobDataMap.getString("measureId").equals(measure.getId().toString())){
+                if (jobDataMap.getString("measureId").equals(measure.getId().toString())) {
                     //select jobs related to measureId,
-                    deleteJob(jobKey.getGroup(),jobKey.getName());
-                    LOGGER.info(jobKey.getGroup()+" "+jobKey.getName()+" is paused and logically deleted.");
+                    deleteJob(jobKey.getGroup(), jobKey.getName());
+                    LOGGER.info("{} {} is paused and logically deleted.", jobKey.getGroup(), jobKey.getName());
                 }
             }
         } catch (SchedulerException e) {
-            LOGGER.error("Fail to stop jobs related to measure id: " + measure.getId()+"name: "+measure.getName());
+            LOGGER.error("Fail to stop jobs related to measure id: {} name: {}", measure.getId(), measure.getName());
+            LOGGER.error("Fail to stop jobs related to measure id: {} name: {}", measure.getId(), measure.getName());
         }
     }
 
     @Override
     public List<JobInstance> findInstancesOfJob(String group, String jobName, int page, int size) {
         //query and return instances
-        Pageable pageRequest=new PageRequest(page,size, Sort.Direction.DESC,"timestamp");
-        return jobInstanceRepo.findByGroupNameAndJobName(group,jobName,pageRequest);
+        Pageable pageRequest = new PageRequest(page, size, Sort.Direction.DESC, "timestamp");
+        return jobInstanceRepo.findByGroupNameAndJobName(group, jobName, pageRequest);
     }
 
     @Scheduled(fixedDelayString = "${jobInstance.fixedDelay.in.milliseconds}")
-    public void syncInstancesOfAllJobs(){
-        List<Object> groupJobList=jobInstanceRepo.findGroupWithJobName();
-        for (Object groupJobObj : groupJobList){
-            try{
-                Object[] groupJob=(Object[])groupJobObj;
-                if (groupJob!=null && groupJob.length==2){
-                    syncInstancesOfJob(groupJob[0].toString(),groupJob[1].toString());
+    public void syncInstancesOfAllJobs() {
+        List<Object> groupJobList = jobInstanceRepo.findGroupWithJobName();
+        for (Object groupJobObj : groupJobList) {
+            try {
+                Object[] groupJob = (Object[]) groupJobObj;
+                if (groupJob != null && groupJob.length == 2) {
+                    syncInstancesOfJob(groupJob[0].toString(), groupJob[1].toString());
                 }
-            }catch (Exception e){
-                LOGGER.error("schedule update instances of all jobs failed. "+e);
+            } catch (Exception e) {
+                LOGGER.error("schedule update instances of all jobs failed. {}", e.getMessage());
             }
         }
     }
 
     /**
      * call livy to update jobInstance table in mysql.
+     *
      * @param group
      * @param jobName
      */
-    public void syncInstancesOfJob(String group, String jobName) {
+    private void syncInstancesOfJob(String group, String jobName) {
         //update all instance info belongs to this group and job.
-        List<JobInstance> jobInstanceList=jobInstanceRepo.findByGroupNameAndJobName(group,jobName);
-        for (JobInstance jobInstance:jobInstanceList){
-            if (!LivySessionStates.isActive(jobInstance.getState())){
+        List<JobInstance> jobInstanceList = jobInstanceRepo.findByGroupNameAndJobName(group, jobName);
+        for (JobInstance jobInstance : jobInstanceList) {
+            if (!LivySessionStates.isActive(jobInstance.getState())) {
                 continue;
             }
-            String uri=sparkJobProps.getProperty("livy.uri")+"/"+jobInstance.getSessionId();
-            RestTemplate restTemplate=new RestTemplate();
-            String resultStr=null;
-            try{
-                resultStr=restTemplate.getForObject(uri,String.class);
-            }catch (Exception e){
-                LOGGER.error("spark session "+jobInstance.getSessionId()+" has overdue, set state as unknown!\n"+e);
+            String uri = sparkJobProps.getProperty("livy.uri") + "/" + jobInstance.getSessionId();
+            RestTemplate restTemplate = new RestTemplate();
+            String resultStr;
+            try {
+                resultStr = restTemplate.getForObject(uri, String.class);
+            } catch (Exception e) {
+                LOGGER.error("spark session {} has overdue, set state as unknown!\n {}", jobInstance.getSessionId(), e.getMessage());
                 //if server cannot get session from Livy, set State as unknown.
                 jobInstance.setState(LivySessionStates.State.unknown);
                 jobInstanceRepo.save(jobInstance);
                 continue;
             }
-            TypeReference<HashMap<String,Object>> type=new TypeReference<HashMap<String,Object>>(){};
-            HashMap<String,Object> resultMap;
+            TypeReference<HashMap<String, Object>> type = new TypeReference<HashMap<String, Object>>() {
+            };
+            HashMap<String, Object> resultMap;
             try {
-                resultMap = GriffinUtil.toEntity(resultStr,type);
+                resultMap = GriffinUtil.toEntity(resultStr, type);
             } catch (IOException e) {
-                LOGGER.error("jobInstance jsonStr convert to map failed. "+e);
+                LOGGER.error("jobInstance jsonStr convert to map failed. {}", e.getMessage());
                 continue;
             }
-            try{
-                if (resultMap!=null && resultMap.size()!=0){
+            try {
+                if (resultMap != null && resultMap.size() != 0) {
                     jobInstance.setState(LivySessionStates.State.valueOf(resultMap.get("state").toString()));
                     jobInstance.setAppId(resultMap.get("appId").toString());
-                    jobInstance.setAppUri(sparkJobProps.getProperty("spark.uri")+"/cluster/app/"+resultMap.get("appId").toString());
+                    jobInstance.setAppUri(sparkJobProps.getProperty("spark.uri") + "/cluster/app/" + resultMap.get("appId").toString());
                 }
-            }catch (Exception e){
-                LOGGER.warn(group+","+jobName+"job Instance has some null field (state or appId). "+e);
+            } catch (Exception e) {
+                LOGGER.warn("{},{} job Instance has some null field (state or appId). {}", group, jobName, e.getMessage());
                 continue;
             }
             jobInstanceRepo.save(jobInstance);
@@ -344,35 +343,35 @@ public class JobServiceImpl implements JobService {
 
     /**
      * a job is regard as healthy job when its latest instance is in healthy state.
+     *
      * @return
      */
     @Override
-    public JobHealth getHealthInfo()  {
-        Scheduler scheduler=factory.getObject();
-        int jobCount= 0;
-        int notHealthyCount=0;
+    public JobHealth getHealthInfo() {
+        Scheduler scheduler = factory.getObject();
+        int jobCount = 0;
+        int notHealthyCount = 0;
         try {
-            for (String groupName : scheduler.getJobGroupNames()){
-                for (JobKey jobKey : scheduler.getJobKeys(GroupMatcher.jobGroupEquals(groupName))){
+            for (String groupName : scheduler.getJobGroupNames()) {
+                for (JobKey jobKey : scheduler.getJobKeys(GroupMatcher.jobGroupEquals(groupName))) {
                     jobCount++;
-                    String jobName=jobKey.getName();
-                    String jobGroup=jobKey.getGroup();
-                    Pageable pageRequest=new PageRequest(0,1, Sort.Direction.DESC,"timestamp");
+                    String jobName = jobKey.getName();
+                    String jobGroup = jobKey.getGroup();
+                    Pageable pageRequest = new PageRequest(0, 1, Sort.Direction.DESC, "timestamp");
                     JobInstance latestJobInstance;
-                    if (jobInstanceRepo.findByGroupNameAndJobName(jobGroup,jobName,pageRequest)!=null
-                            &&jobInstanceRepo.findByGroupNameAndJobName(jobGroup,jobName,pageRequest).size()>0){
-                        latestJobInstance=jobInstanceRepo.findByGroupNameAndJobName(jobGroup,jobName,pageRequest).get(0);
-                        if(!LivySessionStates.isHeathy(latestJobInstance.getState())){
+                    if (jobInstanceRepo.findByGroupNameAndJobName(jobGroup, jobName, pageRequest) != null
+                            && jobInstanceRepo.findByGroupNameAndJobName(jobGroup, jobName, pageRequest).size() > 0) {
+                        latestJobInstance = jobInstanceRepo.findByGroupNameAndJobName(jobGroup, jobName, pageRequest).get(0);
+                        if (!LivySessionStates.isHeathy(latestJobInstance.getState())) {
                             notHealthyCount++;
                         }
                     }
                 }
             }
         } catch (SchedulerException e) {
-            LOGGER.error(""+e);
+            LOGGER.error(e.getMessage());
             throw new GetHealthInfoFailureException();
         }
-        JobHealth jobHealth=new JobHealth(jobCount-notHealthyCount,jobCount);
-        return jobHealth;
+        return new JobHealth(jobCount - notHealthyCount, jobCount);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/job/SparkSubmitJob.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/job/SparkSubmitJob.java b/service/src/main/java/org/apache/griffin/core/job/SparkSubmitJob.java
index 47157e0..a6d7487 100644
--- a/service/src/main/java/org/apache/griffin/core/job/SparkSubmitJob.java
+++ b/service/src/main/java/org/apache/griffin/core/job/SparkSubmitJob.java
@@ -21,14 +21,13 @@ package org.apache.griffin.core.job;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.commons.lang.StringUtils;
 import org.apache.griffin.core.job.entity.JobInstance;
 import org.apache.griffin.core.job.entity.LivySessionStates;
 import org.apache.griffin.core.job.entity.SparkJobDO;
 import org.apache.griffin.core.job.repo.JobInstanceRepo;
 import org.apache.griffin.core.measure.entity.DataConnector;
+import org.apache.griffin.core.measure.entity.DataSource;
 import org.apache.griffin.core.measure.entity.Measure;
 import org.apache.griffin.core.measure.repo.MeasureRepo;
 import org.apache.griffin.core.util.GriffinUtil;
@@ -65,11 +64,11 @@ public class SparkSubmitJob implements Job {
      * for example
      * sourcePatternItems or targetPatternItems is like "YYYYMMDD","HH",...
      */
-    private String[] sourcePatternItems,targetPatternItems;
+    private String[] sourcePatternItems, targetPatternItems;
 
     private Measure measure;
-    private String sourcePattern,targetPattern;
-    private String blockStartTimestamp,lastBlockStartTimestamp;
+    private String sourcePattern, targetPattern;
+    private String blockStartTimestamp, lastBlockStartTimestamp;
     private String interval;
     private String uri;
     private RestTemplate restTemplate = new RestTemplate();
@@ -80,54 +79,82 @@ public class SparkSubmitJob implements Job {
 
     /**
      * execute method is used to submit sparkJobDO to Livy.
+     *
      * @param context
      */
     @Override
     public void execute(JobExecutionContext context) {
         JobDetail jd = context.getJobDetail();
-        String groupName=jd.getJobDataMap().getString("groupName");
-        String jobName=jd.getJobDataMap().getString("jobName");
-        init(jd);
+        String groupName = jd.getJobDataMap().getString("groupName");
+        String jobName = jd.getJobDataMap().getString("jobName");
+        initParam(jd);
         //prepare current system timestamp
-        long currentblockStartTimestamp = setCurrentblockStartTimestamp(System.currentTimeMillis());
-        LOGGER.info("currentblockStartTimestamp: "+currentblockStartTimestamp);
-        if (StringUtils.isNotEmpty(sourcePattern)) {
-            sourcePatternItems = sourcePattern.split("-");
-            setDataConnectorPartitions(measure.getSource(), sourcePatternItems, partitionItems, currentblockStartTimestamp);
-        }
-        if (StringUtils.isNotEmpty(targetPattern)) {
-            targetPatternItems = targetPattern.split("-");
-            setDataConnectorPartitions(measure.getTarget(), targetPatternItems, partitionItems, currentblockStartTimestamp);
+        long currentBlockStartTimestamp = setCurrentBlockStartTimestamp(System.currentTimeMillis());
+        LOGGER.info("currentBlockStartTimestamp: {}", currentBlockStartTimestamp);
+        try {
+            if (StringUtils.isNotEmpty(sourcePattern))
+                setAllDataConnectorPartitions(measure.getDataSources(), sourcePattern.split("-"), partitionItems, "source", currentBlockStartTimestamp);
+            if (StringUtils.isNotEmpty(targetPattern))
+                setAllDataConnectorPartitions(measure.getDataSources(), targetPattern.split("-"), partitionItems, "target", currentBlockStartTimestamp);
+        } catch (Exception e) {
+            LOGGER.error("Can not execute job.Set partitions error. {}", e.getMessage());
+            return;
         }
-        jd.getJobDataMap().put("lastBlockStartTimestamp", currentblockStartTimestamp + "");
+        jd.getJobDataMap().put("lastBlockStartTimestamp", currentBlockStartTimestamp + "");
         setSparkJobDO();
-        String result = restTemplate.postForObject(uri, sparkJobDO, String.class);
+        String result;
+        try {
+            result = restTemplate.postForObject(uri, sparkJobDO, String.class);
+        } catch (Exception e) {
+            LOGGER.error("Post spark task error. {}", e.getMessage());
+            return;
+        }
         LOGGER.info(result);
-        saveJobInstance(groupName,jobName,result);
+        saveJobInstance(groupName, jobName, result);
     }
 
-    public void init(JobDetail jd){
-        //jd.getJobDataMap().getString()
+    private void initParam(JobDetail jd) {
         /**
          * the field measureId is generated from `setJobData` in `JobServiceImpl`
          */
         String measureId = jd.getJobDataMap().getString("measureId");
         measure = measureRepo.findOne(Long.valueOf(measureId));
-        if (measure==null) {
-            LOGGER.error("Measure with id " + measureId + " is not find!");
-            //if return here, livy uri won't be set, and will keep null for all measures even they are not null
+        if (measure == null) {
+            LOGGER.error("Measure with id {} is not find!", measureId);
+            return;
         }
-        String partitionItemstr = sparkJobProps.getProperty("sparkJob.dateAndHour");
-        partitionItems = partitionItemstr.split(",");
+        setMeasureInstanceName(measure, jd);
+        partitionItems = sparkJobProps.getProperty("sparkJob.dateAndHour").split(",");
         uri = sparkJobProps.getProperty("livy.uri");
         sourcePattern = jd.getJobDataMap().getString("sourcePattern");
         targetPattern = jd.getJobDataMap().getString("targetPattern");
         blockStartTimestamp = jd.getJobDataMap().getString("blockStartTimestamp");
         lastBlockStartTimestamp = jd.getJobDataMap().getString("lastBlockStartTimestamp");
-        LOGGER.info("lastBlockStartTimestamp:"+lastBlockStartTimestamp);
+        LOGGER.info("lastBlockStartTimestamp:{}", lastBlockStartTimestamp);
         interval = jd.getJobDataMap().getString("interval");
     }
 
+    private void setMeasureInstanceName(Measure measure, JobDetail jd) {
+        // in order to keep metric name unique, we set measure name as jobName at present
+        measure.setName(jd.getJobDataMap().getString("jobName"));
+    }
+
+    private void setAllDataConnectorPartitions(List<DataSource> sources, String[] patternItemSet, String[] partitionItems, String sourceName, long timestamp) {
+        if (sources == null)
+            return;
+        for (DataSource dataSource : sources) {
+            setDataSourcePartitions(dataSource, patternItemSet, partitionItems, sourceName, timestamp);
+        }
+    }
+
+    private void setDataSourcePartitions(DataSource dataSource, String[] patternItemSet, String[] partitionItems, String sourceName, long timestamp) {
+        String name = dataSource.getName();
+        for (DataConnector dataConnector : dataSource.getConnectors()) {
+            if (sourceName.equals(name))
+                setDataConnectorPartitions(dataConnector, patternItemSet, partitionItems, timestamp);
+        }
+    }
+
     private void setDataConnectorPartitions(DataConnector dc, String[] patternItemSet, String[] partitionItems, long timestamp) {
         Map<String, String> partitionItemMap = genPartitionMap(patternItemSet, partitionItems, timestamp);
         /**
@@ -141,67 +168,64 @@ public class SparkSubmitJob implements Job {
         try {
             dc.setConfig(configMap);
         } catch (JsonProcessingException e) {
-            LOGGER.error(""+e);
+            LOGGER.error(e.getMessage());
         }
     }
 
-    public Map<String, String> genPartitionMap(String[] patternItemSet, String[] partitionItems, long timestamp) {
+
+    private Map<String, String> genPartitionMap(String[] patternItemSet, String[] partitionItems, long timestamp) {
         /**
          * patternItemSet:{YYYYMMdd,HH}
          * partitionItems:{dt,hour}
          * partitionItemMap:{dt=20170804,hour=09}
          */
-        int comparableSizeMin=Math.min(patternItemSet.length,partitionItems.length);
+        int comparableSizeMin = Math.min(patternItemSet.length, partitionItems.length);
         Map<String, String> partitionItemMap = new HashMap<>();
         for (int i = 0; i < comparableSizeMin; i++) {
             /**
              * in order to get a standard date like 20170427 01 (YYYYMMdd-HH)
              */
-            String pattrn = patternItemSet[i].replace("mm", "MM");
-            pattrn = pattrn.replace("DD", "dd");
-            pattrn = pattrn.replace("hh", "HH");
-            SimpleDateFormat sdf = new SimpleDateFormat(pattrn);
+            String pattern = patternItemSet[i].replace("mm", "MM");
+            pattern = pattern.replace("DD", "dd");
+            pattern = pattern.replace("hh", "HH");
+            SimpleDateFormat sdf = new SimpleDateFormat(pattern);
             partitionItemMap.put(partitionItems[i], sdf.format(new Date(timestamp)));
         }
         return partitionItemMap;
     }
 
 
-    public long setCurrentblockStartTimestamp(long currentSystemTimestamp) {
-        long currentblockStartTimestamp=0;
+    private long setCurrentBlockStartTimestamp(long currentSystemTimestamp) {
+        long currentBlockStartTimestamp = 0;
         if (StringUtils.isNotEmpty(lastBlockStartTimestamp)) {
             try {
-                currentblockStartTimestamp = Long.parseLong(lastBlockStartTimestamp) + Integer.parseInt(interval) * 1000;
-            }catch (Exception e){
-                LOGGER.info("lastBlockStartTimestamp or interval format problem! "+e);
+                currentBlockStartTimestamp = Long.parseLong(lastBlockStartTimestamp) + Integer.parseInt(interval) * 1000;
+            } catch (Exception e) {
+                LOGGER.info("lastBlockStartTimestamp or interval format problem! {}", e.getMessage());
             }
         } else {
             if (StringUtils.isNotEmpty(blockStartTimestamp)) {
-                try{
-                    currentblockStartTimestamp = Long.parseLong(blockStartTimestamp);
-                }catch (Exception e){
-                    LOGGER.info("blockStartTimestamp format problem! "+e);
+                try {
+                    currentBlockStartTimestamp = Long.parseLong(blockStartTimestamp);
+                } catch (Exception e) {
+                    LOGGER.info("blockStartTimestamp format problem! {}", e.getMessage());
                 }
             } else {
-                currentblockStartTimestamp = currentSystemTimestamp;
+                currentBlockStartTimestamp = currentSystemTimestamp;
             }
         }
-        return currentblockStartTimestamp;
+        return currentBlockStartTimestamp;
     }
 
-    public void setSparkJobDO() {
+    private void setSparkJobDO() {
         sparkJobDO.setFile(sparkJobProps.getProperty("sparkJob.file"));
         sparkJobDO.setClassName(sparkJobProps.getProperty("sparkJob.className"));
 
-        List<String> args = new ArrayList<String>();
+        List<String> args = new ArrayList<>();
         args.add(sparkJobProps.getProperty("sparkJob.args_1"));
-        ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
-        String measureJson = "";
-        try {
-            measureJson = ow.writeValueAsString(measure);
-        } catch (JsonProcessingException e) {
-            e.printStackTrace();
-        }
+        // measure
+        String measureJson;
+        measureJson = GriffinUtil.toJson(measure);
         args.add(measureJson);  //partition
         args.add(sparkJobProps.getProperty("sparkJob.args_3"));
         sparkJobDO.setArgs(args);
@@ -213,8 +237,8 @@ public class SparkSubmitJob implements Job {
         sparkJobDO.setDriverMemory(sparkJobProps.getProperty("sparkJob.driverMemory"));
         sparkJobDO.setExecutorMemory(sparkJobProps.getProperty("sparkJob.executorMemory"));
 
-        Map<String,String> conf=new HashMap<String,String>();
-        conf.put("spark.jars.packages",sparkJobProps.getProperty("sparkJob.spark.jars.packages"));
+        Map<String, String> conf = new HashMap<>();
+        conf.put("spark.jars.packages", sparkJobProps.getProperty("sparkJob.spark.jars.packages"));
         sparkJobDO.setConf(conf);
 
         List<String> jars = new ArrayList<>();
@@ -227,25 +251,26 @@ public class SparkSubmitJob implements Job {
         sparkJobDO.setFiles(files);
     }
 
-    public void saveJobInstance(String groupName,String jobName,String result){
+    private void saveJobInstance(String groupName, String jobName, String result) {
         //save JobInstance info into DataBase
-        Map<String,Object> resultMap=new HashMap<String,Object>();
-        TypeReference<HashMap<String,Object>> type=new TypeReference<HashMap<String,Object>>(){};
+        Map<String, Object> resultMap = new HashMap<>();
+        TypeReference<HashMap<String, Object>> type = new TypeReference<HashMap<String, Object>>() {
+        };
         try {
-            resultMap= GriffinUtil.toEntity(result,type);
+            resultMap = GriffinUtil.toEntity(result, type);
         } catch (IOException e) {
-            LOGGER.error("jobInstance jsonStr convert to map failed. "+e);
+            LOGGER.error("jobInstance jsonStr convert to map failed. {}", e.getMessage());
         }
-        JobInstance jobInstance=new JobInstance();
-        if(resultMap!=null) {
+        JobInstance jobInstance = new JobInstance();
+        if (resultMap != null) {
             jobInstance.setGroupName(groupName);
             jobInstance.setJobName(jobName);
             try {
                 jobInstance.setSessionId(Integer.parseInt(resultMap.get("id").toString()));
                 jobInstance.setState(LivySessionStates.State.valueOf(resultMap.get("state").toString()));
                 jobInstance.setAppId(resultMap.get("appId").toString());
-            }catch (Exception e){
-                LOGGER.warn("jobInstance has null field. "+e);
+            } catch (Exception e) {
+                LOGGER.warn("jobInstance has null field. {}", e.getMessage());
             }
             jobInstance.setTimestamp(System.currentTimeMillis());
             jobInstanceRepo.save(jobInstance);

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/job/entity/JobInstance.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/job/entity/JobInstance.java b/service/src/main/java/org/apache/griffin/core/job/entity/JobInstance.java
index ab90dd3..4521999 100644
--- a/service/src/main/java/org/apache/griffin/core/job/entity/JobInstance.java
+++ b/service/src/main/java/org/apache/griffin/core/job/entity/JobInstance.java
@@ -24,9 +24,6 @@ import org.apache.griffin.core.measure.entity.AuditableEntity;
 
 import javax.persistence.*;
 
-/**
- * Created by xiangrchen on 5/31/17.
- */
 @Entity
 public class JobInstance extends AuditableEntity {
 
@@ -36,12 +33,12 @@ public class JobInstance extends AuditableEntity {
     private String jobName;
     private int sessionId;
     @Enumerated(EnumType.STRING)
-    State state;
-    String appId;
+    private State state;
+    private String appId;
     @Lob
-    @Column(length=1024) //2^10=1024
+    @Column(length = 1024) //2^10=1024
     private String appUri;
-    long timestamp;
+    private long timestamp;
 
     public String getGroupName() {
         return groupName;

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/job/entity/JobRequestBody.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/job/entity/JobRequestBody.java b/service/src/main/java/org/apache/griffin/core/job/entity/JobRequestBody.java
index 796949e..dd28bf1 100644
--- a/service/src/main/java/org/apache/griffin/core/job/entity/JobRequestBody.java
+++ b/service/src/main/java/org/apache/griffin/core/job/entity/JobRequestBody.java
@@ -18,9 +18,6 @@ under the License.
 */
 package org.apache.griffin.core.job.entity;
 
-/**
- * Created by xiangrchen on 4/27/17.
- */
 public class JobRequestBody {
     private String sourcePattern;
     private String targetPattern;

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/job/entity/LivySessionStates.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/job/entity/LivySessionStates.java b/service/src/main/java/org/apache/griffin/core/job/entity/LivySessionStates.java
index 989bc44..5839fb5 100644
--- a/service/src/main/java/org/apache/griffin/core/job/entity/LivySessionStates.java
+++ b/service/src/main/java/org/apache/griffin/core/job/entity/LivySessionStates.java
@@ -21,9 +21,6 @@ package org.apache.griffin.core.job.entity;
 
 import com.cloudera.livy.sessions.SessionState;
 
-/**
- * Created by xiangrchen on 7/31/17.
- */
 public class LivySessionStates {
 
     /**
@@ -44,8 +41,8 @@ public class LivySessionStates {
         unknown
     }
 
-    public static SessionState toSessionState(State state){
-        switch (state){
+    public static SessionState toSessionState(State state) {
+        switch (state) {
             case not_started:
                 return new SessionState.NotStarted();
             case starting:
@@ -71,21 +68,21 @@ public class LivySessionStates {
         }
     }
 
-    public static boolean isActive(State state){
-        if (State.unknown.equals(state)){
+    public static boolean isActive(State state) {
+        if (State.unknown.equals(state)) {
             // set unknown isactive() as false.
             return false;
         }
-        SessionState sessionState=toSessionState(state);
-        if (sessionState==null){
+        SessionState sessionState = toSessionState(state);
+        if (sessionState == null) {
             return false;
-        }else {
+        } else {
             return sessionState.isActive();
         }
     }
 
-    public static boolean isHeathy(State state){
-        if (State.error.equals(state) || State.dead.equals(state) || State.shutting_down.equals(state)){
+    public static boolean isHeathy(State state) {
+        if (State.error.equals(state) || State.dead.equals(state) || State.shutting_down.equals(state)) {
             return false;
         }
         return true;

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/job/entity/SparkJobDO.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/job/entity/SparkJobDO.java b/service/src/main/java/org/apache/griffin/core/job/entity/SparkJobDO.java
index c447260..437cde7 100644
--- a/service/src/main/java/org/apache/griffin/core/job/entity/SparkJobDO.java
+++ b/service/src/main/java/org/apache/griffin/core/job/entity/SparkJobDO.java
@@ -24,39 +24,35 @@ import java.util.List;
 import java.util.Map;
 
 /**
- * Created by xiangrchen on 4/26/17.
- */
-
-/**
  * SparkJobDO
- {
- "file": "hdfs:///griffin/griffin-measure.jar",
- "className": "org.apache.griffin.measure.batch.Application",
- "args": [
- "/benchmark/test/env.json",
- "{\"name\":\"data_rdm\",\"type\":\"accuracy\",\"source\":{\"type\":\"hive\",\"version\":\"1.2\",\"config\":{\"database\":\"default\",\"table.name\":\"data_rdm\"} },\"target\":{\"type\":\"hive\",\"version\":\"1.2\",\"config\":{\"database\":\"default\",\"table.name\":\"data_rdm\"} },\"evaluateRule\":{\"sampleRatio\":1,\"rules\":\"$source.uage > 100 AND $source.uid = $target.uid AND $source.uage + 12 = $target.uage + 10 + 2 AND $source.udes + 11 = $target.udes + 1 + 1\"} }",
- "hdfs,raw"
- ],
- "name": "griffin-livy",
- "queue": "default",
- "numExecutors": 2,
- "executorCores": 4,
- "driverMemory": "2g",
- "executorMemory": "2g",
- "conf": {
- "spark.jars.packages": "com.databricks:spark-avro_2.10:2.0.1"
- },
- "jars": [
- "/livy/datanucleus-api-jdo-3.2.6.jar",
- "/livy/datanucleus-core-3.2.10.jar",
- "/livy/datanucleus-rdbms-3.2.9.jar"
- ],
- "files": [
- "/livy/hive-site.xml"
- ]
- }'
+ * {
+ * "file": "hdfs:///griffin/griffin-measure.jar",
+ * "className": "org.apache.griffin.measure.batch.Application",
+ * "args": [
+ * "/benchmark/test/env.json",
+ * "{\"name\":\"data_rdm\",\"type\":\"accuracy\",\"source\":{\"type\":\"hive\",\"version\":\"1.2\",\"config\":{\"database\":\"default\",\"table.name\":\"data_rdm\"} },\"target\":{\"type\":\"hive\",\"version\":\"1.2\",\"config\":{\"database\":\"default\",\"table.name\":\"data_rdm\"} },\"evaluateRule\":{\"sampleRatio\":1,\"rules\":\"$source.uage > 100 AND $source.uid = $target.uid AND $source.uage + 12 = $target.uage + 10 + 2 AND $source.udes + 11 = $target.udes + 1 + 1\"} }",
+ * "hdfs,raw"
+ * ],
+ * "name": "griffin-livy",
+ * "queue": "default",
+ * "numExecutors": 2,
+ * "executorCores": 4,
+ * "driverMemory": "2g",
+ * "executorMemory": "2g",
+ * "conf": {
+ * "spark.jars.packages": "com.databricks:spark-avro_2.10:2.0.1"
+ * },
+ * "jars": [
+ * "/livy/datanucleus-api-jdo-3.2.6.jar",
+ * "/livy/datanucleus-core-3.2.10.jar",
+ * "/livy/datanucleus-rdbms-3.2.9.jar"
+ * ],
+ * "files": [
+ * "/livy/hive-site.xml"
+ * ]
+ * }'
  */
-public class SparkJobDO implements Serializable{
+public class SparkJobDO implements Serializable {
 
     private String file;
 
@@ -76,7 +72,7 @@ public class SparkJobDO implements Serializable{
 
     private String executorMemory;
 
-    private Map<String,String> conf;
+    private Map<String, String> conf;
 
     private List<String> jars;
 

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/job/repo/JobInstanceRepo.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/job/repo/JobInstanceRepo.java b/service/src/main/java/org/apache/griffin/core/job/repo/JobInstanceRepo.java
index 0bce562..d07b2b7 100644
--- a/service/src/main/java/org/apache/griffin/core/job/repo/JobInstanceRepo.java
+++ b/service/src/main/java/org/apache/griffin/core/job/repo/JobInstanceRepo.java
@@ -25,17 +25,15 @@ import org.springframework.data.jpa.repository.Modifying;
 import org.springframework.data.jpa.repository.Query;
 import org.springframework.data.repository.CrudRepository;
 import org.springframework.stereotype.Repository;
-import org.springframework.transaction.annotation.Transactional;
 
 import java.util.List;
 
 
 @Repository
-public interface JobInstanceRepo extends CrudRepository<JobInstance,Long>{
+public interface JobInstanceRepo extends CrudRepository<JobInstance, Long> {
     /**
-     *
-     * @param group is group name
-     * @param name is job name
+     * @param group    is group name
+     * @param name     is job name
      * @param pageable
      * @return all job instances scheduled at different time using the same prototype job,
      * the prototype job is determined by SCHED_NAME, group name and job name in table QRTZ_JOB_DETAILS.
@@ -49,18 +47,16 @@ public interface JobInstanceRepo extends CrudRepository<JobInstance,Long>{
             "where s.groupName= ?1 and s.jobName=?2 ")
     List<JobInstance> findByGroupNameAndJobName(String group, String name);
 
-    @Query("select DISTINCT s.groupName, s.jobName from JobInstance s")
+    @Query("select DISTINCT s.groupName, s.jobName, s.id from JobInstance s")
     List<Object> findGroupWithJobName();
 
-    @Transactional
     @Modifying
     @Query("delete from JobInstance s " +
             "where s.groupName= ?1 and s.jobName=?2 ")
-    void deleteByGroupAndjobName(String groupName, String jobName);
+    void deleteByGroupAndJobName(String groupName, String jobName);
 
-    @Transactional
     @Modifying
-    @Query("update JobInstance s "+
+    @Query("update JobInstance s " +
             "set s.state= ?2, s.appId= ?3, s.appUri= ?4 where s.id= ?1")
     void update(Long Id, LivySessionStates.State state, String appId, String appUri);
 

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/login/LoginController.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/login/LoginController.java b/service/src/main/java/org/apache/griffin/core/login/LoginController.java
index 7a5f5af..2e75a81 100644
--- a/service/src/main/java/org/apache/griffin/core/login/LoginController.java
+++ b/service/src/main/java/org/apache/griffin/core/login/LoginController.java
@@ -23,11 +23,12 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.core.env.Environment;
-import org.springframework.http.HttpStatus;
 import org.springframework.http.ResponseEntity;
-import org.springframework.web.bind.annotation.*;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.RestController;
 
-import java.util.HashMap;
 import java.util.Map;
 
 @RestController

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/login/LoginService.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/login/LoginService.java b/service/src/main/java/org/apache/griffin/core/login/LoginService.java
index 83b9c48..bdb5a64 100644
--- a/service/src/main/java/org/apache/griffin/core/login/LoginService.java
+++ b/service/src/main/java/org/apache/griffin/core/login/LoginService.java
@@ -23,13 +23,13 @@ import org.springframework.http.ResponseEntity;
 
 import java.util.Map;
 
-public interface LoginService  {
+public interface LoginService {
 
-    public ResponseEntity<Map<String, Object>> login(Map<String, String> map);
+    ResponseEntity<Map<String, Object>> login(Map<String, String> map);
 
-    public ResponseEntity<Map<String, Object>> loginDefault(Map<String, String> map);
+    ResponseEntity<Map<String, Object>> loginDefault(Map<String, String> map);
 
-    public ResponseEntity<Map<String, Object>> loginLDAP(Map<String, String> map);
+    ResponseEntity<Map<String, Object>> loginLDAP(Map<String, String> map);
 
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/login/LoginServiceImpl.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/login/LoginServiceImpl.java b/service/src/main/java/org/apache/griffin/core/login/LoginServiceImpl.java
index 7598feb..5f8a069 100644
--- a/service/src/main/java/org/apache/griffin/core/login/LoginServiceImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/login/LoginServiceImpl.java
@@ -35,7 +35,9 @@ import javax.naming.directory.SearchControls;
 import javax.naming.directory.SearchResult;
 import javax.naming.ldap.InitialLdapContext;
 import javax.naming.ldap.LdapContext;
-import java.util.*;
+import java.util.HashMap;
+import java.util.Hashtable;
+import java.util.Map;
 
 @Service
 public class LoginServiceImpl implements LoginService {
@@ -45,29 +47,31 @@ public class LoginServiceImpl implements LoginService {
     private Environment env;
 
     @Override
-    public ResponseEntity<Map<String, Object>> login(Map<String, String> map){
+    public ResponseEntity<Map<String, Object>> login(Map<String, String> map) {
         String strategy = env.getProperty("login.strategy");
-        switch (strategy){
-            case "ldap": return loginLDAP(map);
-            case "default":return loginDefault(map);
+        switch (strategy) {
+            case "ldap":
+                return loginLDAP(map);
+            case "default":
+                return loginDefault(map);
             default: {
                 LOGGER.error("Missing login strategy configuration");
-                return new ResponseEntity<Map<String, Object>>(new HashMap<String,Object>(), HttpStatus.NOT_FOUND);
+                return new ResponseEntity<Map<String, Object>>(new HashMap<String, Object>(), HttpStatus.NOT_FOUND);
             }
         }
     }
 
     @Override
-    public ResponseEntity<Map<String, Object>> loginDefault(Map<String, String> map){
+    public ResponseEntity<Map<String, Object>> loginDefault(Map<String, String> map) {
         String username = map.get("username");
         String password = map.get("password");
-        if(username == null || password == null){
+        if (username == null || password == null) {
             LOGGER.error("Missing default login input");
             return null;
         }
         String fullName = null;
-        if(username.equals("user")){
-            if(password.equals("test")){
+        if (username.equals("user")) {
+            if (password.equals("test")) {
                 fullName = "Default";
             }
         }
@@ -78,7 +82,7 @@ public class LoginServiceImpl implements LoginService {
     public ResponseEntity<Map<String, Object>> loginLDAP(Map<String, String> map) {
         String ntAccount = map.get("username");
         String password = map.get("password");
-        if(ntAccount == null || password == null){
+        if (ntAccount == null || password == null) {
             LOGGER.error("Missing ldap login input");
             return null;
         }
@@ -86,10 +90,10 @@ public class LoginServiceImpl implements LoginService {
         return getResponse(ntAccount, fullName);
     }
 
-    private String searchLDAP(String ntAccount, String password){
+    private String searchLDAP(String ntAccount, String password) {
         String domainComponent = env.getProperty("ldap.dc");
         Hashtable<String, String> ht = getLDAPEnvironmrnt(ntAccount, password);
-        if(domainComponent == null || ht == null){
+        if (domainComponent == null || ht == null) {
             return null;
         }
         LdapContext ctx;
@@ -106,7 +110,7 @@ public class LoginServiceImpl implements LoginService {
                 Attributes attrs = searchResult.getAttributes();
                 if (attrs != null && attrs.get("cn") != null) {
                     String cnName = (String) attrs.get("cn").get();
-                    if(cnName.indexOf("(") > 0){
+                    if (cnName.indexOf("(") > 0) {
                         fullName = cnName.substring(0, cnName.indexOf("("));
                     }
                 }
@@ -118,12 +122,12 @@ public class LoginServiceImpl implements LoginService {
         return null;
     }
 
-    private Hashtable<String, String> getLDAPEnvironmrnt(String ntAccount, String password){
+    private Hashtable<String, String> getLDAPEnvironmrnt(String ntAccount, String password) {
         String ldapUrl = env.getProperty("ldap.url");
         String domain = env.getProperty("ldap.domain");
         String connectTimeout = env.getProperty("ldap.connect-timeout");
         String readTimeout = env.getProperty("ldap.read-timeout");
-        if(ldapUrl == null || domain == null ||connectTimeout == null || readTimeout == null){
+        if (ldapUrl == null || domain == null || connectTimeout == null || readTimeout == null) {
             LOGGER.error("Missing ldap properties");
             return null;
         }
@@ -139,14 +143,14 @@ public class LoginServiceImpl implements LoginService {
         return ht;
     }
 
-    private ResponseEntity<Map<String,Object>> getResponse(String ntAccount, String fullName){
-        Map<String,Object> message = new HashMap<String,Object>();
-        if(fullName!=null){
+    private ResponseEntity<Map<String, Object>> getResponse(String ntAccount, String fullName) {
+        Map<String, Object> message = new HashMap<String, Object>();
+        if (fullName != null) {
             message.put("ntAccount", ntAccount);
             message.put("fullName", fullName);
             message.put("status", 0);
             return new ResponseEntity<Map<String, Object>>(message, HttpStatus.OK);
-        }else {
+        } else {
             return new ResponseEntity<Map<String, Object>>(message, HttpStatus.NOT_FOUND);
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/measure/MeasureController.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/measure/MeasureController.java b/service/src/main/java/org/apache/griffin/core/measure/MeasureController.java
index 5ad8893..f93ce6a 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/MeasureController.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/MeasureController.java
@@ -32,30 +32,30 @@ import java.util.Map;
 
 public class MeasureController {
     @Autowired
-    MeasureService measureService;
+    private MeasureService measureService;
 
-    @RequestMapping(value = "/measures",method = RequestMethod.GET)
+    @RequestMapping(value = "/measures", method = RequestMethod.GET)
     public Iterable<Measure> getAllAliveMeasures() {
         return measureService.getAllAliveMeasures();
     }
 
-    @RequestMapping(value = "/measure/{id}",method = RequestMethod.GET)
+    @RequestMapping(value = "/measure/{id}", method = RequestMethod.GET)
     public Measure getMeasureById(@PathVariable("id") long id) {
         return measureService.getMeasureById(id);
     }
 
-    @RequestMapping(value = "/measure/{id}",method = RequestMethod.DELETE)
+    @RequestMapping(value = "/measure/{id}", method = RequestMethod.DELETE)
     public GriffinOperationMessage deleteMeasureById(@PathVariable("id") Long id) {
         return measureService.deleteMeasureById(id);
     }
 
-    @RequestMapping(value = "/measure",method = RequestMethod.PUT)
+    @RequestMapping(value = "/measure", method = RequestMethod.PUT)
     public GriffinOperationMessage updateMeasure(@RequestBody Measure measure) {
         return measureService.updateMeasure(measure);
     }
 
-    @RequestMapping(value = "/measures/owner/{owner}",method = RequestMethod.GET)
-    public List<Map<String, String>> getAllAliveMeasureNameIdByOwner(@PathVariable("owner") String owner){
+    @RequestMapping(value = "/measures/owner/{owner}", method = RequestMethod.GET)
+    public List<Map<String, String>> getAllAliveMeasureNameIdByOwner(@PathVariable("owner") String owner) {
         return measureService.getAllAliveMeasureNameIdByOwner(owner);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/measure/MeasureServiceImpl.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/measure/MeasureServiceImpl.java b/service/src/main/java/org/apache/griffin/core/measure/MeasureServiceImpl.java
index 61b16b6..021af17 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/MeasureServiceImpl.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/MeasureServiceImpl.java
@@ -27,7 +27,6 @@ import org.apache.griffin.core.util.GriffinOperationMessage;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.scheduling.quartz.SchedulerFactoryBean;
 import org.springframework.stereotype.Service;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestBody;
@@ -42,11 +41,10 @@ public class MeasureServiceImpl implements MeasureService {
     private static final Logger LOGGER = LoggerFactory.getLogger(MeasureServiceImpl.class);
 
     @Autowired
-    JobServiceImpl jobService;
+    private JobServiceImpl jobService;
     @Autowired
     private MeasureRepo measureRepo;
-    @Autowired
-    private SchedulerFactoryBean factory;
+
     @Override
     public Iterable<Measure> getAllAliveMeasures() {
         return measureRepo.findByDeleted(false);
@@ -57,12 +55,9 @@ public class MeasureServiceImpl implements MeasureService {
         return measureRepo.findOne(id);
     }
 
-
-
-
     @Override
     public GriffinOperationMessage deleteMeasureById(Long measureId) {
-        if (measureRepo.exists(measureId) == false) {
+        if (!measureRepo.exists(measureId)) {
             return GriffinOperationMessage.RESOURCE_NOT_FOUND;
         } else {
             //pause all jobs related to the measure
@@ -74,18 +69,23 @@ public class MeasureServiceImpl implements MeasureService {
         }
     }
 
-
     @Override
     public GriffinOperationMessage createMeasure(Measure measure) {
         List<Measure> aliveMeasureList = measureRepo.findByNameAndDeleted(measure.getName(), false);
         if (aliveMeasureList.size() == 0) {
-            if (measureRepo.save(measure) != null)
-                return GriffinOperationMessage.CREATE_MEASURE_SUCCESS;
-            else {
+            try {
+                if (measureRepo.save(measure) != null)
+                    return GriffinOperationMessage.CREATE_MEASURE_SUCCESS;
+                else {
+                    return GriffinOperationMessage.CREATE_MEASURE_FAIL;
+                }
+            } catch (Exception e) {
+                LOGGER.info("Failed to create new measure {}.{}", measure.getName(), e.getMessage());
                 return GriffinOperationMessage.CREATE_MEASURE_FAIL;
             }
+
         } else {
-            LOGGER.warn("Failed to create new measure " + measure.getName() + ", it already exists");
+            LOGGER.info("Failed to create new measure {}, it already exists.", measure.getName());
             return GriffinOperationMessage.CREATE_MEASURE_FAIL_DUPLICATE;
         }
     }
@@ -93,7 +93,7 @@ public class MeasureServiceImpl implements MeasureService {
     @Override
     public List<Map<String, String>> getAllAliveMeasureNameIdByOwner(String owner) {
         List<Map<String, String>> res = new ArrayList<>();
-        for(Measure measure: measureRepo.findByOwnerAndDeleted(owner, false)){
+        for (Measure measure : measureRepo.findByOwnerAndDeleted(owner, false)) {
             HashMap<String, String> map = new HashMap<>();
             map.put("name", measure.getName());
             map.put("id", measure.getId().toString());
@@ -103,10 +103,16 @@ public class MeasureServiceImpl implements MeasureService {
     }
 
     public GriffinOperationMessage updateMeasure(@RequestBody Measure measure) {
-        if (measureRepo.exists(measure.getId()) == false) {
+        if (!measureRepo.exists(measure.getId())) {
             return GriffinOperationMessage.RESOURCE_NOT_FOUND;
         } else {
-            measureRepo.save(measure);
+            try {
+                measureRepo.save(measure);
+            } catch (Exception e) {
+                LOGGER.error("Failed to update measure. {}", e.getMessage());
+                return GriffinOperationMessage.UPDATE_MEASURE_FAIL;
+            }
+
             return GriffinOperationMessage.UPDATE_MEASURE_SUCCESS;
         }
     }

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/main/java/org/apache/griffin/core/measure/entity/AuditableEntity.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/measure/entity/AuditableEntity.java b/service/src/main/java/org/apache/griffin/core/measure/entity/AuditableEntity.java
index 9639f9c..546bad9 100644
--- a/service/src/main/java/org/apache/griffin/core/measure/entity/AuditableEntity.java
+++ b/service/src/main/java/org/apache/griffin/core/measure/entity/AuditableEntity.java
@@ -35,7 +35,7 @@ public abstract class AuditableEntity implements Serializable {
 //    private static final long serialVersionUID = 1L;
 
     @Id
-    @GeneratedValue(strategy=GenerationType.AUTO)
+    @GeneratedValue(strategy = GenerationType.AUTO)
     private Long id;
 
     @JsonIgnore
@@ -52,11 +52,21 @@ public abstract class AuditableEntity implements Serializable {
         this.id = id;
     }
 
-    public Timestamp getCreatedDate() { return createdDate; }
-    public void setCreatedDate(Timestamp createdDate) { this.createdDate = createdDate; }
+    public Timestamp getCreatedDate() {
+        return createdDate;
+    }
+
+    public void setCreatedDate(Timestamp createdDate) {
+        this.createdDate = createdDate;
+    }
 
-    public Timestamp getModifiedDate() { return modifiedDate; } 
-    public void setModifiedDate(Timestamp modifiedDate) { this.modifiedDate = modifiedDate; }
+    public Timestamp getModifiedDate() {
+        return modifiedDate;
+    }
+
+    public void setModifiedDate(Timestamp modifiedDate) {
+        this.modifiedDate = modifiedDate;
+    }
 
     @Override
     public int hashCode() {