You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@griffin.apache.org by gu...@apache.org on 2017/10/09 07:12:42 UTC
[2/4] incubator-griffin git commit: change JSON format,
update unit tests and fix Hive connection
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/job/JobServiceImplTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/job/JobServiceImplTest.java b/service/src/test/java/org/apache/griffin/core/job/JobServiceImplTest.java
index a77dc69..6efa102 100644
--- a/service/src/test/java/org/apache/griffin/core/job/JobServiceImplTest.java
+++ b/service/src/test/java/org/apache/griffin/core/job/JobServiceImplTest.java
@@ -19,6 +19,7 @@ under the License.
package org.apache.griffin.core.job;
+import org.apache.griffin.core.error.exception.GriffinException;
import org.apache.griffin.core.job.entity.JobHealth;
import org.apache.griffin.core.job.entity.JobInstance;
import org.apache.griffin.core.job.entity.JobRequestBody;
@@ -30,10 +31,8 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.quartz.*;
+import org.quartz.impl.JobDetailImpl;
import org.quartz.impl.matchers.GroupMatcher;
-import org.quartz.impl.triggers.CronTriggerImpl;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.test.mock.mockito.MockBean;
@@ -44,28 +43,26 @@ import org.springframework.data.domain.Sort;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.test.context.junit4.SpringRunner;
-import java.io.Serializable;
import java.util.*;
-import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.fail;
+import static org.apache.griffin.core.measure.MeasureTestHelper.createJobDetail;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.BDDMockito.given;
-import static org.quartz.JobBuilder.newJob;
+import static org.quartz.TriggerBuilder.newTrigger;
@RunWith(SpringRunner.class)
public class JobServiceImplTest {
- private static final Logger log = LoggerFactory.getLogger(JobServiceImplTest.class);
@TestConfiguration
- public static class SchedulerServiceConfiguration{
+ public static class SchedulerServiceConfiguration {
@Bean
- public JobServiceImpl service(){
+ public JobServiceImpl service() {
return new JobServiceImpl();
}
+
@Bean
- public SchedulerFactoryBean factoryBean(){
+ public SchedulerFactoryBean factoryBean() {
return new SchedulerFactoryBean();
}
}
@@ -78,145 +75,123 @@ public class JobServiceImplTest {
private SchedulerFactoryBean factory;
@Autowired
- private JobServiceImpl service;
+ public JobServiceImpl service;
@Before
- public void setup(){
+ public void setup() {
}
@Test
- public void testGetJobs(){
+ public void testGetAliveJobs() throws SchedulerException {
+ Scheduler scheduler = Mockito.mock(Scheduler.class);
+ JobDetailImpl jobDetail = createJobDetail();
+ given(factory.getObject()).willReturn(scheduler);
+ given(scheduler.getJobGroupNames()).willReturn(Arrays.asList("group"));
+ HashSet<JobKey> set = new HashSet<JobKey>() {{
+ add(new JobKey("name", "group"));
+ }};
+ given(scheduler.getJobKeys(GroupMatcher.jobGroupEquals("group"))).willReturn(set);
+ Trigger trigger = newTrigger().withIdentity(TriggerKey.triggerKey("name", "group")).
+ withSchedule(SimpleScheduleBuilder.simpleSchedule().withIntervalInSeconds(3000).repeatForever())
+ .startAt(new Date()).build();
+ List<Trigger> triggers = Arrays.asList(trigger);
+ JobKey jobKey = set.iterator().next();
+ given((List<Trigger>) scheduler.getTriggersOfJob(jobKey)).willReturn(triggers);
+ given(scheduler.getJobDetail(jobKey)).willReturn(jobDetail);
+ assertEquals(service.getAliveJobs().size(), 1);
+
+ // trigger is empty
+ given((List<Trigger>) scheduler.getTriggersOfJob(jobKey)).willReturn(Arrays.asList());
+ assertEquals(service.getAliveJobs().size(), 0);
+
+ // schedule exception
+ GriffinException.GetJobsFailureException exception = null;
try {
- Scheduler scheduler=Mockito.mock(Scheduler.class);
- given(factory.getObject()).willReturn(scheduler);
- List<Map<String, Serializable>> tmp = service.getAliveJobs();
- assertTrue(true);
- }catch (Throwable t){
- fail("Cannot get all jobs info from dbs");
+ given(scheduler.getTriggersOfJob(jobKey)).willThrow(new GriffinException.GetJobsFailureException());
+ service.getAliveJobs();
+ } catch (GriffinException.GetJobsFailureException e) {
+ exception = e;
}
- }
+ assertTrue(exception != null);
- @Test
- public void testSetJobsByKey(){
- try {
- List<Map<String, Serializable>> list = new ArrayList<Map<String, Serializable>>();
- Scheduler scheduler = Mockito.mock(Scheduler.class);
- JobKey jobKey = new JobKey("TEST");
- List<Trigger> triggers = new ArrayList<Trigger>();
- Trigger trigger = new CronTriggerImpl();
- triggers.add(trigger);
- given((List<Trigger>) scheduler.getTriggersOfJob(jobKey)).willReturn(triggers);
-
- JobDetail jd = Mockito.mock(JobDetail.class);
- given(scheduler.getJobDetail(jobKey)).willReturn(jd);
-
- JobDataMap jobDataMap = Mockito.mock(JobDataMap.class);
- given(jd.getJobDataMap()).willReturn(jobDataMap);
-
- // service.setJobsByKey(list,scheduler,jobKey);
- } catch (SchedulerException e) {
- fail("can't set jobs by key.");
- }
}
@Test
- public void testAddJob(){
- try {
- String groupName="BA";
- String jobName="job1";
- long measureId=0;
- JobRequestBody jobRequestBody =new JobRequestBody();
- Scheduler scheduler=Mockito.mock(Scheduler.class);
- given(factory.getObject()).willReturn(scheduler);
- GriffinOperationMessage tmp = service.addJob(groupName,jobName,measureId, jobRequestBody);
- assertEquals(tmp,GriffinOperationMessage.CREATE_JOB_FAIL);
- assertTrue(true);
-
- JobRequestBody jobRequestBody1 =new JobRequestBody("YYYYMMdd-HH","YYYYMMdd-HH",
- System.currentTimeMillis()+"",System.currentTimeMillis()+"","1000");
- Scheduler scheduler1=Mockito.mock(Scheduler.class);
- given(factory.getObject()).willReturn(scheduler1);
- GriffinOperationMessage tmp1 = service.addJob(groupName,jobName,measureId, jobRequestBody1);
- assertEquals(tmp1,GriffinOperationMessage.CREATE_JOB_SUCCESS);
- }catch (Throwable t){
- fail("Cannot add job ");
- }
+ public void testAddJob() {
+ String groupName = "BA";
+ String jobName = "job1";
+ long measureId = 0;
+ JobRequestBody jobRequestBody = new JobRequestBody();
+ Scheduler scheduler = Mockito.mock(Scheduler.class);
+ given(factory.getObject()).willReturn(scheduler);
+ assertEquals(service.addJob(groupName, jobName, measureId, jobRequestBody), GriffinOperationMessage.CREATE_JOB_FAIL);
+
+ JobRequestBody jobRequestBody1 = new JobRequestBody("YYYYMMdd-HH", "YYYYMMdd-HH",
+ System.currentTimeMillis() + "", System.currentTimeMillis() + "", "1000");
+ Scheduler scheduler1 = Mockito.mock(Scheduler.class);
+ given(factory.getObject()).willReturn(scheduler1);
+ assertEquals(service.addJob(groupName, jobName, measureId, jobRequestBody1), GriffinOperationMessage.CREATE_JOB_SUCCESS);
}
@Test
- public void testDeleteJob() {
- String groupName="BA";
- String jobName="job1";
- JobKey jobKey=new JobKey(jobName, groupName);
- JobDetail jobDetail = newJob(SparkSubmitJob.class)
- .storeDurably()
- .withIdentity(jobKey)
- .build();
- JobRequestBody jobRequestBody=new JobRequestBody("YYYYMMdd-HH", "YYYYMMdd-HH", null, "1503158400000", "50");
- service.setJobData(jobDetail, jobRequestBody, 0L, groupName, jobName);
- Scheduler scheduler=Mockito.mock(Scheduler.class);
+ public void testDeleteJob() throws SchedulerException {
+ String groupName = "BA";
+ String jobName = "job1";
+ Scheduler scheduler = Mockito.mock(Scheduler.class);
+ // DELETE_JOB_SUCCESS
given(factory.getObject()).willReturn(scheduler);
- try {
- given(scheduler.getJobDetail(jobKey)).willReturn(jobDetail);
- } catch (SchedulerException e) {
- fail("fail to return jobDetail for scheduler.getJobDetail(jobKey)");
- }
+ given(scheduler.getJobDetail(new JobKey(jobName,groupName))).willReturn(createJobDetail());
+ assertEquals(service.deleteJob(groupName, jobName), GriffinOperationMessage.DELETE_JOB_SUCCESS);
- GriffinOperationMessage tmp = service.deleteJob(groupName,jobName);
- assertThat(tmp).isEqualTo(GriffinOperationMessage.DELETE_JOB_SUCCESS);
+ // DELETE_JOB_FAIL
given(factory.getObject()).willThrow(SchedulerException.class);
- tmp = service.deleteJob(groupName,jobName);
- assertThat(tmp).isEqualTo(GriffinOperationMessage.DELETE_JOB_FAIL);
+ assertEquals(service.deleteJob(groupName, jobName), GriffinOperationMessage.DELETE_JOB_FAIL);
}
@Test
- public void testFindInstancesOfJob(){
- try {
- String groupName="BA";
- String jobName="job1";
- int page=0;
- int size=2;
- List<JobInstance> tmp = service.findInstancesOfJob(groupName,jobName,page,size);
- assertTrue(true);
- }catch (Throwable t){
- fail("Cannot find instances of Job");
- }
+ public void testFindInstancesOfJob() {
+ String groupName = "BA";
+ String jobName = "job1";
+ int page = 0;
+ int size = 2;
+ JobInstance jobInstance = new JobInstance(groupName, jobName, 1, LivySessionStates.State.dead, "app_id", "app_uri", System.currentTimeMillis());
+ Pageable pageRequest = new PageRequest(page, size, Sort.Direction.DESC, "timestamp");
+ given(jobInstanceRepo.findByGroupNameAndJobName(groupName, jobName, pageRequest)).willReturn(Arrays.asList(jobInstance));
+ assertEquals(service.findInstancesOfJob(groupName, jobName, page, size).size(),1);
}
@Test
- public void testGetHealthInfo(){
- try {
- Scheduler scheduler=Mockito.mock(Scheduler.class);
- given(factory.getObject()).willReturn(scheduler);
- given(scheduler.getJobGroupNames()).willReturn(Arrays.asList("BA"));
- JobKey jobKey= new JobKey("TEST");
- Set<JobKey> jobKeySet=new HashSet<JobKey>();
- jobKeySet.add(jobKey);
- given(scheduler.getJobKeys(GroupMatcher.jobGroupEquals("BA"))).willReturn((jobKeySet));
-
- Pageable pageRequest=new PageRequest(0,1, Sort.Direction.DESC,"timestamp");
- List<JobInstance> scheduleStateList=new ArrayList<JobInstance>();
- JobInstance jobInstance=new JobInstance();
- jobInstance.setGroupName("BA");
- jobInstance.setJobName("job1");
- jobInstance.setSessionId(1);
- jobInstance.setState(LivySessionStates.State.starting);
- jobInstance.setAppId("ttt");
- jobInstance.setTimestamp(System.currentTimeMillis());
- scheduleStateList.add(jobInstance);
- given(jobInstanceRepo.findByGroupNameAndJobName(jobKey.getGroup(),jobKey.getName(),pageRequest)).willReturn(scheduleStateList);
- JobHealth tmp = service.getHealthInfo();
- assertTrue(true);
-
- scheduleStateList.remove(0);
- jobInstance.setState(LivySessionStates.State.success);
- scheduleStateList.add(jobInstance);
- given(jobInstanceRepo.findByGroupNameAndJobName(jobKey.getGroup(),jobKey.getName(),pageRequest)).willReturn(scheduleStateList);
- JobHealth tmp1 = service.getHealthInfo();
- }catch (Throwable t){
- fail("Cannot get Health info "+t);
- }
+ public void testGetHealthInfo() throws SchedulerException {
+ Scheduler scheduler = Mockito.mock(Scheduler.class);
+ given(factory.getObject()).willReturn(scheduler);
+ given(scheduler.getJobGroupNames()).willReturn(Arrays.asList("BA"));
+ JobKey jobKey = new JobKey("test");
+ Set<JobKey> jobKeySet = new HashSet<>();
+ jobKeySet.add(jobKey);
+ given(scheduler.getJobKeys(GroupMatcher.jobGroupEquals("BA"))).willReturn((jobKeySet));
+
+ Pageable pageRequest = new PageRequest(0, 1, Sort.Direction.DESC, "timestamp");
+ List<JobInstance> scheduleStateList = new ArrayList<>();
+ JobInstance jobInstance = new JobInstance();
+ jobInstance.setGroupName("BA");
+ jobInstance.setJobName("job1");
+ jobInstance.setSessionId(1);
+ jobInstance.setState(LivySessionStates.State.starting);
+ jobInstance.setAppId("app_id");
+ jobInstance.setTimestamp(System.currentTimeMillis());
+ scheduleStateList.add(jobInstance);
+ given(jobInstanceRepo.findByGroupNameAndJobName(jobKey.getGroup(), jobKey.getName(), pageRequest)).willReturn(scheduleStateList);
+ JobHealth health1 = service.getHealthInfo();
+ assertEquals(health1.getHealthyJobCount(),1);
+
+ scheduleStateList.remove(0);
+ jobInstance.setState(LivySessionStates.State.error);
+ scheduleStateList.add(jobInstance);
+ given(jobInstanceRepo.findByGroupNameAndJobName(jobKey.getGroup(), jobKey.getName(), pageRequest)).willReturn(scheduleStateList);
+ JobHealth health2 = service.getHealthInfo();
+ assertEquals(health2.getHealthyJobCount(),0);
}
+
}
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/job/SparkSubmitJobTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/job/SparkSubmitJobTest.java b/service/src/test/java/org/apache/griffin/core/job/SparkSubmitJobTest.java
index d748b01..60ddf3b 100644
--- a/service/src/test/java/org/apache/griffin/core/job/SparkSubmitJobTest.java
+++ b/service/src/test/java/org/apache/griffin/core/job/SparkSubmitJobTest.java
@@ -7,7 +7,7 @@ to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
- http:www.apache.org/licenses/LICENSE-2.0
+ http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
@@ -19,209 +19,80 @@ under the License.
package org.apache.griffin.core.job;
-import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.griffin.core.job.entity.JobInstance;
import org.apache.griffin.core.job.entity.SparkJobDO;
-import org.apache.griffin.core.measure.entity.DataConnector;
-import org.apache.griffin.core.measure.entity.EvaluateRule;
-import org.apache.griffin.core.measure.entity.Measure;
+import org.apache.griffin.core.job.repo.JobInstanceRepo;
import org.apache.griffin.core.measure.repo.MeasureRepo;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
+import org.apache.griffin.core.util.GriffinUtil;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.context.annotation.Bean;
-import org.springframework.core.io.ClassPathResource;
-import org.springframework.core.io.ResourceLoader;
-import org.springframework.test.context.jdbc.Sql;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.web.client.RestTemplate;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.net.URL;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
import java.util.Properties;
-import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.*;
+import static org.apache.griffin.core.measure.MeasureTestHelper.createATestMeasure;
+import static org.apache.griffin.core.measure.MeasureTestHelper.createJobDetail;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.BDDMockito.given;
+import static org.mockito.Mockito.mock;
@RunWith(SpringRunner.class)
-public class SparkSubmitJobTest{
+public class SparkSubmitJobTest {
+ @TestConfiguration
+ public static class SchedulerServiceConfiguration {
+ @Bean
+ public SparkSubmitJob sparkSubmitJobBean() {
+ return new SparkSubmitJob();
+ }
- @InjectMocks
- private SparkSubmitJob ssj=new SparkSubmitJob();
-
- @Mock
- private MeasureRepo measureRepo;
-
- @Mock
- private Properties sparkJobProps;
- @Mock
- private RestTemplate restTemplate;
- /* @Before
- public void setUp() throws IOException {
- ssj=new SparkSubmitJob();
- ssj.measureRepo=mock(MeasureRepo.class);
- ssj.restTemplate= mock(RestTemplate.class);
- }*/
-
-/* @Test
- public void test_execute() throws Exception {
- JobExecutionContext context=mock(JobExecutionContext.class);
- JobDetail jd = mock(JobDetail.class);
- when(context.getJobDetail()).thenReturn(jd);
-
- JobDataMap jdmap = mock(JobDataMap.class);
- when(jd.getJobDataMap()).thenReturn(jdmap);
-
- when(jdmap.getString("measure")).thenReturn("bevssoj");
- when(jdmap.getString("sourcePat")).thenReturn("YYYYMMDD-HH");
- when(jdmap.getString("targetPat")).thenReturn("YYYYMMDD-HH");
- when(jdmap.getString("dataStartTimestamp")).thenReturn("1460174400000");
- when(jdmap.getString("lastTime")).thenReturn("");
- when(jdmap.getString("periodTime")).thenReturn("10");
- Measure measure = createATestMeasure("viewitem_hourly","bullseye");
- when(ssj.measureRepo.findByName("bevssoj")).thenReturn(measure);
-
- RestTemplate restTemplate =Mockito.mock(RestTemplate.class);
- PowerMockito.whenNew(RestTemplate.class).withAnyArguments().thenReturn(restTemplate);
- String uri=ssj.uri;
- SparkJobDO sparkJobDO= Mockito.mock(SparkJobDO.class);
- PowerMockito.when(restTemplate.postForObject(uri, sparkJobDO, String.class)).thenReturn(null);
- when(restTemplate.postForObject(uri, sparkJobDO, String.class)).thenReturn(null);
- ssj.execute(context);
-
- long currentSystemTimestamp=System.currentTimeMillis();
- long currentTimstamp = ssj.setCurrentTimestamp(currentSystemTimestamp);
-
- verify(ssj.measureRepo).findByName("bevssoj");
- verify(jdmap,atLeast(2)).put("lastTime",currentTimstamp+"");
-
- when(ssj.measureRepo.findByName("bevssoj")).thenReturn(null);
- ssj.execute(context);
-
- when(ssj.measureRepo.findByName("bevssoj")).thenReturn(measure);
- String result="{\"id\":8718,\"state\":\"starting\",\"appId\":null,\"appInfo\":{\"driverLogUrl\":null,\"sparkUiUrl\":null},\"log\":[]}";
- when(restTemplate.postForObject(uri, sparkJobDO, String.class)).thenReturn(result);
- ssj.execute(context);
- }*/
- @Test
- public void test_execute() throws Exception {
- JobExecutionContext context=mock(JobExecutionContext.class);
- JobDetail jd = mock(JobDetail.class);
- when(context.getJobDetail()).thenReturn(jd);
- JobDataMap jdmap = mock(JobDataMap.class);
- when(jd.getJobDataMap()).thenReturn(jdmap);
- when(jdmap.getString("measureId")).thenReturn("0");
- when(jdmap.getString("sourcePat")).thenReturn("YYYYMMDD-HH");
- when(jdmap.getString("targetPat")).thenReturn("YYYYMMDD-HH");
- when(jdmap.getString("dataStartTimestamp")).thenReturn("1460174400000");
- when(jdmap.getString("lastTime")).thenReturn("");
- when(jdmap.getString("periodTime")).thenReturn("10");
- Measure measure = createATestMeasure("viewitem_hourly","bullseye");
- when(measureRepo.findOne(Long.valueOf("0"))).thenReturn(measure);
- Properties sparkJobProperties=new Properties();
- sparkJobProperties.load(new FileInputStream(new ClassPathResource("sparkJob.properties").getFile()));
- when(sparkJobProps.getProperty("sparkJob.dateAndHour")).thenReturn(sparkJobProperties.getProperty("sparkJob.dateAndHour"));
- when(sparkJobProps.getProperty("sparkJob.numExecutors")).thenReturn(sparkJobProperties.getProperty("sparkJob.numExecutors"));
- when(sparkJobProps.getProperty("sparkJob.executorCores")).thenReturn(sparkJobProperties.getProperty("sparkJob.executorCores"));
- when(sparkJobProps.getProperty("sparkJob.driverMemory")).thenReturn(sparkJobProperties.getProperty("sparkJob.driverMemory"));
- when(sparkJobProps.getProperty("sparkJob.executorMemory")).thenReturn(sparkJobProperties.getProperty("sparkJob.executorMemory"));
- String uri=sparkJobProperties.getProperty("livy.uri");
- when(sparkJobProps.getProperty("livy.uri")).thenReturn(uri);
- SparkJobDO sparkJobDO= mock(SparkJobDO.class);
- when(restTemplate.postForObject(uri, sparkJobDO, String.class)).thenReturn(null);
- ssj.execute(context);
+ @Bean
+ public Properties sparkJobProps() {
+ return GriffinUtil.getProperties("/sparkJob.properties");
+ }
}
- private Measure createATestMeasure(String name,String org)throws IOException,Exception{
- HashMap<String,String> configMap1=new HashMap<>();
- configMap1.put("database","default");
- configMap1.put("table.name","test_data_src");
- HashMap<String,String> configMap2=new HashMap<>();
- configMap2.put("database","default");
- configMap2.put("table.name","test_data_tgt");
- String configJson1 = new org.codehaus.jackson.map.ObjectMapper().writeValueAsString(configMap1);
- String configJson2 = new org.codehaus.jackson.map.ObjectMapper().writeValueAsString(configMap2);
+ @Autowired
+ private SparkSubmitJob sparkSubmitJob;
- DataConnector source = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson1);
- DataConnector target = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson2);
-
- String rules = "$source.uage > 100 AND $source.uid = $target.uid AND $source.uage + 12 = $target.uage + 10 + 2 AND $source.udes + 11 = $target.udes + 1 + 1";
+ @MockBean
+ private MeasureRepo measureRepo;
- EvaluateRule eRule = new EvaluateRule(1,rules);
+ @MockBean
+ private RestTemplate restTemplate;
- Measure measure = new Measure(name,"bevssoj description", Measure.MearuseType.accuracy, org, source, target, eRule,"test1");
+ @MockBean
+ private JobInstanceRepo jobInstanceRepo;
- return measure;
+ @Before
+ public void setUp() {
}
@Test
- public void test_genPartitions(){
- String[] patternItemSet={"YYYYMMDD","HH"};
- String[] partitionItemSet={"date","hour"};
- long timestamp=System.currentTimeMillis();
- Map<String,String> par=ssj.genPartitionMap(patternItemSet,partitionItemSet,timestamp);
- Map<String,String> verifyMap=new HashMap<>();
- SimpleDateFormat sdf = new SimpleDateFormat("YYYYMMdd");
- verifyMap.put("date",sdf.format(new Date(timestamp)));
- SimpleDateFormat sdf1 = new SimpleDateFormat("HH");
- verifyMap.put("hour",sdf1.format(new Date(timestamp)));
- assertEquals(verifyMap,par);
- }
+ public void testExecute() throws Exception {
+ String livyUri = null;
+ String result = "{\"id\":1,\"state\":\"starting\",\"appId\":null,\"appInfo\":{\"driverLogUrl\":null,\"sparkUiUrl\":null},\"log\":[]}";
+ JobExecutionContext context = mock(JobExecutionContext.class);
+ JobDetail jd = createJobDetail();
+ given(context.getJobDetail()).willReturn(jd);
+ given(measureRepo.findOne(Long.valueOf(jd.getJobDataMap().getString("measureId")))).willReturn(createATestMeasure("view_item_hourly", "ebay"));
+ given(restTemplate.postForObject(livyUri, new SparkJobDO(), String.class)).willReturn(result);
+ given(jobInstanceRepo.save(new JobInstance())).willReturn(new JobInstance());
+ sparkSubmitJob.execute(context);
+ assertTrue(true);
- /* @Test
- public void test_setDataConnectorPartitions(){
- DataConnector dc=mock(DataConnector.class);
- String[] patternItemSet={"YYYYMMDD","HH"};
- String[] partitionItemSet={"date","hour"};
- long timestamp=System.currentTimeMillis();
- ssj.setDataConnectorPartitions(dc,patternItemSet,partitionItemSet,timestamp);
- Map<String,String> map=new HashMap<>();
- SimpleDateFormat sdf = new SimpleDateFormat("YYYYMMdd");
- SimpleDateFormat sdf1 = new SimpleDateFormat("HH");
- map.put("partitions","date="+sdf.format(new Date(timestamp))+", hour="+sdf1.format(new Date(timestamp)));
- try {
- verify(dc).setConfig(map);
- } catch (JsonProcessingException e) {
- e.printStackTrace();
- }
- }*/
-
-/* @Test
- public void test_setCurrentTimestamp(){
- long timestamp=System.currentTimeMillis();
- ssj.eachJoblastTimestamp="";
- System.out.println(ssj.setCurrentTimestamp(timestamp));
- ssj.eachJoblastTimestamp=(timestamp-1000)+"";
- ssj.periodTime="1000";
- System.out.println(ssj.setCurrentTimestamp(timestamp));
- }*/
- @Test
- public void test_setSparkJobDO(){
- ssj=mock(SparkSubmitJob.class);
- doNothing().when(ssj).setSparkJobDO();
}
+
}
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/measure/MeasureControllerTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/measure/MeasureControllerTest.java b/service/src/test/java/org/apache/griffin/core/measure/MeasureControllerTest.java
index e26bf47..66ebb59 100644
--- a/service/src/test/java/org/apache/griffin/core/measure/MeasureControllerTest.java
+++ b/service/src/test/java/org/apache/griffin/core/measure/MeasureControllerTest.java
@@ -19,15 +19,12 @@ under the License.
package org.apache.griffin.core.measure;
-import org.apache.griffin.core.measure.entity.DataConnector;
-import org.apache.griffin.core.measure.entity.EvaluateRule;
import org.apache.griffin.core.measure.entity.Measure;
import org.apache.griffin.core.util.GriffinOperationMessage;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
-import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
@@ -35,9 +32,9 @@ import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
-import java.io.IOException;
import java.util.*;
+import static org.apache.griffin.core.measure.MeasureTestHelper.createATestMeasure;
import static org.hamcrest.CoreMatchers.is;
import static org.mockito.BDDMockito.given;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
@@ -45,7 +42,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@RunWith(SpringRunner.class)
-@WebMvcTest(value=MeasureController.class,secure = false)
+@WebMvcTest(value = MeasureController.class, secure = false)
public class MeasureControllerTest {
@Autowired
private MockMvc mvc;
@@ -53,125 +50,122 @@ public class MeasureControllerTest {
@MockBean
private MeasureService service;
+
@Before
- public void setup(){
+ public void setup() {
+
}
@Test
- public void testGetAllMeasures() throws IOException,Exception{
- Measure measure = createATestMeasure("viewitem_hourly","bullseye");
-
+ public void testGetAllMeasures() throws Exception {
+ Measure measure = createATestMeasure("view_item_hourly", "ebay");
given(service.getAllAliveMeasures()).willReturn(Arrays.asList(measure));
-
mvc.perform(get("/measures").contentType(MediaType.APPLICATION_JSON))
-// .andDo(print())
.andExpect(status().isOk())
- .andExpect(jsonPath("$.[0].name",is("viewitem_hourly")))
- ;
+ .andExpect(jsonPath("$.[0].name", is("view_item_hourly")));
}
@Test
- public void testGetMeasuresById() throws IOException,Exception{
- Measure measure = createATestMeasure("viewitem_hourly","bullseye");
-
+ public void testGetMeasuresById() throws Exception {
+ Measure measure = createATestMeasure("view_item_hourly", "ebay");
given(service.getMeasureById(1L)).willReturn(measure);
-
mvc.perform(get("/measure/1").contentType(MediaType.APPLICATION_JSON))
-// .andDo(print())
.andExpect(status().isOk())
- .andExpect(jsonPath("$.name",is("viewitem_hourly")))
+ .andExpect(jsonPath("$.name", is("view_item_hourly")))
;
}
-
@Test
- public void testDeleteMeasuresById() throws Exception{
+ public void testDeleteMeasuresById() throws Exception {
+ Measure measure = createATestMeasure("view_item_hourly", "ebay");
+ // RESOURCE_NOT_FOUND
+ given(service.deleteMeasureById(1L)).willReturn(GriffinOperationMessage.RESOURCE_NOT_FOUND);
+ mvc.perform(delete("/measure/1").contentType(MediaType.APPLICATION_JSON))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.description", is("Resource Not Found")))
+ .andExpect(jsonPath("$.code", is(400)));
+
+ // DELETE_MEASURE_BY_ID_SUCCESS
given(service.deleteMeasureById(1L)).willReturn(GriffinOperationMessage.DELETE_MEASURE_BY_ID_SUCCESS);
mvc.perform(delete("/measure/1").contentType(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
- .andExpect(jsonPath("$.code", is(GriffinOperationMessage.DELETE_MEASURE_BY_ID_SUCCESS.getCode())))
- .andExpect(jsonPath("$.description", is(GriffinOperationMessage.DELETE_MEASURE_BY_ID_SUCCESS.getDescription())))
- ;
+ .andExpect(jsonPath("$.description", is("Delete Measures By Name Succeed")))
+ .andExpect(jsonPath("$.code", is(202)));
}
-/* @Test
- public void testDeleteMeasuresByName() throws Exception{
- given(service.deleteMeasureById(0L)).willReturn(GriffinOperationMessage.DELETE_MEASURE_BY_NAME_SUCCESS);
- mvc.perform(delete("/measures/deleteByName/"+measureName).contentType(MediaType.APPLICATION_JSON))
+ @Test
+ public void testUpdateMeasure() throws Exception {
+ Measure measure = createATestMeasure("view_item_hourly", "ebay");
+ ObjectMapper mapper = new ObjectMapper();
+ String measureJson = mapper.writeValueAsString(measure);
+
+ // RESOURCE_NOT_FOUND
+ given(service.updateMeasure(measure)).willReturn(GriffinOperationMessage.RESOURCE_NOT_FOUND);
+ mvc.perform(put("/measure").contentType(MediaType.APPLICATION_JSON).content(measureJson))
.andExpect(status().isOk())
- .andExpect(jsonPath("$",is("DELETE_MEASURE_BY_NAME_SUCCESS")))
- ;
- }*/
+ .andExpect(jsonPath("$.description", is("Resource Not Found")))
+ .andExpect(jsonPath("$.code", is(400)));
- @Test
- public void testUpdateMeasure() throws Exception{
- String measureName="viewitem_hourly";
- String org="bullseye";
- Measure measure=createATestMeasure(measureName,org);
- ObjectMapper mapper=new ObjectMapper();
- String measureJson=mapper.writeValueAsString(measure);
- given(service.updateMeasure(measure)).willReturn(GriffinOperationMessage.UPDATE_MEASURE_SUCCESS);
+ // UPDATE_MEASURE_FAIL
+ given(service.updateMeasure(measure)).willReturn(GriffinOperationMessage.UPDATE_MEASURE_FAIL);
+ mvc.perform(put("/measure").contentType(MediaType.APPLICATION_JSON).content(measureJson))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.description", is("Update Measure Failed")))
+ .andExpect(jsonPath("$.code", is(404)));
+ // UPDATE_MEASURE_SUCCESS
+ given(service.updateMeasure(measure)).willReturn(GriffinOperationMessage.UPDATE_MEASURE_SUCCESS);
mvc.perform(put("/measure").contentType(MediaType.APPLICATION_JSON).content(measureJson))
.andExpect(status().isOk())
- .andExpect(jsonPath("$.description",is(GriffinOperationMessage.UPDATE_MEASURE_SUCCESS.getDescription())))
- .andExpect(jsonPath("$.code", is(GriffinOperationMessage.UPDATE_MEASURE_SUCCESS.getCode())))
- ;
+ .andExpect(jsonPath("$.description", is("Update Measure Succeed")))
+ .andExpect(jsonPath("$.code", is(204)));
}
@Test
- public void testGetAllAliveMeasureNameIdByOwner() throws Exception{
- String Owner="test1";
- List<Map<String, String>> measureList=new LinkedList<>();
+ public void testGetAllMeasuresOfOwner() throws Exception {
+ String owner = "test";
+ List<Map<String, String>> measureList = new LinkedList<>();
HashMap<String, String> map = new HashMap<>();
- map.put("name", "viewitem_hourly");
+ map.put("name", "view_item_hourly");
map.put("id", "0");
measureList.add(map);
- given(service.getAllAliveMeasureNameIdByOwner(Owner)).willReturn(measureList);
+ given(service.getAllAliveMeasureNameIdByOwner(owner)).willReturn(measureList);
- mvc.perform(get("/measures/owner/"+Owner).contentType(MediaType.APPLICATION_JSON))
+ mvc.perform(get("/measures/owner/" + owner).contentType(MediaType.APPLICATION_JSON))
.andExpect(status().isOk())
- .andExpect(jsonPath("$.[0].name",is("viewitem_hourly")))
+ .andExpect(jsonPath("$.[0].name", is("view_item_hourly")))
;
}
@Test
- public void testCreateNewMeasure() throws Exception{
- String measureName="viewitem_hourly";
- String org="bullseye";
- Measure measure=createATestMeasure(measureName,org);
- ObjectMapper mapper=new ObjectMapper();
- String measureJson=mapper.writeValueAsString(measure);
- given(service.createMeasure(measure)).willReturn(GriffinOperationMessage.CREATE_MEASURE_SUCCESS);
-
+ public void testCreateNewMeasure() throws Exception {
+ String measureName = "view_item_hourly";
+ String org = "ebay";
+ Measure measure = createATestMeasure(measureName, org);
+ ObjectMapper mapper = new ObjectMapper();
+ String measureJson = mapper.writeValueAsString(measure);
+ // CREATE_MEASURE_FAIL
+ given(service.createMeasure(measure)).willReturn(GriffinOperationMessage.CREATE_MEASURE_FAIL);
mvc.perform(post("/measure").contentType(MediaType.APPLICATION_JSON).content(measureJson))
.andExpect(status().isOk())
- .andExpect(jsonPath("$.description",is(GriffinOperationMessage.CREATE_MEASURE_SUCCESS.getDescription())))
- .andExpect(jsonPath("$.code", is(GriffinOperationMessage.CREATE_MEASURE_SUCCESS.getCode())))
- ;
- }
+ .andExpect(jsonPath("$.description", is("Create Measure Failed")))
+ .andExpect(jsonPath("$.code", is(401)));
- private Measure createATestMeasure(String name,String org)throws IOException,Exception{
- HashMap<String,String> configMap1=new HashMap<>();
- configMap1.put("database","default");
- configMap1.put("table.name","test_data_src");
- HashMap<String,String> configMap2=new HashMap<>();
- configMap2.put("database","default");
- configMap2.put("table.name","test_data_tgt");
- String configJson1 = new ObjectMapper().writeValueAsString(configMap1);
- String configJson2 = new ObjectMapper().writeValueAsString(configMap2);
-
- DataConnector source = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson1);
- DataConnector target = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson2);
-
- String rules = "$source.uage > 100 AND $source.ue = $target.uid AND $source.uage + 12 = $target.uage + 10 + 2 AND $source.udes + 11 = $target.udes + 1 + 1";
-
- EvaluateRule eRule = new EvaluateRule(1,rules);
+ // CREATE_MEASURE_FAIL_DUPLICATE
+ given(service.createMeasure(measure)).willReturn(GriffinOperationMessage.CREATE_MEASURE_FAIL_DUPLICATE);
+ mvc.perform(post("/measure").contentType(MediaType.APPLICATION_JSON).content(measureJson))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.description", is("Create Measure Failed, duplicate records")))
+ .andExpect(jsonPath("$.code", is(410)));
- Measure measure = new Measure(name,"bevssoj description", Measure.MearuseType.accuracy, org, source, target, eRule,"test1");
- return measure;
+ // CREATE_MEASURE_SUCCESS
+ given(service.createMeasure(measure)).willReturn(GriffinOperationMessage.CREATE_MEASURE_SUCCESS);
+ mvc.perform(post("/measure").contentType(MediaType.APPLICATION_JSON).content(measureJson))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.description", is("Create Measure Succeed")))
+ .andExpect(jsonPath("$.code", is(201)));
}
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/measure/MeasureRepoTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/measure/MeasureRepoTest.java b/service/src/test/java/org/apache/griffin/core/measure/MeasureRepoTest.java
index 72cf7a4..5f929f3 100644
--- a/service/src/test/java/org/apache/griffin/core/measure/MeasureRepoTest.java
+++ b/service/src/test/java/org/apache/griffin/core/measure/MeasureRepoTest.java
@@ -1,125 +1,106 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied. See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
-package org.apache.griffin.core.measure;
-
-import org.apache.griffin.core.measure.entity.DataConnector;
-import org.apache.griffin.core.measure.entity.EvaluateRule;
-import org.apache.griffin.core.measure.entity.Measure;
-import org.apache.griffin.core.measure.repo.DataConnectorRepo;
-import org.apache.griffin.core.measure.repo.EvaluateRuleRepo;
-import org.apache.griffin.core.measure.repo.MeasureRepo;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
-import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
-import org.springframework.context.annotation.PropertySource;
-import org.springframework.test.context.jdbc.Sql;
-import org.springframework.test.context.junit4.SpringRunner;
-import org.springframework.transaction.annotation.Propagation;
-import org.springframework.transaction.annotation.Transactional;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.List;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-@RunWith(SpringRunner.class)
-@PropertySource("classpath:application.properties")
-@DataJpaTest
-//@Sql(value = {"classpath:Init_quartz.sql", "classpath:quartz-test.sql"})
-@Sql("classpath:test.sql")
-public class MeasureRepoTest {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(MeasureRepoTest.class);
-
-
- @Autowired
- private TestEntityManager testEntityManager;
-
- @Autowired
- private MeasureRepo measureRepo;
- @Autowired
- private DataConnectorRepo dataConnectorRepo;
- @Autowired
- private EvaluateRuleRepo evaluateRuleRepo;
-
- @Test
- public void testFindOrganizations() {
- Iterable<String> orgs = measureRepo.findOrganizations();
- System.out.println(orgs);
- for (String org : orgs) {
- assertThat(org).isEqualTo("eBay");
- }
-
- }
-
- @Test
- public void testFindNameByOrganization() {
- List<String> names = measureRepo.findNameByOrganization("eBay");
- assertThat(names.get(0)).isEqualTo("buy_rates_hourly");
- assertThat(names.get(1)).isEqualTo("griffin_aver");
- }
-
- @Test
- public void testFindOrgByName() {
- assertThat(measureRepo.findOrgByName("buy_rates_hourly")).isEqualTo("eBay");
- assertThat(measureRepo.findOrgByName("griffin_aver")).isEqualTo("eBay");
- }
-
- /* @Test
- @Transactional(propagation = Propagation.NOT_SUPPORTED)
- public void testUpdateMeasure() {
- HashMap<String, String> sourceMap = new HashMap<>();
- sourceMap.put("database", "griffin");
- sourceMap.put("table.name", "count");
- DataConnector source = new DataConnector(DataConnector.ConnectorType.HIVE, "1.3", sourceMap);
- HashMap<String, String> targetMap = new HashMap<>();
- targetMap.put("database", "default");
- targetMap.put("table.name", "avr_in");
- DataConnector target = null;
- try {
- target = new DataConnector(DataConnector.ConnectorType.HIVE, "1.4", new ObjectMapper().writeValueAsString(targetMap));
- } catch (IOException e) {
- LOGGER.error("Fail to convert map to string using ObjectMapper.");
- }
-
- EvaluateRule rule = new EvaluateRule(0, "$source['uid'] == $target['url'] AND $source['uage'] == $target['createdts']");
- //save before flushing
- dataConnectorRepo.save(source);
- dataConnectorRepo.save(target);
- evaluateRuleRepo.save(rule);
- measureRepo.updateMeasure((long) 1, "new desc2", "Paypal", source, target, rule);
- for (Measure measure : measureRepo.findAll()) {
- if (measure.getId().equals((long) 1)) {
- assertThat(measure.getDescription()).isEqualTo("new desc2");
- assertThat(measure.getOrganization()).isEqualTo("Paypal");
- assertThat(measure.getSource()).isEqualTo(source);
- assertThat(measure.getTarget()).isEqualTo(target);
- assertThat(measure.getEvaluateRule()).isEqualTo(rule);
- }
- }
-
- }*/
-}
+package org.apache.griffin.core.measure;//package org.apache.griffin.core.measure;
+//
+//import org.apache.griffin.core.measure.entity.DataConnector;
+//import org.apache.griffin.core.measure.entity.EvaluateRule;
+//import org.apache.griffin.core.measure.entity.Measure;
+//import org.apache.griffin.core.measure.repo.DataConnectorRepo;
+//import org.apache.griffin.core.measure.repo.EvaluateRuleRepo;
+//import org.apache.griffin.core.measure.repo.MeasureRepo;
+//import org.codehaus.jackson.map.ObjectMapper;
+//import org.junit.Test;
+//import org.junit.runner.RunWith;
+//import org.slf4j.Logger;
+//import org.slf4j.LoggerFactory;
+//import org.springframework.beans.factory.annotation.Autowired;
+//import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
+//import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
+//import org.springframework.context.annotation.PropertySource;
+//import org.springframework.test.context.jdbc.Sql;
+//import org.springframework.test.context.junit4.SpringRunner;
+//import org.springframework.transaction.annotation.Propagation;
+//import org.springframework.transaction.annotation.Transactional;
+//
+//import java.io.IOException;
+//import java.util.HashMap;
+//import java.util.List;
+//
+//import static org.assertj.core.api.Assertions.assertThat;
+//
+//@RunWith(SpringRunner.class)
+//@PropertySource("classpath:application.properties")
+//@DataJpaTest
+////@Sql(value = {"classpath:Init_quartz.sql", "classpath:quartz-test.sql"})
+//@Sql("classpath:test.sql")
+//public class MeasureRepoTest {
+//
+// private static final Logger LOGGER = LoggerFactory.getLogger(MeasureRepoTest.class);
+//
+//
+// @Autowired
+// private TestEntityManager testEntityManager;
+//
+// @Autowired
+// private MeasureRepo measureRepo;
+// @Autowired
+// private DataConnectorRepo dataConnectorRepo;
+// @Autowired
+// private EvaluateRuleRepo evaluateRuleRepo;
+//
+// @Test
+// public void testFindOrganizations() {
+// Iterable<String> orgs = measureRepo.findOrganizations();
+// System.out.println(orgs);
+// for (String org : orgs) {
+// assertThat(org).isEqualTo("eBay");
+// }
+//
+// }
+//
+// @Test
+// public void testFindNameByOrganization() {
+// List<String> names = measureRepo.findNameByOrganization("eBay");
+// assertThat(names.get(0)).isEqualTo("buy_rates_hourly");
+// assertThat(names.get(1)).isEqualTo("griffin_aver");
+// }
+//
+// @Test
+// public void testFindOrgByName() {
+// assertThat(measureRepo.findOrgByName("buy_rates_hourly")).isEqualTo("eBay");
+// assertThat(measureRepo.findOrgByName("griffin_aver")).isEqualTo("eBay");
+// }
+//
+// /* @Test
+// @Transactional(propagation = Propagation.NOT_SUPPORTED)
+// public void testUpdateMeasure() {
+// HashMap<String, String> sourceMap = new HashMap<>();
+// sourceMap.put("database", "griffin");
+// sourceMap.put("table.name", "count");
+// DataConnector source = new DataConnector(DataConnector.ConnectorType.HIVE, "1.3", sourceMap);
+// HashMap<String, String> targetMap = new HashMap<>();
+// targetMap.put("database", "default");
+// targetMap.put("table.name", "avr_in");
+// DataConnector target = null;
+// try {
+// target = new DataConnector(DataConnector.ConnectorType.HIVE, "1.4", new ObjectMapper().writeValueAsString(targetMap));
+// } catch (IOException e) {
+// LOGGER.error("Fail to convert map to string using ObjectMapper.");
+// }
+//
+// EvaluateRule rule = new EvaluateRule(0, "$source['uid'] == $target['url'] AND $source['uage'] == $target['createdts']");
+// //save before flushing
+// dataConnectorRepo.save(source);
+// dataConnectorRepo.save(target);
+// evaluateRuleRepo.save(rule);
+// measureRepo.updateMeasure((long) 1, "new desc2", "Paypal", source, target, rule);
+// for (Measure measure : measureRepo.findAll()) {
+// if (measure.getId().equals((long) 1)) {
+// assertThat(measure.getDescription()).isEqualTo("new desc2");
+// assertThat(measure.getOrganization()).isEqualTo("Paypal");
+// assertThat(measure.getSource()).isEqualTo(source);
+// assertThat(measure.getTarget()).isEqualTo(target);
+// assertThat(measure.getEvaluateRule()).isEqualTo(rule);
+// }
+// }
+//
+// }*/
+//}
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/measure/MeasureServiceImplTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/measure/MeasureServiceImplTest.java b/service/src/test/java/org/apache/griffin/core/measure/MeasureServiceImplTest.java
index 00fad9c..bc6b2ae 100644
--- a/service/src/test/java/org/apache/griffin/core/measure/MeasureServiceImplTest.java
+++ b/service/src/test/java/org/apache/griffin/core/measure/MeasureServiceImplTest.java
@@ -20,10 +20,7 @@ under the License.
package org.apache.griffin.core.measure;
-import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.griffin.core.job.JobServiceImpl;
-import org.apache.griffin.core.measure.entity.DataConnector;
-import org.apache.griffin.core.measure.entity.EvaluateRule;
import org.apache.griffin.core.measure.entity.Measure;
import org.apache.griffin.core.measure.repo.MeasureRepo;
import org.apache.griffin.core.util.GriffinOperationMessage;
@@ -32,180 +29,117 @@ import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.boot.test.context.TestConfiguration;
-import org.springframework.boot.test.mock.mockito.MockBean;
-import org.springframework.context.annotation.Bean;
import org.springframework.test.context.junit4.SpringRunner;
-import java.io.IOException;
-import java.util.*;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
-import static org.assertj.core.api.Assertions.fail;
+import static org.apache.griffin.core.measure.MeasureTestHelper.createATestMeasure;
+import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
import static org.mockito.BDDMockito.given;
+import static org.mockito.Mockito.doNothing;
@RunWith(SpringRunner.class)
public class MeasureServiceImplTest {
- /*@TestConfiguration
- public static class MeasureServiceImplConfiguration{
- @Bean
- public MeasureServiceImpl service(){
- return new MeasureServiceImpl();
- }
-
- @Bean
- public JobServiceImpl JobService(){
- return new JobServiceImpl();
- }
-
- }*/
-
@InjectMocks
private MeasureServiceImpl service;
@Mock
private MeasureRepo measureRepo;
+ @Mock
+ private JobServiceImpl jobService;
@Before
- public void setup(){
+ public void setup() {
}
@Test
- public void testGetAllMeasures(){
- try {
- Iterable<Measure> tmp = service.getAllAliveMeasures();
- assertTrue(true);
- }catch (Throwable t){
- fail("Cannot get all Measure from dbs");
- }
+ public void testGetAllMeasures() throws Exception {
+ Measure measure = createATestMeasure("view_item_hourly", "ebay");
+ given(measureRepo.findByDeleted(false)).willReturn(Arrays.asList(measure));
+ List<Measure> measures = (List<Measure>) service.getAllAliveMeasures();
+ assertThat(measures.size()).isEqualTo(1);
+ assertThat(measures.get(0).getName()).isEqualTo("view_item_hourly");
}
@Test
- public void testGetMeasuresById(){
- try {
- Measure tmp = service.getMeasureById(1);
- assertTrue(true);
- }catch (Throwable t){
- fail("Cannot get Measure in db By Id: 1");
- }
+ public void testGetMeasuresById() throws Exception {
+ Measure measure = createATestMeasure("view_item_hourly", "ebay");
+ given(measureRepo.findOne(1L)).willReturn(measure);
+ Measure m = service.getMeasureById(1);
+ assertEquals(m.getName(), measure.getName());
}
- /* @Test
- public void testGetMeasuresByName(){
- try {
- Measure tmp = service.getMeasureByName("viewitem_hourly");
- assertTrue(true);
- }catch (Throwable t){
- fail("Cannot get Measure in db By name: viewitem_hourly");
- }
- }*/
@Test
- public void testDeleteMeasuresById(){
- try {
- service.deleteMeasureById(1L);
- assertTrue(true);
- }catch (Throwable t){
- fail("Cannot delete Measure in db By Id: 1");
- }
+ public void testDeleteMeasuresById() throws Exception {
+ Measure measure = createATestMeasure("view_item_hourly", "ebay");
+ // RESOURCE_NOT_FOUND
+ given(measureRepo.exists(1L)).willReturn(false);
+ GriffinOperationMessage message1 = service.deleteMeasureById(1L);
+ assertEquals(message1, GriffinOperationMessage.RESOURCE_NOT_FOUND);
+
+ //DELETE_MEASURE_BY_ID_SUCCESS
+ given(measureRepo.exists(1L)).willReturn(true);
+ given(measureRepo.findOne(1L)).willReturn(measure);
+ doNothing().when(jobService).deleteJobsRelateToMeasure(measure);
+ given(measureRepo.save(measure)).willReturn(measure);
+ GriffinOperationMessage message = service.deleteMeasureById(1L);
+ assertEquals(message, GriffinOperationMessage.DELETE_MEASURE_BY_ID_SUCCESS);
}
- /*@Test
- public void testDeleteMeasuresByName(){
- try {
- String measureName="viewitem_hourly";
- given(measureRepo.findByName(measureName)).willReturn(null);
- GriffinOperationMessage message=service.deleteMeasureByName("viewitem_hourly");
- assertEquals(message,GriffinOperationMessage.RESOURCE_NOT_FOUND);
- assertTrue(true);
-
- String org="bullseye";
- Measure measure=createATestMeasure(measureName,org);
- given(measureRepo.findByName(measureName)).willReturn(measure);
- GriffinOperationMessage message1=service.deleteMeasureByName("viewitem_hourly");
- assertEquals(message1,GriffinOperationMessage.DELETE_MEASURE_BY_NAME_SUCCESS);
- }catch (Throwable t){
- fail("Cannot delete Measure in db By name: viewitem_hourly");
- }
- }*/
@Test
- public void testCreateNewMeasure() {
- try {
- // CREATE_MEASURE_SUCCESS
- String measureName = "viewitem_hourly";
- String org = "bullseye";
- Measure measure = createATestMeasure(measureName, org);
- given(measureRepo.findByNameAndDeleted(measureName, false)).willReturn(new LinkedList<>());
- given(measureRepo.save(measure)).willReturn(measure);
- GriffinOperationMessage message = service.createMeasure(measure);
- assertEquals(message, GriffinOperationMessage.CREATE_MEASURE_SUCCESS);
- assertTrue(true);
- // CREATE_MEASURE_FAIL_DUPLICATE
- Measure measure1 = createATestMeasure(measureName, "bullseye1");
- LinkedList<Measure> list = new LinkedList<>();
- list.add(measure);
- given(measureRepo.findByNameAndDeleted(measureName, false)).willReturn(list);
- GriffinOperationMessage message1 = service.createMeasure(measure);
- assertEquals(message1, GriffinOperationMessage.CREATE_MEASURE_FAIL_DUPLICATE);
- // CREATE_MEASURE_FAIL
- given(measureRepo.findByNameAndDeleted(measureName, false)).willReturn(new LinkedList<>());
- given(measureRepo.save(measure)).willReturn(null);
- GriffinOperationMessage message2 = service.createMeasure(measure);
- assertEquals(message2, GriffinOperationMessage.CREATE_MEASURE_FAIL);
- }catch (Throwable t){
- fail("Cannot create new measure viewitem_hourly");
- }
+ public void testCreateNewMeasure() throws Exception {
+ Measure measure = createATestMeasure("view_item_hourly", "ebay");
+ // CREATE_MEASURE_SUCCESS
+ String measureName = "view_item_hourly";
+ given(measureRepo.findByNameAndDeleted(measureName, false)).willReturn(new LinkedList<>());
+ given(measureRepo.save(measure)).willReturn(measure);
+ GriffinOperationMessage message = service.createMeasure(measure);
+ assertEquals(message, GriffinOperationMessage.CREATE_MEASURE_SUCCESS);
+
+ // CREATE_MEASURE_FAIL_DUPLICATE
+ LinkedList<Measure> list = new LinkedList<>();
+ list.add(measure);
+ given(measureRepo.findByNameAndDeleted(measureName, false)).willReturn(list);
+ GriffinOperationMessage message1 = service.createMeasure(measure);
+ assertEquals(message1, GriffinOperationMessage.CREATE_MEASURE_FAIL_DUPLICATE);
+
+ // CREATE_MEASURE_FAIL
+ given(measureRepo.findByNameAndDeleted(measureName, false)).willReturn(new LinkedList<>());
+ given(measureRepo.save(measure)).willReturn(null);
+ GriffinOperationMessage message2 = service.createMeasure(measure);
+ assertEquals(message2, GriffinOperationMessage.CREATE_MEASURE_FAIL);
}
@Test
- public void testGetAllMeasureByOwner(){
- try {
- String measureName="viewitem_hourly";
- String org="bullseye";
- Measure measure=createATestMeasure(measureName,org);
- String owner="test1";
- given(measureRepo.findAll()).willReturn(Arrays.asList(measure));
- List<Map<String, String>> namelist=service.getAllAliveMeasureNameIdByOwner(owner);
- assertTrue(true);
- }catch (Throwable t){
- fail("Cannot get all measure name by owner test1");
- }
+ public void testGetAllMeasureByOwner() throws Exception {
+ String owner = "test";
+ Measure measure = createATestMeasure("view_item_hourly", "ebay");
+ measure.setId(1L);
+ given(measureRepo.findByOwnerAndDeleted(owner, false)).willReturn(Arrays.asList(measure));
+ List<Map<String, String>> list = service.getAllAliveMeasureNameIdByOwner(owner);
+ assertEquals(list.get(0).get("name"), measure.getName());
}
@Test
- public void testUpdateMeasure(){
- try {
- String measureName="viewitem_hourly";
- String org="bullseye";
- Measure measure=createATestMeasure(measureName,org);
- GriffinOperationMessage message=service.updateMeasure(measure);
- assertTrue(true);
- }catch (Throwable t){
- fail("Cannot create new measure viewitem_hourly");
- }
+ public void testUpdateMeasure() throws Exception {
+ Measure measure = createATestMeasure("view_item_hourly", "ebay");
+ // RESOURCE_NOT_FOUND
+ given(measureRepo.exists(measure.getId())).willReturn(false);
+ GriffinOperationMessage message1 = service.updateMeasure(measure);
+ assertEquals(message1, GriffinOperationMessage.RESOURCE_NOT_FOUND);
+
+ //UPDATE_MEASURE_SUCCESS
+ given(measureRepo.exists(measure.getId())).willReturn(true);
+ given(measureRepo.save(measure)).willReturn(measure);
+ GriffinOperationMessage message2 = service.updateMeasure(measure);
+ assertEquals(message2, GriffinOperationMessage.UPDATE_MEASURE_SUCCESS);
}
- private Measure createATestMeasure(String name,String org)throws IOException,Exception{
- HashMap<String,String> configMap1;
- configMap1 = new HashMap<>();
- configMap1.put("database","default");
- configMap1.put("table.name","test_data_src");
- HashMap<String,String> configMap2=new HashMap<>();
- configMap2.put("database","default");
- configMap2.put("table.name","test_data_tgt");
- String configJson1 = new ObjectMapper().writeValueAsString(configMap1);
- String configJson2 = new ObjectMapper().writeValueAsString(configMap2);
- DataConnector source = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson1);
- DataConnector target = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson2);
-
- String rules = "$source.uage > 100 AND $source.uid = $target.uid AND $source.uage + 12 = $target.uage + 10 + 2 AND $source.udes + 11 = $target.udes + 1 + 1";
- EvaluateRule eRule = new EvaluateRule(1,rules);
- Measure measure = new Measure(name,"bevssoj description", Measure.MearuseType.accuracy, org, source, target, eRule,"test1");
- measure.setId(0L);
- return measure;
- }
}
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/measure/MeasureTestHelper.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/measure/MeasureTestHelper.java b/service/src/test/java/org/apache/griffin/core/measure/MeasureTestHelper.java
new file mode 100644
index 0000000..7d48f5d
--- /dev/null
+++ b/service/src/test/java/org/apache/griffin/core/measure/MeasureTestHelper.java
@@ -0,0 +1,55 @@
+package org.apache.griffin.core.measure;
+
+
+import org.apache.griffin.core.measure.entity.*;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.quartz.JobDataMap;
+import org.quartz.Trigger;
+import org.quartz.impl.JobDetailImpl;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+
+public class MeasureTestHelper {
+ public static Measure createATestMeasure(String name, String org) throws Exception{
+ HashMap<String, String> configMap1 = new HashMap<>();
+ configMap1.put("database", "default");
+ configMap1.put("table.name", "test_data_src");
+ HashMap<String, String> configMap2 = new HashMap<>();
+ configMap2.put("database", "default");
+ configMap2.put("table.name", "test_data_tgt");
+ String configJson1 = new ObjectMapper().writeValueAsString(configMap1);
+ String configJson2 = new ObjectMapper().writeValueAsString(configMap2);
+
+ DataSource dataSource = new DataSource("source", Arrays.asList(new DataConnector("HIVE", "1.2", configJson1)));
+ DataSource targetSource = new DataSource("target", Arrays.asList(new DataConnector("HIVE", "1.2", configJson2)));
+
+ List<DataSource> dataSources = new ArrayList<>();
+ dataSources.add(dataSource);
+ dataSources.add(targetSource);
+ String rules = "source.id=target.id AND source.name=target.name AND source.age=target.age";
+ Rule rule = new Rule("griffin-dsl", "accuracy", rules);
+ EvaluateRule evaluateRule = new EvaluateRule(Arrays.asList(rule));
+ return new Measure(name, "description", org, "batch", "test", dataSources, evaluateRule);
+ }
+
+ public static JobDetailImpl createJobDetail() {
+ JobDetailImpl jobDetail = new JobDetailImpl();
+ JobDataMap jobInfoMap = new JobDataMap();
+ jobInfoMap.put("triggerState", Trigger.TriggerState.NORMAL);
+ jobInfoMap.put("measureId", "1");
+ jobInfoMap.put("sourcePattern", "YYYYMMdd-HH");
+ jobInfoMap.put("targetPattern", "YYYYMMdd-HH");
+ jobInfoMap.put("jobStartTime", "1506356105876");
+ jobInfoMap.put("interval", "3000");
+ jobInfoMap.put("deleted", "false");
+ jobInfoMap.put("blockStartTimestamp","1506634804254");
+ jobInfoMap.put("lastBlockStartTimestamp","1506634804254");
+ jobInfoMap.put("groupName","BA");
+ jobInfoMap.put("jobName","jobName");
+ jobDetail.setJobDataMap(jobInfoMap);
+ return jobDetail;
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/measure/repo/MeasureRepoTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/measure/repo/MeasureRepoTest.java b/service/src/test/java/org/apache/griffin/core/measure/repo/MeasureRepoTest.java
index 6560e51..cd9e00e 100644
--- a/service/src/test/java/org/apache/griffin/core/measure/repo/MeasureRepoTest.java
+++ b/service/src/test/java/org/apache/griffin/core/measure/repo/MeasureRepoTest.java
@@ -19,8 +19,6 @@ under the License.
package org.apache.griffin.core.measure.repo;
-import org.apache.griffin.core.measure.entity.DataConnector;
-import org.apache.griffin.core.measure.entity.EvaluateRule;
import org.apache.griffin.core.measure.entity.Measure;
import org.junit.Before;
import org.junit.Test;
@@ -30,9 +28,9 @@ import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
import org.springframework.test.context.junit4.SpringRunner;
-import java.util.HashMap;
import java.util.List;
+import static org.apache.griffin.core.measure.MeasureTestHelper.createATestMeasure;
import static org.assertj.core.api.Assertions.assertThat;
@RunWith(SpringRunner.class)
@@ -53,56 +51,35 @@ public class MeasureRepoTest {
}
@Test
- public void testFindAllOrganizations() throws Exception {
+ public void testFindAllOrganizations() {
List<String> orgs = measureRepo.findOrganizations();
assertThat(orgs.size()).isEqualTo(3);
}
@Test
- public void testFindNameByOrganization() throws Exception {
+ public void testFindNameByOrganization() {
List<String> orgs = measureRepo.findNameByOrganization("org1");
assertThat(orgs.size()).isEqualTo(1);
- assertThat(orgs.get(0)).isEqualToIgnoringCase("m2");
+ assertThat(orgs.get(0)).isEqualToIgnoringCase("m1");
}
@Test
- public void testFindOrgByName() throws Exception {
- String org = measureRepo.findOrgByName("m3");
+ public void testFindOrgByName() {
+ String org = measureRepo.findOrgByName("m2");
assertThat(org).isEqualTo("org2");
}
- private Measure createATestMeasure(String name,String org)throws Exception{
- HashMap<String,String> configMap1=new HashMap<>();
- configMap1.put("database","default");
- configMap1.put("table.name","test_data_src");
- HashMap<String,String> configMap2=new HashMap<>();
- configMap2.put("database","default");
- configMap2.put("table.name","test_data_tgt");
- String configJson1 = new org.codehaus.jackson.map.ObjectMapper().writeValueAsString(configMap1);
- String configJson2 = new org.codehaus.jackson.map.ObjectMapper().writeValueAsString(configMap2);
-
- DataConnector source = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson1);
- DataConnector target = new DataConnector(DataConnector.ConnectorType.HIVE, "1.2", configJson2);
-
- String rules = "$source.uage > 100 AND $source.uid = $target.uid AND $source.uage + 12 = $target.uage + 10 + 2 AND $source.udes + 11 = $target.udes + 1 + 1";
-
- EvaluateRule eRule = new EvaluateRule(1,rules);
-
- Measure measure = new Measure(name,"bevssoj description", Measure.MearuseType.accuracy, org, source, target, eRule,"test1");
-
- return measure;
- }
public void setEntityManager() throws Exception {
- Measure measure=createATestMeasure("m1","bullseye");
+ Measure measure = createATestMeasure("m1", "org1");
entityManager.persistAndFlush(measure);
- Measure measure2=createATestMeasure("m2","org1");
+ Measure measure2 = createATestMeasure("m2", "org2");
entityManager.persistAndFlush(measure2);
- Measure measure3=createATestMeasure("m3","org2");
+ Measure measure3 = createATestMeasure("m3", "org3");
entityManager.persistAndFlush(measure3);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java
new file mode 100644
index 0000000..d1bbdb6
--- /dev/null
+++ b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreControllerTest.java
@@ -0,0 +1,128 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied. See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.metastore.hive;
+
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
+import org.springframework.boot.test.mock.mockito.MockBean;
+import org.springframework.test.context.junit4.SpringRunner;
+import org.springframework.test.web.servlet.MockMvc;
+
+import java.util.*;
+
+import static org.hamcrest.Matchers.*;
+import static org.mockito.BDDMockito.given;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+
+@RunWith(SpringRunner.class)
+@WebMvcTest(value = HiveMetaStoreController.class, secure = false)
+public class HiveMetaStoreControllerTest {
+
+ @Autowired
+ private MockMvc mockMvc;
+
+ @MockBean
+ private HiveMetaStoreService hiveMetaStoreService;
+
+
+ @Before
+ public void setup() {
+ }
+
+ @Test
+ public void testGetAllDatabases() throws Exception {
+ String dbName = "default";
+ given(hiveMetaStoreService.getAllDatabases()).willReturn(Arrays.asList(dbName));
+
+ mockMvc.perform(get("/metadata/hive/db"))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.[0]", is(dbName)));
+ }
+
+ @Test
+ public void testGetDefAllTables() throws Exception {
+ String tableName = "table";
+ given(hiveMetaStoreService.getAllTableNames("")).willReturn(Arrays.asList(tableName));
+
+ mockMvc.perform(get("/metadata/hive/table"))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.[0]", is(tableName)));
+ }
+
+ @Test
+ public void testGetAllTableNames() throws Exception {
+ String dbName = "default";
+ String tableName = "table";
+ given(hiveMetaStoreService.getAllTableNames(dbName)).willReturn(Arrays.asList(tableName));
+
+ mockMvc.perform(get("/metadata/hive/allTableNames").param("db", dbName))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.[0]", is(tableName)));
+ }
+
+ @Test
+ public void testGetAllTablesWithDb() throws Exception {
+ String dbName = "default";
+ given(hiveMetaStoreService.getAllTable(dbName)).willReturn(Arrays.asList(new Table()));
+
+ mockMvc.perform(get("/metadata/hive/db/allTables").param("db", dbName))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.[0].tableName", is(nullValue())));
+ }
+
+ @Test
+ public void testGetAllTables() throws Exception {
+ Map<String, List<Table>> results = new HashMap<>();
+ results.put("table", new ArrayList<>());
+ given(hiveMetaStoreService.getAllTable()).willReturn(results);
+
+ mockMvc.perform(get("/metadata/hive/allTables"))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.table", hasSize(0)));
+ }
+
+ @Test
+ public void testGetDefTable() throws Exception {
+ String dbName = "";
+ String tableName = "table";
+ given(hiveMetaStoreService.getTable(dbName, tableName)).willReturn(new Table(tableName, null, null, 0, 0, 0, null, null, null, null, null, null));
+
+ mockMvc.perform(get("/metadata/hive/default/{table}", tableName))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.tableName", is(tableName)));
+ }
+
+ @Test
+ public void testGetTable() throws Exception {
+ String dbName = "default";
+ String tableName = "table";
+ given(hiveMetaStoreService.getTable(dbName, tableName)).willReturn(new Table(tableName, null, null, 0, 0, 0, null, null, null, null, null, null));
+
+ mockMvc.perform(get("/metadata/hive").param("db", dbName).param("table", tableName))
+ .andExpect(status().isOk())
+ .andExpect(jsonPath("$.tableName", is(tableName)));
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java
new file mode 100644
index 0000000..d732e34
--- /dev/null
+++ b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetaStoreServiceImplTest.java
@@ -0,0 +1,145 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied. See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.metastore.hive;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.thrift.TException;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.boot.test.mock.mockito.MockBean;
+import org.springframework.context.annotation.Bean;
+import org.springframework.test.context.junit4.SpringRunner;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.BDDMockito.given;
+import static org.mockito.Mockito.doNothing;
+
+
+@RunWith(SpringRunner.class)
+public class HiveMetaStoreServiceImplTest {
+
+ @TestConfiguration
+ public static class HiveMetaStoreServiceConfiguration {
+ @Bean
+ public HiveMetaStoreService service() {
+ return new HiveMetaStoreServiceImpl();
+ }
+ }
+
+ @MockBean
+ private HiveMetaStoreClient client;
+
+ @Autowired
+ private HiveMetaStoreService service;
+
+ @Before
+ public void setup() {
+
+ }
+
+ @Test
+ public void testGetAllDatabases() throws MetaException {
+ given(client.getAllDatabases()).willReturn(Arrays.asList("default"));
+ assertEquals(service.getAllDatabases().iterator().hasNext(), true);
+
+ // MetaException
+ given(client.getAllDatabases()).willThrow(MetaException.class);
+ doNothing().when(client).reconnect();
+ service.getAllDatabases();
+ assertTrue(service.getAllDatabases() == null);
+
+ }
+
+
+ @Test
+ public void testGetAllTableNames() throws MetaException {
+ String dbName = "default";
+ given(client.getAllTables(dbName)).willReturn(Arrays.asList(dbName));
+ assertEquals(service.getAllTableNames(dbName).iterator().hasNext(), true);
+
+ // MetaException
+ given(client.getAllTables(dbName)).willThrow(MetaException.class);
+ doNothing().when(client).reconnect();
+ assertTrue(service.getAllTableNames(dbName) == null);
+
+ }
+
+ @Test
+ public void testGetAllTableByDBName() throws TException {
+ String useDbName = "default";
+ String tableName = "table";
+ given(client.getAllTables(useDbName)).willReturn(Arrays.asList(tableName));
+ given(client.getTable(useDbName, tableName)).willReturn(new Table());
+ assertEquals(service.getAllTable(useDbName).size(), 1);
+
+ // MetaException
+ given(client.getAllTables(useDbName)).willThrow(MetaException.class);
+ doNothing().when(client).reconnect();
+ assertEquals(service.getAllTable(useDbName).size(), 0);
+ }
+
+ @Test
+ public void testGetAllTable() throws TException {
+ String useDbName = "default";
+ String tableName = "table";
+ List<String> databases = Arrays.asList(useDbName);
+ given(client.getAllDatabases()).willReturn(databases);
+ given(client.getAllTables(databases.iterator().next())).willReturn(Arrays.asList(tableName));
+ given(client.getTable(useDbName, tableName)).willReturn(new Table());
+ assertEquals(service.getAllTable().size(), 1);
+
+        // NOTE: do not reorder the following two MetaException cases;
+        // the mocked client is stubbed to throw, so the stubs must be applied in this order.
+ // MetaException1
+ given(client.getAllDatabases()).willReturn(databases);
+ given(client.getAllTables(useDbName)).willThrow(MetaException.class);
+ doNothing().when(client).reconnect();
+ assertEquals(service.getAllTable().get(useDbName).size(), 0);
+
+ // MetaException2
+ given(client.getAllDatabases()).willThrow(MetaException.class);
+ doNothing().when(client).reconnect();
+ assertEquals(service.getAllTable().size(), 0);
+
+
+ }
+
+ @Test
+ public void testGetTable() throws Exception {
+ String dbName = "default";
+ String tableName = "tableName";
+ given(client.getTable(dbName, tableName)).willReturn(new Table());
+ assertTrue(service.getTable(dbName, tableName) != null);
+
+ //getTable throws Exception
+ given(client.getTable(dbName, tableName)).willThrow(Exception.class);
+ doNothing().when(client).reconnect();
+ assertTrue(service.getTable(dbName, tableName) == null);
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/43f9dbf7/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreControllerTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreControllerTest.java b/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreControllerTest.java
deleted file mode 100644
index 50ece98..0000000
--- a/service/src/test/java/org/apache/griffin/core/metastore/hive/HiveMetastoreControllerTest.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied. See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
-package org.apache.griffin.core.metastore.hive;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.springframework.test.context.junit4.SpringRunner;
-import org.springframework.test.web.servlet.MockMvc;
-import org.springframework.test.web.servlet.setup.MockMvcBuilders;
-
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-
-@RunWith(SpringRunner.class)
-public class HiveMetastoreControllerTest {
- private MockMvc mockMvc;
-
- @Mock
- HiveMetastoreServiceImpl hiveMetastoreService;
-
- @InjectMocks
- private HiveMetastoreController hiveMetastoreController;
-
- @Before
- public void setup(){
- MockitoAnnotations.initMocks(this);
- this.mockMvc = MockMvcBuilders.standaloneSetup(hiveMetastoreController).build();
- }
-
- @Test
- public void test_getAllDatabases() throws Exception {
- when(hiveMetastoreService.getAllDatabases()).thenReturn(null);
- mockMvc.perform(get("/metadata/hive/db"))
- .andExpect(status().isOk());
- verify(hiveMetastoreService).getAllDatabases();
- }
-
-
- @Test
- public void test_getAllTableNames() throws Exception {
- String db="default";
- when(hiveMetastoreService.getAllTableNames(db)).thenReturn(null);
- mockMvc.perform(get("/metadata/hive/allTableNames?db="+db))
- .andExpect(status().isOk());
- verify(hiveMetastoreService).getAllTableNames(db);
- }
-
- @Test
- public void test_getAllTables() throws Exception {
- String db="default";
- when(hiveMetastoreService.getAllTablesByDbName(db)).thenReturn(null);
- mockMvc.perform(get("/metadata/hive/db/allTables?db={db}",db))
- .andExpect(status().isOk());
- verify(hiveMetastoreService).getAllTablesByDbName(db);
- }
-
- /**
- */
- @Test
- public void test_getAllTables2() throws Exception {
- when(hiveMetastoreService.getAllTable()).thenReturn(null);
- mockMvc.perform(get("/metadata/hive/allTables"))
- .andExpect(status().isOk());
- verify(hiveMetastoreService).getAllTable();
- }
-
-
-
- @Test
- public void test_getTable() throws Exception{
- String db="default";
- String table="cout";
- when(hiveMetastoreService.getTable(db,table)).thenReturn(null);
- mockMvc.perform(get("/metadata/hive/table?db={db}&table={table}",db,table))
- .andExpect(status().isOk());
- verify(hiveMetastoreService).getTable(db,table);
- }
-}