Posted to commits@griffin.apache.org by gu...@apache.org on 2017/05/04 03:04:56 UTC

[48/51] [partial] incubator-griffin git commit: refactor arch

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/DqScheduleServiceImpl.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/DqScheduleServiceImpl.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/DqScheduleServiceImpl.java
deleted file mode 100644
index 27cc138..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/DqScheduleServiceImpl.java
+++ /dev/null
@@ -1,670 +0,0 @@
-/*
-	Copyright (c) 2016 eBay Software Foundation.
-	Licensed under the Apache License, Version 2.0 (the "License");
-	you may not use this file except in compliance with the License.
-	You may obtain a copy of the License at
-
-	    http://www.apache.org/licenses/LICENSE-2.0
-
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
- */
-package com.ebay.oss.griffin.service;
-
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.List;
-import java.util.Properties;
-
-import org.apache.commons.io.IOUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.annotation.PropertySource;
-import org.springframework.stereotype.Service;
-//import org.springframework.validation.annotation.Validated;
-
-
-
-
-
-
-
-
-
-
-
-
-import com.ebay.oss.griffin.common.Pair;
-import com.ebay.oss.griffin.common.ScheduleModelSeperator;
-import com.ebay.oss.griffin.domain.DataAsset;
-import com.ebay.oss.griffin.domain.DqJob;
-import com.ebay.oss.griffin.domain.DqMetricsValue;
-import com.ebay.oss.griffin.domain.DqModel;
-import com.ebay.oss.griffin.domain.DqSchedule;
-import com.ebay.oss.griffin.domain.JobStatus;
-import com.ebay.oss.griffin.domain.ModelStatus;
-import com.ebay.oss.griffin.domain.ModelType;
-import com.ebay.oss.griffin.domain.PartitionFormat;
-import com.ebay.oss.griffin.domain.SampleFilePathLKP;
-import com.ebay.oss.griffin.domain.ScheduleType;
-import com.ebay.oss.griffin.domain.SystemType;
-import com.ebay.oss.griffin.repo.DataAssetRepo;
-import com.ebay.oss.griffin.repo.DqJobRepo;
-import com.ebay.oss.griffin.repo.DqMetricsRepo;
-import com.ebay.oss.griffin.repo.DqModelRepo;
-import com.ebay.oss.griffin.repo.DqScheduleRepo;
-import com.ebay.oss.griffin.vo.AccuracyHiveJobConfig;
-import com.ebay.oss.griffin.vo.AccuracyHiveJobConfigDetail;
-import com.ebay.oss.griffin.vo.PartitionConfig;
-import com.ebay.oss.griffin.vo.ValidateHiveJobConfig;
-import com.google.gson.Gson;
-import com.mongodb.DBObject;
-
-@PropertySource("classpath:application.properties")
-@Service ("scheduleService")
-public class DqScheduleServiceImpl implements DqScheduleService {
-	private static Logger logger = LoggerFactory.getLogger(DqScheduleServiceImpl.class);
-
-	public static String resultFile = "_RESULT";
-	public static String startFile = "_START";
-	public static String finishFile = "_FINISH";
-	public static String logFile = "dqjoblog";
-
-	@Autowired
-	DQMetricsService dqMetricsService;
-
-	@Autowired
-	DqModelService dqModelService;
-	
-	@Autowired DqModelRepo dqModelRepo;
-
-	@Autowired DqMetricsRepo metricsRepo;
-
-	@Autowired
-	private DqScheduleRepo scheduleRepo;
-
-	@Autowired
-    private DqJobRepo jobRepo;
-
-	@Autowired
-    private DataAssetRepo dataAssetRepo;
-	
-	@Override
-	public void schedulingJobs() {
-		logger.info("===========checking new jobs===============");
-
-		createJobToRunBySchedule();
-
-		generateAllWaitingJobsRunningConfigs();
-
-		checkAllJOBSStatus();
-
-		updateModelStatus(ModelStatus.TESTING, ModelStatus.VERIFIED);
-
-		logger.info("===========checking jobs done===============");
-	}
-
-    void createJobToRunBySchedule() {
-        for (DqSchedule schedule : scheduleRepo.getAll()) {
-            long now = new Date().getTime();
-            long startTime = schedule.getStarttime();
-            if (now < startTime) {
-                continue;
-            }
-
-            Calendar c = Calendar.getInstance();
-            Date date = new Date(startTime);
-            c.setTime(date);
-            int type = schedule.getScheduleType();
-            if (type == ScheduleType.DAILY) {
-                c.add(Calendar.DATE, 1);
-            } else if (type == ScheduleType.HOURLY) {
-                c.add(Calendar.HOUR, 1);
-            } else if (type == ScheduleType.WEEKLY) {
-                c.add(Calendar.DATE, 7);
-            } else if (type == ScheduleType.MONTHLY) {
-                c.add(Calendar.MONTH, 1);
-            } else {
-                continue;
-            }
-            startTime = c.getTime().getTime();
-            schedule.setStarttime(startTime);
-
-            DqJob job = new DqJob();
-            job.setModelList(schedule.getModelList());
-            job.setStarttime(startTime);
-            job.setStatus(0);
-            job.setId(schedule.getModelList() + "_" + startTime); // this is the job.id generation logic
-            job.setJobType(schedule.getJobType());
-            int result = jobRepo.newJob(job);
-            if (result == 0) {
-                logger.info("===================new job creation failed");
-                continue;
-            }
-
-            scheduleRepo.save(schedule);
-
-        }
-	}
-
-	String updateHDFSDirTemplateString(String dir,String dateString,String hourString) {
-		String result = dir;
-		result = result.replaceAll("\\[YYYYMMDD\\]", dateString);
-		result = result.replaceAll("\\[YYYY\\-MM\\-DD\\]", dateString.substring(0,4)+"-"+dateString.substring(4,6)+"-"+dateString.substring(6,8));
-		result = result.replaceAll("\\[YYYY\\]", dateString.substring(0,4));
-		result = result.replaceAll("\\[YY\\]", dateString.substring(2,4));
-		result = result.replaceAll("\\[MM\\]", dateString.substring(4,6));
-		result = result.replaceAll("\\[DD\\]", dateString.substring(6,8));
-		result = result.replaceAll("\\[HH\\]", hourString);
-		result = result.replaceAll("\\[yyyymmdd\\]", dateString);
-		result = result.replaceAll("\\[yyyy\\-mm\\-dd\\]", dateString.substring(0,4)+"-"+dateString.substring(4,6)+"-"+dateString.substring(6,8));
-		result = result.replaceAll("\\[yyyy\\]", dateString.substring(0,4));
-		result = result.replaceAll("\\[yy\\]", dateString.substring(2,4));
-		result = result.replaceAll("\\[mm\\]", dateString.substring(4,6));
-		result = result.replaceAll("\\[dd\\]", dateString.substring(6,8));
-		result = result.replaceAll("\\[hh\\]", hourString);
-		return result;
-	}
-
-	void generateAllWaitingJobsRunningConfigs() {
-		try{
-			logger.info("===========generating running config===============");
-			Properties env = new Properties();
-			env.load(Thread.currentThread().getContextClassLoader()
-					.getResourceAsStream("application.properties"));
-			String environment = env.getProperty("env");
-
-			for(DqJob eachJob : jobRepo.getByStatus(JobStatus.READY)) {
-				String jobid = eachJob.getId();
-				int jobtype = eachJob.getJobType();
-				StringBuffer doneFiles = new StringBuffer();
-				StringBuffer runningParameter = new StringBuffer();
-
-				if(jobtype==ModelType.ACCURACY) {
-					String modelid = eachJob.getModelList();
-					long ts = eachJob.getStarttime();
-					Date dt = new Date(ts);
-					SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd");
-					String dateString = formatter.format(dt);
-					SimpleDateFormat formatter2 = new SimpleDateFormat("HH");
-					String hourString = formatter2.format(dt);
-
-					DqModel model = dqModelRepo.findByColumn("modelId", modelid);
-					if(model==null) {
-						logger.warn( "===================can not find model "+modelid);
-						continue;
-					}
-
-					String content = model.getModelContent();
-
-					String[] contents = content.split("\\|");
-					String srcPlatform = contents[0];
-					String srcSystem = contents[1];
-					String tgtPlatform = contents[2];
-					String tgtSystem = contents[3];
-
-					String[] attributesArray = contents[4].split(";");
-					String[] attributes = attributesArray[0].split(",");
-					String srcDataset = attributes[0].substring(0,attributes[0].lastIndexOf("."));
-					String tgtDataset = attributes[1].substring(0,attributes[1].lastIndexOf("."));
-
-					//					runningParameter.append(System.getProperty("line.separator")+srcPlatform+" "+srcSystem+" "+srcDataset);
-					//					runningParameter.append(System.getProperty("line.separator")+tgtPlatform+" "+tgtSystem+" "+tgtDataset);
-
-					List<Pair> queryList = new ArrayList<Pair>();
-					queryList.add(new Pair("platform", srcPlatform));
-					queryList.add(new Pair("system", srcSystem));
-					queryList.add(new Pair("assetName", srcDataset));
-					logger.info( "===================find source object "+srcPlatform+" "+srcSystem+" "+srcDataset);
-					DBObject srcObj = dataAssetRepo.getByCondition(queryList);
-					DataAsset srcAsset = new DataAsset(srcObj);
-
-					List<Pair> queryList2 = new ArrayList<Pair>();
-					queryList2.add(new Pair("platform", tgtPlatform));
-					queryList2.add(new Pair("system", tgtSystem));
-					queryList2.add(new Pair("assetName", tgtDataset));
-					logger.info( "===================find target object "+tgtPlatform+" "+tgtSystem+" "+tgtDataset);
-					DBObject tgtObj = dataAssetRepo.getByCondition(queryList2);
-					DataAsset tgtAsset = new DataAsset(tgtObj);
-
-					doneFiles.append(updateHDFSDirTemplateString(srcAsset.getAssetHDFSPath(),dateString,hourString)
-							+System.getProperty("line.separator")
-							+updateHDFSDirTemplateString(tgtAsset.getAssetHDFSPath(),dateString,hourString)
-							+System.getProperty("line.separator"));
-					if(model.getSchedule()==ScheduleType.HOURLY && model.getSystem()==SystemType.BULLSEYE)
-					{
-						Date dt4be = new Date(ts+3600000);
-//						SimpleDateFormat formatter4be = new SimpleDateFormat("yyyyMMdd");
-						String dateString4be = formatter.format(dt4be);
-//						SimpleDateFormat formatter24be = new SimpleDateFormat("HH");
-						String hourString4be = formatter2.format(dt4be);
-						doneFiles.append(updateHDFSDirTemplateString(tgtAsset.getAssetHDFSPath(),dateString4be,hourString4be)
-								+System.getProperty("line.separator"));
-					}
-
-
-					AccuracyHiveJobConfig config = new AccuracyHiveJobConfig();
-					List<AccuracyHiveJobConfigDetail> configDetailList = new ArrayList<AccuracyHiveJobConfigDetail>();
-					for(String tempAttribute : attributesArray)
-					{
-						String[] tempAttributeArray = tempAttribute.split(",");
-						String srcColName = tempAttributeArray[0].substring(tempAttributeArray[0].lastIndexOf(".")+1);
-						String tgtColName = tempAttributeArray[1].substring(tempAttributeArray[1].lastIndexOf(".")+1);
-						configDetailList.add(new AccuracyHiveJobConfigDetail(
-								srcAsset.getColId(srcColName), srcColName
-								, tgtAsset.getColId(tgtColName), tgtColName
-								,tempAttributeArray[3], Boolean.parseBoolean(tempAttributeArray[2].toUpperCase())
-								) );
-					}
-					config.setAccuracyMapping(configDetailList);
-					config.setSource(srcAsset.getAssetName());
-					config.setTarget(tgtAsset.getAssetName());
-
-					config.setSrcPartitions(getPartitionList(srcAsset, ts));
-
-					List<List<PartitionConfig>> tgtPartitions = new ArrayList<List<PartitionConfig>>();
-					tgtPartitions.add(getPartitionList(tgtAsset, ts));
-					if(model.getSchedule()==ScheduleType.HOURLY 
-					                && model.getSystem()==SystemType.BULLSEYE) {
-						tgtPartitions.add(getPartitionList(tgtAsset, ts+3600000));
-					}
-
-					config.setTgtPartitions(tgtPartitions);
-
-					Gson gson = new Gson();
-					runningParameter.append(gson.toJson(config)+System.getProperty("line.separator"));
-
-				} else if(jobtype==ModelType.VALIDITY) {
-
-					String modelList = eachJob.getModelList();
-					long ts = eachJob.getStarttime();
-					Date dt = new Date(ts);
-					SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd");
-					String dateString = formatter.format(dt);
-					SimpleDateFormat formatter2 = new SimpleDateFormat("HH");
-					String hourString = formatter2.format(dt);
-
-					List<String> models = new ArrayList<String>();
-					if(!modelList.contains(ScheduleModelSeperator.SEPERATOR))
-					{
-						models.add(modelList);
-					}
-					else
-					{
-						models = Arrays.asList(modelList.split(ScheduleModelSeperator.SPLIT_SEPERATOR));
-					}
-
-
-					if(models.size()==0) return;
-					logger.debug("+++ model id value: " + models.get(0));
-					DqModel model = dqModelRepo.findByColumn("modelId", models.get(0));
-					logger.debug("--- model: " + model);
-					if(model == null){
-						continue ;
-					}
-					DataAsset srcAsset = dataAssetRepo.getById((long)model.getAssetId());
-
-
-					doneFiles.append(updateHDFSDirTemplateString(srcAsset.getAssetHDFSPath(),dateString,hourString)
-							+System.getProperty("line.separator"));
-
-					ValidateHiveJobConfig config = new ValidateHiveJobConfig(srcAsset.getAssetName());
-					config.setTimePartitions(getPartitionList(srcAsset, ts));
-
-					for(String modelname : models) {
-						model = dqModelRepo.findByColumn("modelId", modelname);
-						if(model==null) {
-							logger.warn("===================can not find model "+modelname);
-							continue;
-						}
-
-						String content = model.getModelContent();
-						String[] contents = content.split("\\|");
-						String calType = contents[2];
-						String calColname = contents[3];
-
-						config.addColumnCalculation(srcAsset.getColId(calColname), calColname, Integer.parseInt(calType));
-					}
-
-					Gson gson = new Gson();
-					runningParameter.append(gson.toJson(config)+System.getProperty("line.separator"));
-				}
-
-
-
-
-				logger.info( "===================="+env.getProperty("job.local.folder")+File.separator+jobid+File.separator+"cmd.txt");
-
-				String dir = env.getProperty("job.local.folder")+File.separator+jobid+File.separator+"cmd.txt";
-				createFile(dir);
-				File file = new File(dir);
-				FileWriter fw = new FileWriter(file.getAbsoluteFile());
-				BufferedWriter bw = new BufferedWriter(fw);
-				bw.write(runningParameter.toString());
-				bw.flush();
-				bw.close();
-
-				String dir2 = env.getProperty("job.local.folder")+File.separator+jobid+File.separator+"watchfile.txt";
-				createFile(dir2);
-				File file2 = new File(dir2);
-				FileWriter fw2 = new FileWriter(file2.getAbsoluteFile());
-				BufferedWriter bw2 = new BufferedWriter(fw2);
-				bw2.write(doneFiles.toString());
-				bw2.flush();
-				bw2.close();
-
-				logger.info("====================create file done");
-
-				if(environment.equals("prod")) {
-					String hdfs = env.getProperty("job.hdfs.folder")+"/"+env.getProperty("job.hdfs.runningfoldername");
-					Process process1 = Runtime.getRuntime().exec("hadoop fs -mkdir "+hdfs+File.separator+jobid);
-					logger.info("====================hadoop fs -mkdir "+hdfs+File.separator+jobid);
-					process1.waitFor();
-					Process process2 = Runtime.getRuntime().exec("hadoop fs -put "+dir+" "+hdfs+File.separator+jobid+File.separator);
-					logger.info( "====================hadoop fs -put "+dir+" "+hdfs+File.separator+jobid+File.separator);
-					process2.waitFor();
-					Process process2_1 = Runtime.getRuntime().exec("hadoop fs -put "+dir2+" "+hdfs+File.separator+jobid+File.separator+"_watchfile");
-					logger.info("====================hadoop fs -put "+dir2+" "+hdfs+File.separator+jobid+File.separator+"_watchfile");
-					process2_1.waitFor();
-					Process process3 = Runtime.getRuntime().exec("hadoop fs -touchz "+hdfs+File.separator+jobid+File.separator+"_type_"+jobtype+".done");
-					logger.info( "====================hadoop fs -touchz "+hdfs+File.separator+jobid+File.separator+"_type_"+jobtype+".done");
-					process3.waitFor();
-
-				}
-
-				//file.delete();
-				new File(env.getProperty("job.local.folder")+File.separator+jobid).delete();
-				logger.info( "====================delete file done");
-
-				eachJob.setStatus(JobStatus.WAITING);
-				jobRepo.update(eachJob);
-				logger.info("====================update status done");
-			}
-
-
-		} catch(Exception e) {
-			logger.error(e.toString(), e);
-		}
-
-	}
-
-	List<PartitionConfig> getPartitionList(DataAsset srcAsset, long ts) {
-		Date dt = new Date(ts);
-		List<PartitionConfig> partitions = new ArrayList<PartitionConfig>();
-		List<PartitionFormat> lv1partitions = srcAsset.getPartitions();
-		if(lv1partitions!=null)
-		{
-			for(PartitionFormat tempPartitionFormat : lv1partitions)
-			{
-				SimpleDateFormat tempFormatter = new SimpleDateFormat(tempPartitionFormat.getFormat());
-				String tempdateString = tempFormatter.format(dt);
-				partitions.add(new PartitionConfig(tempPartitionFormat.getName(), tempdateString));
-			}
-		}
-		return partitions;
-	}
-
-	void checkAllJOBSStatus() {
-		try {
-			Properties env = new Properties();
-			env.load(Thread.currentThread().getContextClassLoader()
-					.getResourceAsStream("application.properties"));
-			String hdfsbasedir = env.getProperty("job.hdfs.folder");
-			String runningfoldername = env.getProperty("job.hdfs.runningfoldername");
-			String historyfoldername = env.getProperty("job.hdfs.historyfoldername");
-			String failurefoldername = env.getProperty("job.hdfs.failurefoldername");
-			String environment = env.getProperty("env");
-			String localdir = env.getProperty("job.local.folder");
-			if(!environment.equals("prod")) return;
-
-			int result;
-			Process processLV1 = Runtime.getRuntime().exec("hadoop fs -ls "+hdfsbasedir+"/"+runningfoldername);
-			result = processLV1.waitFor();
-			if(result != 0) {
-				logger.info("===================="+"hadoop fs -ls "+hdfsbasedir+"/"+runningfoldername+" error");
-				return;
-			}
-
-			BufferedReader readerLV1 = new BufferedReader(new InputStreamReader(processLV1.getInputStream()));
-			String lineLV1;
-			int index;
-			while ((lineLV1 = readerLV1.readLine()) != null) {
-				index = lineLV1.indexOf("/");
-				if(index==-1) continue;
-				String runningJobFolderLV1Dir = lineLV1.substring(index);
-				logger.info("===================checking hdfs folder"+runningJobFolderLV1Dir); 
-				Process processLV2 = Runtime.getRuntime().exec("hadoop fs -ls "+runningJobFolderLV1Dir);
-				result = processLV2.waitFor();
-				if(result != 0)
-				{
-					logger.warn("===================="+"hadoop fs -ls "+runningJobFolderLV1Dir+" error");
-					continue;
-				}
-				BufferedReader readerLV2 = new BufferedReader(new InputStreamReader(processLV2.getInputStream()));
-//				if(readerLV2==null) return;
-
-				String lineLV2;
-				int startindi = 0;
-				int resultindi = 0;
-				int logindi = 0;
-				while ((lineLV2 = readerLV2.readLine()) != null) {
-					index = lineLV2.indexOf("/");
-					if(index==-1) continue;
-					String runningJobContentDir = lineLV2.substring(index);
-					logger.info("===================checking hdfs folder"+runningJobContentDir);
-					if(runningJobContentDir.indexOf(startFile)!=-1)
-						startindi = 1;
-					else if(runningJobContentDir.indexOf(resultFile)!=-1)
-						resultindi = 1;
-					else if(runningJobContentDir.indexOf(logFile)!=-1)
-						logindi = 1;
-				}
-
-				String jobID = runningJobFolderLV1Dir.substring(runningJobFolderLV1Dir.indexOf(runningfoldername)+runningfoldername.length()+1);
-				logger.info("===================job id: "+jobID);
-				DqJob job = jobRepo.getById(jobID);
-				if(job==null) {
-					logger.warn("===================no such job: "+jobID);
-					continue;
-				}
-
-				if(startindi == 1) {
-					logger.info("===================start");
-					if(environment.equals("prod")) {
-					    job.setJobType(JobStatus.STARTED); // FIXME numeric issue???!!!
-					    jobRepo.update(job);
-					}
-					logger.info("===================update job status to started");
-					//					Process processChangeStartFile = Runtime.getRuntime().exec("hadoop fs -mv "+runningJobFolderLV1Dir+"/"+startFile+" "+runningJobFolderLV1Dir+"/_RUNNING");
-					//					result = processChangeStartFile.waitFor();
-				}
-
-				if(resultindi ==1) {
-					logger.info("===================finished");
-
-					if(environment.equals("prod")) {
-						String historyJobFolderLV1Dir = runningJobFolderLV1Dir.replaceAll(runningfoldername, historyfoldername);
-						Process processMoveFolder = Runtime.getRuntime().exec("hadoop fs -mv "+runningJobFolderLV1Dir+" "+historyJobFolderLV1Dir);
-						result = processMoveFolder.waitFor();
-						if(result != 0)
-						{
-							logger.warn("===================="+"hadoop fs -mv "+runningJobFolderLV1Dir+" "+historyJobFolderLV1Dir+" error");
-							continue;
-						}
-						logger.info("===================moved to history folder");
-						
-						logger.info("===================publish metrics.");
-
-						String hdfs = env.getProperty("job.hdfs.folder")+"/"+env.getProperty("job.hdfs.historyfoldername");
-						String resultLocalFileDir = localdir+File.separator+jobID+File.separator+resultFile;
-						createFile(resultLocalFileDir);
-						new File(resultLocalFileDir).delete();
-						Process process1 = Runtime.getRuntime().exec("hadoop fs -get "+hdfs+File.separator+jobID+File.separator+resultFile+" "+resultLocalFileDir);
-						logger.info("====================hadoop fs -get "+hdfs+File.separator+jobID+File.separator+resultFile+" "+resultLocalFileDir);
-						process1.waitFor();
-
-						File rFile = new File(resultLocalFileDir);
-						BufferedReader reader = new BufferedReader(new FileReader(rFile));
-						String resultValue = reader.readLine();
-						IOUtils.closeQuietly(reader);
-
-						String metricsNames = jobID.substring(0, jobID.lastIndexOf("_"));
-						List<String> metricsNameArray = new ArrayList<String>();
-						if(!metricsNames.contains(ScheduleModelSeperator.SEPERATOR))
-						{
-							metricsNameArray.add(metricsNames);
-						} else {
-							metricsNameArray = Arrays.asList(metricsNames.split(ScheduleModelSeperator.SPLIT_SEPERATOR));
-						}
-
-						for(String metricsName : metricsNameArray) {
-							DqModel model = dqModelRepo.findByName(metricsName);
-							if(model.getModelType() == ModelType.ACCURACY) {
-								float floatResultValue = -1;
-								long ts = -1;
-								try{
-									floatResultValue = Float.parseFloat(resultValue);
-									ts = Long.parseLong(jobID.substring(jobID.lastIndexOf("_")+1));
-								} catch(Exception e) {
-									logger.error(e.toString(), e);
-								}
-
-								if(floatResultValue >= 0 && ts>= 0) {
-									DqMetricsValue newDQMetricsValue = new DqMetricsValue();
-									newDQMetricsValue.setMetricName(jobID.substring(0, jobID.lastIndexOf("_")));
-									newDQMetricsValue.setTimestamp(ts);
-									newDQMetricsValue.setValue(floatResultValue);
-									logger.info("===================new accuracy dq metrics: "+newDQMetricsValue.getMetricName()+" "+newDQMetricsValue.getTimestamp()+" "+newDQMetricsValue.getValue());
-									dqMetricsService.insertMetadata(newDQMetricsValue);
-
-//									object.put("endtime", new Date().getTime());
-//									object.put("value", value);
-									job.setEndtime(ts);
-									job.setValue(ts);
-									jobRepo.update(job);
-								}
-
-								//insert missing data path to mongo
-
-								SampleFilePathLKP sfp = new SampleFilePathLKP();
-
-								sfp.setHdfsPath(historyJobFolderLV1Dir + "/" + "missingRec.txt");
-								sfp.setModelName(jobID.substring(0, jobID.lastIndexOf("_")));
-								sfp.setTimestamp(ts);
-
-								dqMetricsService.insertSampleFilePath(sfp);
-
-
-							} else if(model.getModelType() == ModelType.VALIDITY) {
-								Gson gson = new Gson();
-								ValidateHiveJobConfig resultObject = gson.fromJson(resultValue.toString(), ValidateHiveJobConfig.class);
-								String content = model.getModelContent();
-								String[] contents = content.split("\\|");
-								String calType = contents[2];
-								String calColname = contents[3];
-								long tempValue = resultObject.getValue(calColname, Integer.parseInt(calType));
-
-								long ts = -1;
-								try{
-									ts = Long.parseLong(jobID.substring(jobID.lastIndexOf("_")+1));
-								} catch(Exception e) {
-									logger.warn(e.toString(), e);
-								}
-
-								if(tempValue >= 0 && ts>= 0) {
-									DqMetricsValue newDQMetricsValue = new DqMetricsValue();
-									newDQMetricsValue.setMetricName(metricsName);
-									newDQMetricsValue.setTimestamp(ts);
-									newDQMetricsValue.setValue(tempValue);
-									logger.warn("===================new validity dq metrics: "+metricsName+" "+ts+" "+tempValue);
-									dqMetricsService.insertMetadata(newDQMetricsValue);
-
-									job.setEndtime(ts);
-									job.setValue(tempValue);
-									jobRepo.update(job);
-								}
-							}
-
-							logger.warn("===================publish metrics done.");
-						}
-					}
-
-					job.setJobType(JobStatus.FINISHED);
-					jobRepo.update(job);
-
-				} else if(logindi == 1 && resultindi == 0) {
-					if(environment.equals("prod")) {
-						String failureJobFolderLV1Dir = runningJobFolderLV1Dir.replaceAll(runningfoldername, failurefoldername);
-						Process processMoveFolder = Runtime.getRuntime().exec("hadoop fs -mv "+runningJobFolderLV1Dir+" "+failureJobFolderLV1Dir);
-						result = processMoveFolder.waitFor();
-						if(result != 0)
-						{
-							logger.warn("===================="+"hadoop fs -mv "+runningJobFolderLV1Dir+" "+failureJobFolderLV1Dir+" error");
-							continue;
-						}
-						logger.warn("===================moved to failure folder");
-					}
-				} else {
-					logger.warn("===================waiting");
-				}
-
-			}
-
-
-
-		} catch (Exception e) {
-			logger.warn(e.toString(), e);
-		}
-	}
-
-	boolean createFile(String destFileName) {
-		File file = new File(destFileName);
-		if(file.exists()) {
-			return false;
-		}
-		if (destFileName.endsWith(File.separator)) {
-			return false;
-		}
-		if(!file.getParentFile().exists()) {
-			if(!file.getParentFile().mkdirs()) {
-				return false;
-			}
-		}
-		try {
-			if (file.createNewFile()) {
-				return true;
-			} else {
-				return false;
-			}
-		} catch (IOException e) {
-			e.printStackTrace();
-			return false;
-		}
-	}
-
-    protected void updateModelStatus(int fromStatus, int toStatus) {
-        List<DqModel> allmodels = dqModelRepo.getByStatus(fromStatus);
-        for (DqModel model : allmodels) {
-            List<DqMetricsValue> allMetrics = metricsRepo.getByMetricsName(model.getModelName());
-            if (allMetrics.size() >= DqModelCreator.MIN_TESTING_JOB_NUMBER) {
-                model.setStatus(toStatus);
-                dqModelRepo.update(model);
-            }
-        }
-    }
-
-}
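For reference, a minimal standalone sketch (not part of this commit) of how the [YYYYMMDD] and [HH] placeholders handled by updateHDFSDirTemplateString above resolve an HDFS path template for a single job timestamp; the template string and timestamp below are invented examples.

    // Minimal sketch, assuming a made-up HDFS path template and timestamp; it only
    // exercises the [YYYYMMDD] and [HH] placeholders from updateHDFSDirTemplateString.
    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class HdfsTemplateSketch {
        public static void main(String[] args) {
            long ts = 1462330800000L;                                   // hypothetical job start time
            String dateString = new SimpleDateFormat("yyyyMMdd").format(new Date(ts));
            String hourString = new SimpleDateFormat("HH").format(new Date(ts));

            String template = "/apps/dq/src_asset/[YYYYMMDD]/[HH]";     // hypothetical asset path
            String resolved = template
                    .replaceAll("\\[YYYYMMDD\\]", dateString)
                    .replaceAll("\\[HH\\]", hourString);

            // Prints something like /apps/dq/src_asset/20160504/03 (hour depends on the JVM time zone)
            System.out.println(resolved);
        }
    }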

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/DummyLoginService.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/DummyLoginService.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/DummyLoginService.java
deleted file mode 100644
index e0cbe91..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/DummyLoginService.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- Copyright (c) 2016 eBay Software Foundation.
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- */
-package com.ebay.oss.griffin.service;
-
-import org.springframework.stereotype.Service;
-
-@Service
-public class DummyLoginService implements LoginService {
-
-    @Override
-    public String login(String ntUser, String password) {
-        return ntUser;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/LoginService.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/LoginService.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/LoginService.java
deleted file mode 100644
index f21e45c..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/LoginService.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
-	Copyright (c) 2016 eBay Software Foundation.
-	Licensed under the Apache License, Version 2.0 (the "License");
-	you may not use this file except in compliance with the License.
-	You may obtain a copy of the License at
-
-	    http://www.apache.org/licenses/LICENSE-2.0
-
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
- */
-package com.ebay.oss.griffin.service;
-
-
-
-public interface LoginService {
-	public String login(String ntUser, String password);
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/ModelInputConverter.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/ModelInputConverter.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/ModelInputConverter.java
deleted file mode 100644
index 162c46e..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/ModelInputConverter.java
+++ /dev/null
@@ -1,80 +0,0 @@
-package com.ebay.oss.griffin.service;
-
-import org.springframework.stereotype.Component;
-
-import com.ebay.oss.griffin.domain.DqModel;
-import com.ebay.oss.griffin.domain.ModelType;
-import com.ebay.oss.griffin.vo.ModelBasicInputNew;
-import com.ebay.oss.griffin.vo.ModelExtraInputNew;
-import com.ebay.oss.griffin.vo.ModelInput;
-
-@Component("modelInputConverter")
-public class ModelInputConverter implements Converter<DqModel, ModelInput> {
-
-    @Override
-    public ModelInput voOf(DqModel dqModel) {
-		if(dqModel == null){
-			return null;
-		}
-
-		// Object result = sourceObject;
-		int modelType = dqModel.getModelType();
-		ModelInput result = new ModelInput();
-		result.setBasic(getViewModelForFront(dqModel));
-
-		if (modelType == ModelType.ACCURACY) {
-			result.parseFromString(dqModel.getModelContent());
-		} else if (modelType == ModelType.VALIDITY) {
-
-			ModelExtraInputNew extra = result.getExtra();
-			String content = dqModel.getModelContent();
-			String[] contents = content.split("\\|");
-			extra.setSrcDb(contents[0]);
-			extra.setSrcDataSet(contents[1]);
-			extra.setVaType(Integer.parseInt(contents[2]));
-			extra.setColumn(contents[3]);
-
-
-		} else if (modelType == ModelType.ANOMALY) {
-
-			ModelExtraInputNew extra = result.getExtra();
-			String content = dqModel.getModelContent();
-			String[] contents = content.split("\\|");
-			extra.setSrcDb(contents[0]);
-			extra.setSrcDataSet(contents[1]);
-			int type = Integer.parseInt(contents[2]);
-			extra.setAnType(type);
-
-		} else if (modelType == ModelType.PUBLISH) {
-
-			result.getExtra().setPublishUrl(dqModel.getModelContent());
-		}
-
-		return result;
-	}
-
-	ModelBasicInputNew getViewModelForFront(DqModel sourceObject) {
-		ModelBasicInputNew basic = new ModelBasicInputNew();
-		basic.setDesc(sourceObject.getModelDesc());
-		basic.setName(sourceObject.getModelName());
-		basic.setDataaset(sourceObject.getAssetName());
-		basic.setDataasetId(sourceObject.getAssetId());
-		basic.setStatus(sourceObject.getStatus());
-		basic.setType(sourceObject.getModelType());
-		basic.setScheduleType(sourceObject.getSchedule());
-		basic.setSystem(sourceObject.getSystem());
-		basic.setEmail(sourceObject.getNotificationEmail());
-		basic.setOwner(sourceObject.getOwner());
-		basic.setThreshold(sourceObject.getThreshold());
-
-		return basic;
-	}
-
-    @Override
-    public DqModel entityOf(ModelInput vo) {
-        // TODO Auto-generated method stub
-        throw new RuntimeException("not implemented yet...");
-    }
-
-
-}
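A small hedged illustration (again not from the commit) of the pipe-delimited modelContent layout that the VALIDITY branch of voOf above assumes, srcDb|srcDataSet|vaType|column; the sample string is invented.

    // Sketch only; the modelContent value is hypothetical, the field order mirrors voOf above.
    public class ModelContentSketch {
        public static void main(String[] args) {
            String modelContent = "demo_db|demo_db.orders|2|order_id";
            String[] contents = modelContent.split("\\|");

            System.out.println("srcDb      = " + contents[0]);
            System.out.println("srcDataSet = " + contents[1]);
            System.out.println("vaType     = " + Integer.parseInt(contents[2]));
            System.out.println("column     = " + contents[3]);
        }
    }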

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/NotificationService.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/NotificationService.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/NotificationService.java
deleted file mode 100644
index 816944b..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/NotificationService.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
-	Copyright (c) 2016 eBay Software Foundation. 
-	Licensed under the Apache License, Version 2.0 (the "License");
-	you may not use this file except in compliance with the License.
-	You may obtain a copy of the License at
-	
-	    http://www.apache.org/licenses/LICENSE-2.0
-	
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
-*/	
-package com.ebay.oss.griffin.service;
-
-import java.util.List;
-
-import com.ebay.oss.griffin.vo.NotificationRecord;
-
-public interface NotificationService {
-	public void insert(NotificationRecord record);
-	public List<NotificationRecord> getAll();
-	public void delete(NotificationRecord record);
-	public void delete(int id);
-	public NotificationRecord get(int id);
-	public List<NotificationRecord> getTop(int limit);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/NotificationServiceImpl.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/NotificationServiceImpl.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/NotificationServiceImpl.java
deleted file mode 100644
index 264e5f8..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/NotificationServiceImpl.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
-	Copyright (c) 2016 eBay Software Foundation.
-	Licensed under the Apache License, Version 2.0 (the "License");
-	you may not use this file except in compliance with the License.
-	You may obtain a copy of the License at
-
-	    http://www.apache.org/licenses/LICENSE-2.0
-
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
- */
-package com.ebay.oss.griffin.service;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.springframework.stereotype.Service;
-
-import com.ebay.oss.griffin.vo.NotificationRecord;
-
-@Service
-public class NotificationServiceImpl implements NotificationService {
-	private static List<NotificationRecord> records = new ArrayList<NotificationRecord>();
-	private static int count = 1;
-
-	@Override
-	public void insert(NotificationRecord record) {
-		record.setId(count++);
-		records.add(0, record);
-
-	}
-
-	@Override
-	public List<NotificationRecord> getAll() {
-		return records;
-	}
-
-	@Override
-	public void delete(NotificationRecord record) {
-		int index = records.indexOf(record);
-		if(index > -1){
-			records.remove(index);
-		}else if(record.getId()>0){
-			delete(record.getId());
-		}
-
-	}
-
-	@Override
-	public void delete(int id) {
-		int length = records.size();
-		for(int i = 0; i < length; i ++){
-			if(records.get(i).getId() == id){
-				records.remove(i);
-				break;
-			}
-		}
-
-	}
-
-	@Override
-	public NotificationRecord get(int id) {
-		int length = records.size();
-		for(int i = 0; i < length; i ++){
-			if(records.get(i).getId() == id){
-				return records.get(i);
-			}
-		}
-		return null;
-	}
-
-	@Override
-	public List<NotificationRecord> getTop(int limit) {
-		// TODO Auto-generated method stub
-		return records.subList(0, limit);
-
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/RefMetrcsCalc.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/RefMetrcsCalc.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/RefMetrcsCalc.java
deleted file mode 100644
index 87b8e02..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/RefMetrcsCalc.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
-	Copyright (c) 2016 eBay Software Foundation.
-	Licensed under the Apache License, Version 2.0 (the "License");
-	you may not use this file except in compliance with the License.
-	You may obtain a copy of the License at
-
-	    http://www.apache.org/licenses/LICENSE-2.0
-
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
- */
-package com.ebay.oss.griffin.service;
-
-import com.ebay.oss.griffin.vo.SystemLevelMetricsList;
-
-public interface RefMetrcsCalc {
-
-    void calc(SystemLevelMetricsList totalSystemLevelMetricsList);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/RefMetricsCalcImpl.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/RefMetricsCalcImpl.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/RefMetricsCalcImpl.java
deleted file mode 100644
index d1f50f8..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/RefMetricsCalcImpl.java
+++ /dev/null
@@ -1,347 +0,0 @@
-/*
-	Copyright (c) 2016 eBay Software Foundation.
-	Licensed under the Apache License, Version 2.0 (the "License");
-	you may not use this file except in compliance with the License.
-	You may obtain a copy of the License at
-
-	    http://www.apache.org/licenses/LICENSE-2.0
-
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
- */
-package com.ebay.oss.griffin.service;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
-import org.springframework.util.StringUtils;
-
-import com.ebay.oss.griffin.domain.AnomalyType;
-import com.ebay.oss.griffin.domain.DqMetricsValue;
-import com.ebay.oss.griffin.domain.DqModel;
-import com.ebay.oss.griffin.domain.MetricType;
-import com.ebay.oss.griffin.domain.ModelType;
-import com.ebay.oss.griffin.domain.ScheduleType;
-import com.ebay.oss.griffin.domain.SystemType;
-import com.ebay.oss.griffin.repo.DqMetricsRepo;
-import com.ebay.oss.griffin.repo.DqModelRepo;
-import com.ebay.oss.griffin.vo.AssetLevelMetricsDetail;
-import com.ebay.oss.griffin.vo.BollingerBandsEntity;
-import com.ebay.oss.griffin.vo.MADEntity;
-import com.ebay.oss.griffin.vo.SystemLevelMetricsList;
-
-@Component
-public class RefMetricsCalcImpl implements RefMetrcsCalc {
-
-    private static Logger logger = LoggerFactory.getLogger(RefMetricsCalcImpl.class);
-
-	@Autowired
-	DqModelService dqModelService;
-
-	@Autowired
-	DqModelRepo modelRepo;
-	
-	@Autowired
-    DqMetricsRepo metricsRepo;
-
-	Map<String, String> modelName_system;
-	String getSystemType(String modelName) {
-	    if(modelName_system==null) {
-	        modelName_system = new HashMap<String, String>();
-
-	        for(DqModel model : modelRepo.getAll()) {
-	            modelName_system.put(model.getModelName(), SystemType.val(model.getSystem()));
-	        }
-	    }
-	    return modelName_system.get(modelName);
-    }
-
-    @Override
-    public void calc(SystemLevelMetricsList totalSystemLevelMetricsList) {
-		Map<String, List<String>> references = getReferences();
-		
-		for(String modelName : references.keySet()) {
-			List<String> refNames = references.get(modelName);
-
-			for (String referencerName : refNames) {
-			    calc(modelName, referencerName, totalSystemLevelMetricsList);
-			}
-		}
-	}
-
-	private void calc(String modelName, String referencerName, SystemLevelMetricsList totalSystemLevelMetricsList) {
-	    logger.info("==============anomaly loop start==================" + referencerName + " " + modelName);
-	    DqModel refModel = dqModelService.getGeneralModel(referencerName);
-	    if (refModel == null) {
-	        logger.warn("==============referencerModel is null================== "+referencerName);
-//	        return false;
-	        return;
-	    }
-
-	    DqModel sourceModel = dqModelService.getGeneralModel(modelName);
-	    if (sourceModel == null) {
-	        logger.warn("==============sourceModel is null================== "+modelName);
-//	        return false;
-	        return;
-	    }
-	    if (refModel.getModelType() != ModelType.ANOMALY) {
-	        logger.warn("==============non-anomaly model found================== "+referencerName);
-//	        return true;
-	        return;
-	    }
-
-//	    public static int trendLength = 20 * 24;
-//	    public static int trendOffset = 24 * 7;
-
-	    List<DqMetricsValue> metricList = metricsRepo.getByMetricsName(modelName);
-	    Collections.sort(metricList);
-	        
-	    String content = refModel.getModelContent();
-	    String[] contents = content.split("\\|");
-	    int type = Integer.parseInt(contents[2]);
-
-	    if (type == AnomalyType.HISTORY_TREND) {
-	        calcHistoryRefModel(sourceModel, refModel,
-	                                    metricList,totalSystemLevelMetricsList);
-	    } else if (type == AnomalyType.BOLLINGER_BANDS) {
-	        calcBollingerRefModel(modelName, referencerName, metricList
-	                                            , totalSystemLevelMetricsList);
-	    } else if (type == AnomalyType.MAD) {
-	        calcMad(modelName, referencerName, refModel, metricList, totalSystemLevelMetricsList);
-	    }
-	    logger.info("==============anomaly loop end==================" + referencerName + " " + modelName);
-
-    }
-    protected void calcHistoryRefModel(DqModel sourceModel, DqModel refModel
-                                            , List<DqMetricsValue> metricList
-                                            , SystemLevelMetricsList totalSystemLevelMetricsList) {
-        int trendLength, trendOffset;
-        if (sourceModel.getSchedule() == ScheduleType.DAILY) {
-            trendLength = 20;
-            trendOffset = 7;
-        } else {
-            trendLength = 20 * 24;
-            trendOffset = 7 * 24;
-        }
-
-        if (metricList.size() <= trendLength + trendOffset) {
-            return;
-//            return false;
-        }
-
-        String modelName = sourceModel.getModelName();
-        String referencerName = refModel.getModelName();
-        logger.info("==============trend start=================="
-                        + referencerName
-                        + " "
-                        + modelName
-                        + " "
-                        + trendLength + " " + trendOffset);
-
-        int dqfail = 0;
-        float threadshold = refModel.getThreshold();
-        if (metricList.get(0).getValue() / metricList.get(trendOffset) .getValue() >= 1 + threadshold
-                        || metricList.get(0).getValue() / metricList.get(trendOffset) .getValue() <= 1 - threadshold) {
-            dqfail = 1;
-        }
-
-        for (int i = 0; i <= trendLength; i++) {
-            DqMetricsValue tempDQMetricsValue = metricList.get(i);
-            float lastValue = metricList.get( i + trendOffset).getValue();
-            totalSystemLevelMetricsList.upsertNewAssetExecute(
-                            referencerName,
-                            MetricType.Trend.toString(),
-                            tempDQMetricsValue.getTimestamp(),
-                            tempDQMetricsValue.getValue()
-                            , getSystemType(tempDQMetricsValue.getMetricName())
-                            , dqfail,
-                            true, new AssetLevelMetricsDetail(lastValue));
-        }
-
-        logger.info("==============trend end==================");
-    }
-
-
-    protected void calcMad(String modelName, String referencerName, DqModel refModel,
-                    List<DqMetricsValue> metricList,
-                    SystemLevelMetricsList totalSystemLevelMetricsList) {
-        logger.info("==============MAD start==================" + referencerName + " " + modelName);
-        Collections.reverse(metricList);
-        List<String> sourceValues = new ArrayList<String>();
-        for (int i = 0; i < metricList.size(); i++) {
-            sourceValues.add((long) metricList.get(i).getValue() + "");
-        }
-        List<MADEntity> madList = createMad(sourceValues);
-
-        logger.info("==============MAD size : "+madList.size() +" metrics size:"+metricList.size());
-        if (metricList.size() > 0 && madList.size() > 0) {
-            int dqfail = 0;
-            if (metricList.get( metricList.size() - 1).getValue() 
-                            < madList.get(madList.size() - 1).getLower()) {
-                dqfail = 1;
-            }
-
-            int offset = metricList.size() - madList.size();
-            for (int i = offset; i < metricList.size(); i++) {
-                DqMetricsValue tempDQMetricsValue = metricList.get(i);
-
-                MADEntity mad = madList.get( i - offset).clone();
-                AssetLevelMetricsDetail detail = new AssetLevelMetricsDetail( mad);
-                totalSystemLevelMetricsList.upsertNewAssetExecute(
-                                                referencerName,
-                                                MetricType.MAD.toString(),
-                                                tempDQMetricsValue.getTimestamp(),
-                                                tempDQMetricsValue.getValue(),
-                                                getSystemType(tempDQMetricsValue.getMetricName()),
-                                                dqfail,
-                                                true,
-                                                detail
-                                                );
-            }
-        }
-        logger.info("==============MAD end==================");
-    }
-
-    protected void calcBollingerRefModel(String modelName, String referencerName,
-                    List<DqMetricsValue> metricList,
-                    SystemLevelMetricsList totalSystemLevelMetricsList) {
-        logger.info("==============Bollinger start=================="
-                        + referencerName + " " + modelName);
-        Collections.reverse(metricList);
-        List<String> sourceValues = new ArrayList<String>();
-        for (int i = 0; i < metricList.size(); i++) {
-            sourceValues.add((long) metricList.get(i)
-                            .getValue() + "");
-        }
-
-        List<BollingerBandsEntity> bollingers = bollingerBand(sourceValues);
-
-        logger.info("==============Bollinger size : "+bollingers.size() +" metrics size:"+metricList.size());
-        if (metricList.size() > 0 && bollingers.size() > 0) {
-            int dqfail = 0;
-            if (metricList.get( metricList.size() - 1).getValue() 
-                            < bollingers.get(bollingers.size() - 1).getLower()) {
-                dqfail = 1;
-            }
-
-            int offset = metricList.size()- bollingers.size();
-            for (int i = offset; i < metricList.size(); i++) {
-                DqMetricsValue tempDQMetricsValue = metricList.get(i);
-
-                BollingerBandsEntity bollinger = bollingers.get(i - offset).clone();
-                AssetLevelMetricsDetail detail = new AssetLevelMetricsDetail(bollinger);
-                totalSystemLevelMetricsList.upsertNewAssetExecute(
-                                referencerName,
-                                MetricType.Bollinger.toString(),
-                                tempDQMetricsValue.getTimestamp(),
-                                tempDQMetricsValue.getValue(),
-                                getSystemType(tempDQMetricsValue.getMetricName()),
-                                dqfail,
-                                true,
-                                detail
-                                );
-            }
-        }
-        logger.info("==============Bollinger end=================="
-                        + referencerName + " " + modelName);
-    }
-
-    private List<String> parseRefNames(String reference) {
-	    List<String> refNames = new ArrayList<String>();
-	    if (reference.indexOf(",") == -1) {
-	        refNames.add(reference);
-	    } else {
-	        refNames = Arrays.asList(reference.split(","));
-	    }
-	    return refNames;
-    }
-
-	/** <modelName, [refModelName]> */
-    Map<String, List<String>> getReferences() {
-	    Map<String, List<String>> map = new HashMap<>();
-	    for(DqModel each : modelRepo.getAll()) {
-	        String modelName = each.getModelName();
-	        String references = each.getReferenceModel();
-	        if(!map.containsKey(each.getModelName())) {
-	            map.put(modelName, new ArrayList<String>());
-	        }
-	        if(StringUtils.hasText(references)) {
-	            for(String ref : parseRefNames(references)) {
-	                map.get(modelName).add(ref.trim());
-	            }
-	        }
-	    }
-	    return map;
-    }
-
-	List<BollingerBandsEntity> bollingerBand(List<String> list) {
-		List<BollingerBandsEntity> result = new ArrayList<BollingerBandsEntity>();
-		int preparePointNumber = 30;
-		float up_coff = 1.8f;
-		float down_coff = 1.8f;
-		for (int i = preparePointNumber; i < list.size(); i++) {
-			long total = 0;
-			for (int j = i - preparePointNumber; j < i; j++) {
-				long rawNumber = Long.parseLong(list.get(j));
-				total = total + rawNumber;
-			}
-			long mean = total / preparePointNumber;
-			long meantotal = 0;
-			for (int j = i - preparePointNumber; j < i; j++) {
-				long rawNumber = Long.parseLong(list.get(j));
-				long rawDiff = rawNumber - mean;
-				meantotal += rawDiff * rawDiff;
-			}
-			long mad = (long) Math.sqrt(meantotal / preparePointNumber);
-			long upper = (long) (mean + mad * up_coff);
-			long lower = (long) (mean - mad * down_coff);
-			// .out.println( list.get(i)+"\t"+upper +"\t"+lower);
-			result.add(new BollingerBandsEntity(upper, lower, mean));
-		}
-		logger.info("bollingerband done");
-		return result;
-	}
-
-    List<MADEntity> createMad(List<String> list) {
-		List<MADEntity> result = new ArrayList<MADEntity>();
-		int preparePointNumber = 15;
-		float up_coff = 2.3f;
-		float down_coff = 2.3f;
-		for (int i = preparePointNumber; i < list.size(); i++) {
-			long total = 0;
-			for (int j = i - preparePointNumber; j < i; j++) {
-				long rawNumber = Long.parseLong(list.get(j));
-				total = total + rawNumber;
-			}
-			long mean = total / preparePointNumber;
-			long meantotal = 0;
-			for (int j = i - preparePointNumber; j < i; j++) {
-				long rawNumber = Long.parseLong(list.get(j));
-				long rawDiff = rawNumber - mean;
-				if (rawDiff >= 0)
-					meantotal = meantotal + rawDiff;
-				else
-					meantotal = meantotal - rawDiff;
-			}
-			long mad = meantotal / preparePointNumber;
-			long upper = (long) (mean + mad * up_coff);
-			long lower = (long) (mean - mad * down_coff);
-
-			result.add(new MADEntity(upper, lower));
-		}
-		logger.info("mad done");
-		return result;
-	}
-
-}
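A self-contained sketch, using made-up window values, of the per-window arithmetic in bollingerBand above: integer mean, integer-division variance, and a band of mean +/- 1.8 times the deviation.

    // Sketch with hypothetical metric values; reproduces the window math of bollingerBand
    // for a single window (integer mean, integer-division variance, 1.8 coefficient).
    public class BollingerWindowSketch {
        public static void main(String[] args) {
            long[] window = {100, 102, 98, 101, 99};    // hypothetical metric values
            int n = window.length;

            long total = 0;
            for (long v : window) {
                total += v;
            }
            long mean = total / n;                      // integer division, as in the original

            long squaredDiffTotal = 0;
            for (long v : window) {
                long diff = v - mean;
                squaredDiffTotal += diff * diff;
            }
            long deviation = (long) Math.sqrt(squaredDiffTotal / n);

            float coff = 1.8f;
            long upper = (long) (mean + deviation * coff);
            long lower = (long) (mean - deviation * coff);

            // For the values above: mean=100, deviation=1, upper=101, lower=98
            System.out.println("mean=" + mean + " upper=" + upper + " lower=" + lower);
        }
    }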

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/SubscribeService.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/SubscribeService.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/SubscribeService.java
deleted file mode 100644
index 8755df0..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/SubscribeService.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
-	Copyright (c) 2016 eBay Software Foundation.
-	Licensed under the Apache License, Version 2.0 (the "License");
-	you may not use this file except in compliance with the License.
-	You may obtain a copy of the License at
-
-	    http://www.apache.org/licenses/LICENSE-2.0
-
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
- */
-package com.ebay.oss.griffin.service;
-
-import com.ebay.oss.griffin.domain.UserSubscription;
-
-
-public interface SubscribeService {
-
-	void subscribe(UserSubscription item);
-
-	UserSubscription getSubscribe(String user);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/SubscribeServiceImpl.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/SubscribeServiceImpl.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/SubscribeServiceImpl.java
deleted file mode 100644
index 562271a..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/SubscribeServiceImpl.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
-	Copyright (c) 2016 eBay Software Foundation.
-	Licensed under the Apache License, Version 2.0 (the "License");
-	you may not use this file except in compliance with the License.
-	You may obtain a copy of the License at
-
-	    http://www.apache.org/licenses/LICENSE-2.0
-
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
- */
-package com.ebay.oss.griffin.service;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Service;
-
-import com.ebay.oss.griffin.domain.UserSubscription;
-import com.ebay.oss.griffin.repo.UserSubscriptionRepo;
-
-@Service
-public class SubscribeServiceImpl implements SubscribeService{
-	
-	final static Logger logger = LoggerFactory.getLogger(SubscribeServiceImpl.class);
-	
-	@Autowired
-    UserSubscriptionRepo subscriptionRepo;
-
-	@Override
-	public void subscribe(UserSubscription item) {
-		subscriptionRepo.upsertUserSubscribe(item);
-	}
-
-	@Override
-	public UserSubscription getSubscribe(String user) {
-		return subscriptionRepo.getUserSubscribeItem(user);
-	}
-	
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/AccurcyModelCreator.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/AccurcyModelCreator.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/AccurcyModelCreator.java
deleted file mode 100644
index b6f5ef6..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/AccurcyModelCreator.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package com.ebay.oss.griffin.service.modelcreator;
-
-import org.springframework.stereotype.Component;
-
-import com.ebay.oss.griffin.domain.DqModel;
-import com.ebay.oss.griffin.domain.ModelStatus;
-import com.ebay.oss.griffin.domain.ModelType;
-import com.ebay.oss.griffin.vo.MappingItemInput;
-import com.ebay.oss.griffin.vo.ModelInput;
-
-@Component("accuracyModelCreator")
-public class AccurcyModelCreator extends BaseModelCreator {
-
-    @Override
-    protected void enhance(DqModel entity, ModelInput input) {
-        entity.setStatus(ModelStatus.TESTING);
-
-        newSampleJob4Model(entity);
-    }
-
-    @Override
-    public boolean isSupport(ModelInput input) {
-        return input.getBasic() != null && input.getBasic().getType() == ModelType.ACCURACY;
-    }
-
-    protected String contentOf( ModelInput input) {
-	    String content = input.getExtra().getSrcDb() + "|"
-	                    + input.getExtra().getSrcDataSet() + "|"
-	                    + input.getExtra().getTargetDb() + "|"
-	                    + input.getExtra().getTargetDataSet() + "|";
-
-	    String delimeter = "";
-	    for(MappingItemInput itm : input.getMappings()) {
-	        content += delimeter 
-	                        + itm.getSrc() + ","
-	                        + itm.getTarget() + ","
-	                        + itm.isIsPk() + ","
-	                        + itm.getMatchMethod();
-	        delimeter = ";";
-	    }
-
-	    return content;
-
-    }
-}

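contentOf() in the deleted creator above serializes an accuracy model into a pipe-delimited header (source db, source dataset, target db, target dataset) followed by semicolon-separated mapping entries, each a comma-separated src,target,isPk,matchMethod tuple. A small sketch of the resulting format, using made-up database and column names:

    public class AccuracyContentSketch {
        public static void main(String[] args) {
            // Header: srcDb|srcDataSet|targetDb|targetDataSet|
            StringBuilder content = new StringBuilder("hive|src_orders|hive|tgt_orders|");

            // Each mapping contributes src,target,isPk,matchMethod; entries are joined by ';'
            String[][] mappings = {
                    {"order_id", "order_id", "true",  "EXACT"},
                    {"amount",   "amount",   "false", "EXACT"}
            };
            String delimiter = "";
            for (String[] m : mappings) {
                content.append(delimiter)
                       .append(m[0]).append(',')
                       .append(m[1]).append(',')
                       .append(m[2]).append(',')
                       .append(m[3]);
                delimiter = ";";
            }

            // hive|src_orders|hive|tgt_orders|order_id,order_id,true,EXACT;amount,amount,false,EXACT
            System.out.println(content);
        }
    }
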
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/AnomalyModelCreator.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/AnomalyModelCreator.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/AnomalyModelCreator.java
deleted file mode 100644
index 1c18162..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/AnomalyModelCreator.java
+++ /dev/null
@@ -1,32 +0,0 @@
-package com.ebay.oss.griffin.service.modelcreator;
-
-import org.springframework.stereotype.Component;
-
-import com.ebay.oss.griffin.domain.DqModel;
-import com.ebay.oss.griffin.domain.ModelStatus;
-import com.ebay.oss.griffin.domain.ModelType;
-import com.ebay.oss.griffin.vo.ModelInput;
-
-@Component("anomalyModelCreator")
-public class AnomalyModelCreator extends BaseModelCreator {
-
-    @Override
-    public boolean isSupport(ModelInput input) {
-		return input.getBasic() != null && input.getBasic().getType() == ModelType.ANOMALY;
-
-    }
-
-    @Override
-    protected void enhance(DqModel entity, ModelInput input) {
-        entity.setStatus(ModelStatus.DEPLOYED);
-
-        DqModel countModel = createCountModel(input);
-        dqModelRepo.addReference(countModel, input.getBasic().getName());
-    }
-
-    protected String contentOf(ModelInput input) {
-        return input.getExtra().getSrcDb() + "|"
-                        + input.getExtra().getSrcDataSet() + "|"
-                        + input.getExtra().getAnType();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/BaseModelCreator.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/BaseModelCreator.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/BaseModelCreator.java
deleted file mode 100644
index 9a182de..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/BaseModelCreator.java
+++ /dev/null
@@ -1,157 +0,0 @@
-package com.ebay.oss.griffin.service.modelcreator;
-
-import java.util.Calendar;
-import java.util.Date;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-
-import com.ebay.oss.griffin.domain.DataAsset;
-import com.ebay.oss.griffin.domain.DqJob;
-import com.ebay.oss.griffin.domain.DqModel;
-import com.ebay.oss.griffin.domain.ModelType;
-import com.ebay.oss.griffin.domain.ScheduleType;
-import com.ebay.oss.griffin.domain.ValidityType;
-import com.ebay.oss.griffin.error.BarkDbOperationException;
-import com.ebay.oss.griffin.repo.DataAssetRepo;
-import com.ebay.oss.griffin.repo.DqJobRepo;
-import com.ebay.oss.griffin.repo.DqModelRepo;
-import com.ebay.oss.griffin.service.DqModelCreator;
-import com.ebay.oss.griffin.vo.ModelBasicInputNew;
-import com.ebay.oss.griffin.vo.ModelExtraInputNew;
-import com.ebay.oss.griffin.vo.ModelInput;
-
-public abstract class BaseModelCreator implements DqModelCreator {
-
-    private static Logger logger = LoggerFactory.getLogger(BaseModelCreator.class);
-
-    @Autowired
-	DqModelRepo dqModelRepo;
-
-    @Autowired
-    DataAssetRepo dataAssetRepo;
-
-	@Autowired
-    private DqJobRepo jobRepo;
-
-    boolean hasModelWithName(String name) {
-		return null != dqModelRepo.findByName(name);
-	}
-	@Override
-	public DqModel newModel(ModelInput input) {
-		if ( hasModelWithName(input.getBasic().getName()) ) {
-			throw new BarkDbOperationException("Record already existing");
-		}
-		
-		try {
-		    DqModel entity = createModel(input);
-		    
-		    String content = contentOf(input);
-		    entity.setModelContent(content);
-
-		    enhance(entity, input);
-			return dqModelRepo.update(entity);
-		} catch (Exception e) {
-			logger.error(e.toString());
-			throw new BarkDbOperationException("Failed to create a new Model", e);
-		}
-
-	}
-	
-    protected abstract String contentOf(ModelInput input);
-
-    protected abstract void enhance(DqModel entity, ModelInput input);
-
-    protected DqModel createModel(ModelInput input) {
-        DqModel entity = new DqModel();
-        entity.set_id(dqModelRepo.getNextId());
-        entity.setModelId(input.getBasic().getName());
-        entity.setModelName(input.getBasic().getName());
-        entity.setNotificationEmail(input.getBasic().getEmail());
-        entity.setOwner(input.getBasic().getOwner());
-        entity.setSchedule(input.getBasic().getScheduleType());
-        entity.setSystem(input.getBasic().getSystem());
-        entity.setThreshold(input.getBasic().getThreshold());
-        entity.setModelDesc(input.getBasic().getDesc());
-        entity.setTimestamp(new Date().getTime());
-        entity.setAssetName(input.getBasic().getDataaset());
-        entity.setAssetId(input.getBasic().getDataasetId());
-        entity.setReferenceModel("");
-        entity.setModelType(input.getBasic().getType());
-
-        if (input.getBasic().getStarttime() == 0) {
-            entity.setStarttime(new Date().getTime());
-        } else {
-            entity.setStarttime(input.getBasic().getStarttime());
-        }
-
-        return entity;
-    }
-    protected DqModel createCountModel(ModelInput input) {
-        DqModel countModel = dqModelRepo.findCountModelByAssetID(input.getBasic().getDataasetId());
-        if (countModel != null) {
-            return countModel;
-        } 
-
-        DataAsset asset = dataAssetRepo.getById(new Long(input.getBasic().getDataasetId()));
-        ModelBasicInputNew basic = new ModelBasicInputNew();
-        ModelExtraInputNew extra = new ModelExtraInputNew();
-        basic.setDataaset(input.getBasic().getDataaset());
-        basic.setDataasetId(input.getBasic().getDataasetId());
-        basic.setDesc("Count for " + input.getBasic().getDataaset());
-        basic.setEmail(input.getBasic().getEmail());
-        basic.setName("Count_" + input.getBasic().getName() );
-        basic.setOwner(input.getBasic().getOwner());
-        basic.setScheduleType(input.getBasic() .getScheduleType());
-        basic.setStatus(input.getBasic().getStatus());
-        basic.setSystem(input.getBasic().getSystem());
-        basic.setType(ModelType.VALIDITY);
-
-        extra.setVaType(ValidityType.TOTAL_COUNT);
-        extra.setSrcDataSet(asset.getSystem());
-        extra.setSrcDb(asset.getPlatform());
-
-        ModelInput tempCountModel = new ModelInput();
-        tempCountModel.setBasic(basic);
-        tempCountModel.setExtra(extra);
-        return newModel(tempCountModel);
-    }
-
-    void newSampleJob4Model(DqModel input) {
-		int type = input.getSchedule();
-        Calendar c = Calendar.getInstance();
-        Date date = new Date();
-        date.setMinutes(0);
-        date.setSeconds(0);
-        c.setTime(date);
-
-        for (int i = 0; i < MIN_TESTING_JOB_NUMBER; i++) {
-            if (type == ScheduleType.DAILY)
-                c.add(Calendar.DATE, -1);
-            else if (type == ScheduleType.HOURLY)
-                c.add(Calendar.HOUR, -1);
-            else if (type == ScheduleType.WEEKLY)
-                c.add(Calendar.DATE, -7);
-            else if (type == ScheduleType.MONTHLY)
-                c.add(Calendar.MONTH, -1);
-            else
-                continue;
-
-            long starttime = c.getTime().getTime() / 1000 * 1000;
-
-            DqJob job = new DqJob();
-            job.setModelList(input.getModelName());
-            job.setStarttime(starttime);
-            job.setStatus(0);
-            job.setId(input.getModelName() + "_" + starttime);
-            job.setJobType(input.getModelType());
-
-            if (jobRepo.newJob(job) == 0) {
-                logger.warn("===================new job failure");
-                continue;
-            }
-        }
-    }
-
-}

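newSampleJob4Model() in the deleted base class backfills a fixed number of sample jobs by stepping a calendar backwards one schedule interval per job and truncating each start time to whole seconds. A standalone sketch of just that back-stepping, with the ScheduleType constants and the job count replaced by illustrative stand-ins:

    import java.util.ArrayList;
    import java.util.Calendar;
    import java.util.List;

    public class BackfillSketch {

        // Stand-ins for ScheduleType.* and MIN_TESTING_JOB_NUMBER in the deleted code.
        static final int DAILY = 0, HOURLY = 1, WEEKLY = 2, MONTHLY = 3;
        static final int SAMPLE_JOBS = 3;

        // Returns job start times in ms, truncated to whole seconds, newest first.
        static List<Long> backfillStartTimes(int scheduleType) {
            Calendar c = Calendar.getInstance();
            c.set(Calendar.MINUTE, 0);       // same effect as date.setMinutes(0) above
            c.set(Calendar.SECOND, 0);
            c.set(Calendar.MILLISECOND, 0);  // millis are dropped by the truncation anyway

            List<Long> startTimes = new ArrayList<>();
            for (int i = 0; i < SAMPLE_JOBS; i++) {
                switch (scheduleType) {
                    case DAILY:   c.add(Calendar.DATE, -1);  break;
                    case HOURLY:  c.add(Calendar.HOUR, -1);  break;
                    case WEEKLY:  c.add(Calendar.DATE, -7);  break;
                    case MONTHLY: c.add(Calendar.MONTH, -1); break;
                    default:      continue;  // unknown schedule: skip, as the original does
                }
                startTimes.add(c.getTimeInMillis() / 1000 * 1000);
            }
            return startTimes;
        }

        public static void main(String[] args) {
            System.out.println(backfillStartTimes(HOURLY));
        }
    }
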
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/DqModelCreatorChain.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/DqModelCreatorChain.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/DqModelCreatorChain.java
deleted file mode 100644
index 6073743..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/DqModelCreatorChain.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package com.ebay.oss.griffin.service.modelcreator;
-
-import java.util.List;
-
-import org.springframework.stereotype.Component;
-
-import com.ebay.oss.griffin.domain.DqModel;
-import com.ebay.oss.griffin.service.DqModelCreator;
-import com.ebay.oss.griffin.vo.ModelInput;
-
-@Component("modelCreatorChain")
-public class DqModelCreatorChain implements DqModelCreator {
-    
-    private final List<DqModelCreator> list;
-    
-    public DqModelCreatorChain(List<DqModelCreator> list) {
-        this.list = list;
-    }
-
-    @Override
-    public DqModel newModel(ModelInput input) {
-        for(DqModelCreator each : list) {
-            if(each.isSupport(input)) {
-                return each.newModel(input);
-            }
-        }
-        throw new RuntimeException("Unsupported ModelInput" + input.getBasic().getType());
-    }
-
-    @Override
-    public boolean isSupport(ModelInput input) {
-        return true;
-    }
-
-}

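The deleted DqModelCreatorChain is a first-match dispatcher: Spring injects every DqModelCreator bean into the constructor's list, and newModel() delegates to the first creator whose isSupport() accepts the input. A minimal illustration of that dispatch, with stub types standing in for ModelInput, DqModel, and DqModelCreator:

    import java.util.Arrays;
    import java.util.List;

    public class CreatorChainSketch {

        // Stand-in for ModelInput: only the type string matters for dispatch.
        static class Input {
            final String type;
            Input(String type) { this.type = type; }
        }

        // Stand-in for DqModelCreator, with a String in place of DqModel.
        interface Creator {
            boolean isSupport(Input input);
            String newModel(Input input);
        }

        // Same first-match loop as DqModelCreatorChain.newModel().
        static String dispatch(List<Creator> chain, Input input) {
            for (Creator each : chain) {
                if (each.isSupport(input)) {
                    return each.newModel(input);
                }
            }
            throw new RuntimeException("Unsupported ModelInput: " + input.type);
        }

        public static void main(String[] args) {
            Creator accuracy = new Creator() {
                public boolean isSupport(Input in) { return "ACCURACY".equals(in.type); }
                public String newModel(Input in)   { return "accuracy model"; }
            };
            Creator validity = new Creator() {
                public boolean isSupport(Input in) { return "VALIDITY".equals(in.type); }
                public String newModel(Input in)   { return "validity model"; }
            };

            List<Creator> chain = Arrays.asList(accuracy, validity);
            System.out.println(dispatch(chain, new Input("VALIDITY")));  // prints: validity model
        }
    }

Because the concrete creators are discovered by type, supporting a new model type only means adding another @Component that implements DqModelCreator; the chain itself does not change.
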
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/PublishModelCreator.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/PublishModelCreator.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/PublishModelCreator.java
deleted file mode 100644
index 6dfe356..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/PublishModelCreator.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package com.ebay.oss.griffin.service.modelcreator;
-
-import org.springframework.stereotype.Component;
-
-import com.ebay.oss.griffin.domain.DqModel;
-import com.ebay.oss.griffin.domain.ModelStatus;
-import com.ebay.oss.griffin.domain.ModelType;
-import com.ebay.oss.griffin.vo.ModelInput;
-
-@Component("publishModelCreator")
-public class PublishModelCreator extends BaseModelCreator {
-
-    @Override
-    public boolean isSupport(ModelInput input) {
-        return input.getBasic() != null && input.getBasic().getType() == ModelType.PUBLISH;
-    }
-
-    @Override
-    protected void enhance(DqModel entity, ModelInput input) {
-        entity.setStatus(ModelStatus.DEPLOYED);
-    }
-
-
-    @Override
-    protected String contentOf(ModelInput input) {
-        return input.getExtra().getPublishUrl();
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/ValidityModelCreator.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/ValidityModelCreator.java b/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/ValidityModelCreator.java
deleted file mode 100644
index 6c5b227..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/service/modelcreator/ValidityModelCreator.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package com.ebay.oss.griffin.service.modelcreator;
-
-import org.springframework.stereotype.Component;
-
-import com.ebay.oss.griffin.domain.DqModel;
-import com.ebay.oss.griffin.domain.ModelStatus;
-import com.ebay.oss.griffin.domain.ModelType;
-import com.ebay.oss.griffin.domain.ValidityType;
-import com.ebay.oss.griffin.vo.ModelInput;
-
-@Component("validityModelCreator")
-public class ValidityModelCreator extends BaseModelCreator {
-
-    @Override
-    public boolean isSupport(ModelInput input) {
-			return input.getBasic() != null && input.getBasic().getType() == ModelType.VALIDITY;
-    }
-
-    @Override
-    protected void enhance(DqModel entity, ModelInput input) {
-        if(input.getExtra().getVaType() == ValidityType.TOTAL_COUNT){
-            entity.setStatus(input.getBasic().getStatus());
-        }else{
-            entity.setStatus(ModelStatus.TESTING);
-        }
-
-        newSampleJob4Model(entity);
-    }
-
-    protected String contentOf(ModelInput input) {
-        return input.getExtra().getSrcDb() + "|"
-                        + input.getExtra().getSrcDataSet() + "|"
-                        + input.getExtra().getVaType() + "|"
-                        + input.getExtra().getColumn();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AccuracyHiveJobConfig.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AccuracyHiveJobConfig.java b/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AccuracyHiveJobConfig.java
deleted file mode 100644
index 676279d..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AccuracyHiveJobConfig.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
-	Copyright (c) 2016 eBay Software Foundation.
-	Licensed under the Apache License, Version 2.0 (the "License");
-	you may not use this file except in compliance with the License.
-	You may obtain a copy of the License at
-
-	    http://www.apache.org/licenses/LICENSE-2.0
-
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
- */
-package com.ebay.oss.griffin.vo;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class AccuracyHiveJobConfig {
-	public String source;
-	public String target;
-	public List<AccuracyHiveJobConfigDetail> accuracyMapping = new ArrayList<AccuracyHiveJobConfigDetail>();
-	public List<PartitionConfig> srcPartitions = new ArrayList<PartitionConfig>();
-	public List<List<PartitionConfig>> tgtPartitions = new ArrayList<List<PartitionConfig>>();
-
-	public AccuracyHiveJobConfig() { }
-
-	public AccuracyHiveJobConfig(String source, String target) {
-		this.source = source;
-		this.target = target;
-	}
-
-	public String getSource() {
-		return source;
-	}
-
-	public void setSource(String source) {
-		this.source = source;
-	}
-
-	public String getTarget() {
-		return target;
-	}
-
-	public void setTarget(String target) {
-		this.target = target;
-	}
-
-	public List<AccuracyHiveJobConfigDetail> getAccuracyMapping() {
-		return accuracyMapping;
-	}
-
-	public void setAccuracyMapping(List<AccuracyHiveJobConfigDetail> accuracyMapping) {
-		this.accuracyMapping = accuracyMapping;
-	}
-
-	public List<PartitionConfig> getSrcPartitions() {
-		return srcPartitions;
-	}
-
-	public void setSrcPartitions(List<PartitionConfig> srcPartitions) {
-		if(srcPartitions!=null) this.srcPartitions = srcPartitions;
-	}
-
-	public List<List<PartitionConfig>> getTgtPartitions() {
-		return tgtPartitions;
-	}
-
-	public void setTgtPartitions(List<List<PartitionConfig>> tgtPartitions) {
-		if(tgtPartitions!=null) this.tgtPartitions = tgtPartitions;
-	}
-
-
-
-
-
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AccuracyHiveJobConfigDetail.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AccuracyHiveJobConfigDetail.java b/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AccuracyHiveJobConfigDetail.java
deleted file mode 100644
index c85152d..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AccuracyHiveJobConfigDetail.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
-	Copyright (c) 2016 eBay Software Foundation.
-	Licensed under the Apache License, Version 2.0 (the "License");
-	you may not use this file except in compliance with the License.
-	You may obtain a copy of the License at
-
-	    http://www.apache.org/licenses/LICENSE-2.0
-
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
- */
-package com.ebay.oss.griffin.vo;
-
-
-public class AccuracyHiveJobConfigDetail {
-	public int sourceColId;
-	public String sourceColName;
-	public int targetColId;
-	public String targetColName;
-	public String matchFunction;
-	public boolean isPK;
-
-	public AccuracyHiveJobConfigDetail() { }
-
-	public AccuracyHiveJobConfigDetail(int sourceColId, String sourceColName, int targetColId, String targetColName, String matchFunction, boolean isPK)
-	{
-		this.sourceColId = sourceColId;
-		this.sourceColName = sourceColName;
-		this.targetColId = targetColId;
-		this.targetColName = targetColName;
-		this.matchFunction = matchFunction;
-		this.isPK = isPK;
-	}
-
-	public int getSourceColId() {
-		return sourceColId;
-	}
-
-	public void setSourceColId(int sourceColId) {
-		this.sourceColId = sourceColId;
-	}
-
-	public String getSourceColName() {
-		return sourceColName;
-	}
-
-	public void setSourceColName(String sourceColName) {
-		this.sourceColName = sourceColName;
-	}
-
-	public int getTargetColId() {
-		return targetColId;
-	}
-
-	public void setTargetColId(int targetColId) {
-		this.targetColId = targetColId;
-	}
-
-	public String getTargetColName() {
-		return targetColName;
-	}
-
-	public void setTargetColName(String targetColName) {
-		this.targetColName = targetColName;
-	}
-
-	public String getMatchFunction() {
-		return matchFunction;
-	}
-
-	public void setMatchFunction(String matchFunction) {
-		this.matchFunction = matchFunction;
-	}
-
-	public boolean isPK() {
-		return isPK;
-	}
-
-	public void setPK(boolean isPK) {
-		this.isPK = isPK;
-	}
-
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AssetLevelMetrics.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AssetLevelMetrics.java b/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AssetLevelMetrics.java
deleted file mode 100644
index 05d8fa3..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AssetLevelMetrics.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
-	Copyright (c) 2016 eBay Software Foundation.
-	Licensed under the Apache License, Version 2.0 (the "License");
-	you may not use this file except in compliance with the License.
-	You may obtain a copy of the License at
-
-	    http://www.apache.org/licenses/LICENSE-2.0
-
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
- */
-package com.ebay.oss.griffin.vo;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-public class AssetLevelMetrics {
-	private String name;
-	private float dq;
-	private int dqfail;
-	private long timestamp;
-	private String metricType;
-	private String assetName;
-
-	private List<AssetLevelMetricsDetail> details = new ArrayList<AssetLevelMetricsDetail>();
-
-	public AssetLevelMetrics() { }
-
-	public AssetLevelMetrics(String name) {
-		this.name = name;
-	}
-
-	public AssetLevelMetrics(String name, float dq, long timestamp) {
-		this.name = name;
-		this.dq = dq;
-		this.timestamp = timestamp;
-	}
-
-	public AssetLevelMetrics(String name, String metricType, float dq, long timestamp, int dqfail) {
-		this.name = name;
-		this.dq = dq;
-		this.timestamp = timestamp;
-		this.dqfail = dqfail;
-		this.metricType = metricType;
-	}
-
-	public AssetLevelMetrics(AssetLevelMetrics other, int count) {
-		this.name = other.getName();
-		this.dq = other.getDq();
-		this.timestamp = other.getTimestamp();
-		this.dqfail = other.getDqfail();
-		this.metricType = other.getMetricType();
-		List<AssetLevelMetricsDetail> otherDetail = other.getDetails();
-		if(count == -1) count = other.getDetails().size();
-		if(other.getDetails().size()<count) count = other.getDetails().size();
-		Collections.sort(otherDetail);
-		for(int i=0;i<count;i++) {
-			AssetLevelMetricsDetail tempAssetLevelMetricsDetail = otherDetail.get(i);
-			details.add(new AssetLevelMetricsDetail(tempAssetLevelMetricsDetail));
-		}
-	}
-
-	public String getName() {
-		return name;
-	}
-
-	public void setName(String name) {
-		this.name = name;
-	}
-
-	public float getDq() {
-		return dq;
-	}
-
-	public void setDq(float dq) {
-		this.dq = dq;
-	}
-
-	public long getTimestamp() {
-		return timestamp;
-	}
-
-	public void setTimestamp(long timestamp) {
-		this.timestamp = timestamp;
-	}
-
-	public List<AssetLevelMetricsDetail> getDetails() {
-		return details;
-	}
-
-	public void setDetails(List<AssetLevelMetricsDetail> details) {
-		this.details = details;
-	}
-
-	public void addAssetLevelMetricsDetail(AssetLevelMetricsDetail dq)
-	{
-		this.details.add(dq);
-	}
-
-	public int getDqfail() {
-		return dqfail;
-	}
-
-	public void setDqfail(int dqfail) {
-		this.dqfail = dqfail;
-	}
-
-	public String getMetricType() {
-		return metricType;
-	}
-
-	public void setMetricType(String metricType) {
-		this.metricType = metricType;
-	}
-
-	public String getAssetName() {
-		return assetName;
-	}
-
-	public void setAssetName(String asseetName) {
-		this.assetName = asseetName;
-	}
-
-
-
-
-
-
-
-
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AssetLevelMetricsDetail.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AssetLevelMetricsDetail.java b/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AssetLevelMetricsDetail.java
deleted file mode 100644
index 59f515e..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/vo/AssetLevelMetricsDetail.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
-	Copyright (c) 2016 eBay Software Foundation.
-	Licensed under the Apache License, Version 2.0 (the "License");
-	you may not use this file except in compliance with the License.
-	You may obtain a copy of the License at
-
-	    http://www.apache.org/licenses/LICENSE-2.0
-
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
- */
-package com.ebay.oss.griffin.vo;
-
-
-public class AssetLevelMetricsDetail implements Comparable<AssetLevelMetricsDetail> {
-
-	private long timestamp;
-	private float value;
-	private BollingerBandsEntity bolling;
-	private MADEntity MAD;
-	private float comparisionValue;
-
-	public AssetLevelMetricsDetail() { }
-
-	public AssetLevelMetricsDetail(long timestamp, float value) {
-		this.timestamp = timestamp;
-		this.value = value;
-	}
-
-	public AssetLevelMetricsDetail(long timestamp, float value,
-			BollingerBandsEntity bolling) {
-		this.timestamp = timestamp;
-		this.value = value;
-		this.bolling = bolling;
-	}
-
-	public AssetLevelMetricsDetail(long timestamp, float value,
-			float comparisionValue) {
-		this.timestamp = timestamp;
-		this.value = value;
-		this.comparisionValue = comparisionValue;
-	}
-
-	public AssetLevelMetricsDetail(long timestamp, float value, MADEntity MAD) {
-		this.timestamp = timestamp;
-		this.value = value;
-		this.MAD = MAD;
-	}
-
-	public AssetLevelMetricsDetail(BollingerBandsEntity bolling) {
-		this.bolling = bolling;
-	}
-
-	public AssetLevelMetricsDetail(MADEntity MAD) {
-		this.MAD = MAD;
-	}
-
-	public AssetLevelMetricsDetail(AssetLevelMetricsDetail other) {
-		this.timestamp = other.getTimestamp();
-		this.value = other.getValue();
-		this.comparisionValue = other.getComparisionValue();
-		if (other.getBolling() != null)
-			this.bolling = new BollingerBandsEntity(other.getBolling()
-					.getUpper(), other.getBolling().getLower(), other
-					.getBolling().getMean());
-		if (other.getMAD() != null)
-			this.MAD = new MADEntity(other.getMAD().getUpper(), other.getMAD()
-					.getLower());
-	}
-
-	public AssetLevelMetricsDetail(float comparisionValue) {
-		this.comparisionValue = comparisionValue;
-	}
-
-	public long getTimestamp() {
-		return timestamp;
-	}
-
-	public void setTimestamp(long timestamp) {
-		this.timestamp = timestamp;
-	}
-
-	public float getValue() {
-		return value;
-	}
-
-	public void setValue(float value) {
-		this.value = value;
-	}
-
-	public BollingerBandsEntity getBolling() {
-		return bolling;
-	}
-
-	public void setBolling(BollingerBandsEntity bolling) {
-		this.bolling = bolling;
-	}
-
-	public float getComparisionValue() {
-		return comparisionValue;
-	}
-
-	public void setComparisionValue(float comparisionValue) {
-		this.comparisionValue = comparisionValue;
-	}
-
-	public MADEntity getMAD() {
-		return MAD;
-	}
-
-	public void setMAD(MADEntity mAD) {
-		MAD = mAD;
-	}
-
-	@Override
-	public int compareTo(AssetLevelMetricsDetail o) {
-		return (int) Math.signum(o.getTimestamp() - this.getTimestamp());
-//		return o.getTimestamp() == this.getTimestamp() ? 0
-//				: (o.getTimestamp() > this.getTimestamp() ? 1 : -1);
-	}
-
-}

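compareTo() above orders details newest-first by passing the timestamp difference through Math.signum. For epoch-millisecond values that is safe, but an overflow-proof equivalent is the commented-out comparison, or simply Long.compare with the arguments reversed, as sketched here:

    import java.util.Arrays;

    public class TimestampOrderSketch {

        // Overflow-safe "newest first" comparison equivalent to the compareTo above.
        static int newestFirst(long a, long b) {
            return Long.compare(b, a);  // reversed arguments => descending order
        }

        public static void main(String[] args) {
            Long[] timestamps = {1_460_000_000_000L, 1_470_000_000_000L, 1_450_000_000_000L};
            Arrays.sort(timestamps, (x, y) -> newestFirst(x, y));
            System.out.println(Arrays.toString(timestamps));  // newest first
        }
    }
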
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/vo/BaseObj.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/vo/BaseObj.java b/griffin-core/src/main/java/com/ebay/oss/griffin/vo/BaseObj.java
deleted file mode 100644
index 39225d1..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/vo/BaseObj.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
-	Copyright (c) 2016 eBay Software Foundation.
-	Licensed under the Apache License, Version 2.0 (the "License");
-	you may not use this file except in compliance with the License.
-	You may obtain a copy of the License at
-
-	    http://www.apache.org/licenses/LICENSE-2.0
-
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
- */
-package com.ebay.oss.griffin.vo;
-
-import java.util.Set;
-
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.ValidatorFactory;
-
-import com.ebay.oss.griffin.error.ErrorMessage;
-
-public class BaseObj {
-
-	public ErrorMessage validate() {
-		ErrorMessage msg = null;
-
-		ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
-		Validator validator = factory.getValidator();
-		Set<ConstraintViolation<BaseObj>> constraintViolations = validator
-				.validate(this);
-
-		if (constraintViolations.size() > 0) {
-			msg = new ErrorMessage();
-			StringBuffer error = new StringBuffer();
-			for (ConstraintViolation<BaseObj> violation : constraintViolations) {
-				error.append(violation.getPropertyPath() + " " + violation.getMessage() + ", ");
-			}
-
-			msg.setMessage(error.toString());
-
-		}
-
-		return msg;
-
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/f629d0f4/griffin-core/src/main/java/com/ebay/oss/griffin/vo/BollingerBandsEntity.java
----------------------------------------------------------------------
diff --git a/griffin-core/src/main/java/com/ebay/oss/griffin/vo/BollingerBandsEntity.java b/griffin-core/src/main/java/com/ebay/oss/griffin/vo/BollingerBandsEntity.java
deleted file mode 100644
index 2067c0e..0000000
--- a/griffin-core/src/main/java/com/ebay/oss/griffin/vo/BollingerBandsEntity.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
-	Copyright (c) 2016 eBay Software Foundation.
-	Licensed under the Apache License, Version 2.0 (the "License");
-	you may not use this file except in compliance with the License.
-	You may obtain a copy of the License at
-
-	    http://www.apache.org/licenses/LICENSE-2.0
-
-	Unless required by applicable law or agreed to in writing, software
-	distributed under the License is distributed on an "AS IS" BASIS,
-	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-	See the License for the specific language governing permissions and
-	limitations under the License.
- */
-package com.ebay.oss.griffin.vo;
-
-
-public class BollingerBandsEntity {
-    private long upper;
-    private long lower;
-    private long mean;
-
-    public BollingerBandsEntity() {}
-
-    public BollingerBandsEntity(long upper, long lower, long mean) {
-        this.upper = upper;
-        this.lower = lower;
-        this.mean = mean;
-    }
-
-    public long getUpper() {
-        return upper;
-    }
-
-    public void setUpper(long upper) {
-        this.upper = upper;
-    }
-
-    public long getLower() {
-        return lower;
-    }
-
-    public void setLower(long lower) {
-        this.lower = lower;
-    }
-
-    public long getMean() {
-        return mean;
-    }
-
-    public void setMean(long mean) {
-        this.mean = mean;
-    }
-
-
-    public BollingerBandsEntity clone() {
-        return new BollingerBandsEntity(getUpper(), getLower(), getMean());
-    }
-
-
-}