Posted to dev@sdap.apache.org by GitBox <gi...@apache.org> on 2018/03/07 21:50:24 UTC

[GitHub] Yongyao closed pull request #6: SDAP-35 (completed the configuration change)

Yongyao closed pull request #6: SDAP-35 (completed the configuration change)
URL: https://github.com/apache/incubator-sdap-mudrod/pull/6
This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:


diff --git a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/DiscoveryEngineAbstract.java b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/DiscoveryEngineAbstract.java
index 6e256c3..6c622e9 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/DiscoveryEngineAbstract.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/DiscoveryEngineAbstract.java
@@ -13,12 +13,12 @@
  */
 package org.apache.sdap.mudrod.discoveryengine;
 
-import java.io.Serializable;
-import java.util.Properties;
-
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
 
+import java.io.Serializable;
+import java.util.Properties;
+
 public abstract class DiscoveryEngineAbstract extends MudrodAbstract implements Serializable {
   /**
    *
@@ -43,4 +43,4 @@ public DiscoveryEngineAbstract(Properties props, ESDriver es, SparkDriver spark)
    * Abstract method of output
    */
   public abstract void output();
-}
+}
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/DiscoveryStepAbstract.java b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/DiscoveryStepAbstract.java
index 9af4965..49a8b97 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/DiscoveryStepAbstract.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/DiscoveryStepAbstract.java
@@ -13,11 +13,11 @@
  */
 package org.apache.sdap.mudrod.discoveryengine;
 
-import java.util.Properties;
-
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
 
+import java.util.Properties;
+
 /*
  * Generic class of discovery engine step
  */
@@ -47,4 +47,4 @@ public DiscoveryStepAbstract(Properties props, ESDriver es, SparkDriver spark) {
    */
   public abstract Object execute(Object o);
 
-}
+}
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/MudrodAbstract.java b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/MudrodAbstract.java
index d62c627..7888c50 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/MudrodAbstract.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/MudrodAbstract.java
@@ -13,10 +13,10 @@
  */
 package org.apache.sdap.mudrod.discoveryengine;
 
-import org.apache.commons.io.IOUtils;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
 import org.apache.sdap.mudrod.main.MudrodConstants;
+import org.apache.commons.io.IOUtils;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 import org.slf4j.Logger;
@@ -52,7 +52,7 @@ public MudrodAbstract(Properties props, ESDriver es, SparkDriver spark) {
     this.es = es;
     this.spark = spark;
 
-    if (this.props != null) {
+    if (this.props != null && this.es != null) {
       this.initMudrod();
     }
   }
diff --git a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/RecommendEngine.java b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/RecommendEngine.java
index d54a556..cc66ad6 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/RecommendEngine.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/RecommendEngine.java
@@ -4,11 +4,11 @@
 import org.apache.sdap.mudrod.driver.SparkDriver;
 import org.apache.sdap.mudrod.recommendation.pre.ImportMetadata;
 import org.apache.sdap.mudrod.recommendation.pre.MetadataTFIDFGenerator;
-import org.apache.sdap.mudrod.recommendation.pre.NormalizeVariables;
+import org.apache.sdap.mudrod.recommendation.pre.NormalizeFeatures;
 import org.apache.sdap.mudrod.recommendation.pre.SessionCooccurence;
 import org.apache.sdap.mudrod.recommendation.process.AbstractBasedSimilarity;
-import org.apache.sdap.mudrod.recommendation.process.VariableBasedSimilarity;
-import org.apache.sdap.mudrod.recommendation.process.SessionBasedCF;
+import org.apache.sdap.mudrod.recommendation.process.FeatureBasedSimilarity;
+import org.apache.sdap.mudrod.recommendation.process.sessionBasedCF;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -39,7 +39,7 @@ public void preprocess() {
     DiscoveryStepAbstract sessionMatrixGen = new SessionCooccurence(this.props, this.es, this.spark);
     sessionMatrixGen.execute();
 
-    DiscoveryStepAbstract transformer = new NormalizeVariables(this.props, this.es, this.spark);
+    DiscoveryStepAbstract transformer = new NormalizeFeatures(this.props, this.es, this.spark);
     transformer.execute();
 
     endTime = System.currentTimeMillis();
@@ -57,10 +57,10 @@ public void process() {
     DiscoveryStepAbstract tfCF = new AbstractBasedSimilarity(this.props, this.es, this.spark);
     tfCF.execute();
 
-    DiscoveryStepAbstract cbCF = new VariableBasedSimilarity(this.props, this.es, this.spark);
+    DiscoveryStepAbstract cbCF = new FeatureBasedSimilarity(this.props, this.es, this.spark);
     cbCF.execute();
 
-    DiscoveryStepAbstract sbCF = new SessionBasedCF(this.props, this.es, this.spark);
+    DiscoveryStepAbstract sbCF = new sessionBasedCF(this.props, this.es, this.spark);
     sbCF.execute();
 
     endTime = System.currentTimeMillis();
diff --git a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/WeblogDiscoveryEngine.java b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/WeblogDiscoveryEngine.java
index b2cdb9f..68724c1 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/WeblogDiscoveryEngine.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/WeblogDiscoveryEngine.java
@@ -13,16 +13,16 @@
  */
 package org.apache.sdap.mudrod.discoveryengine;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
 import org.apache.sdap.mudrod.main.MudrodConstants;
 import org.apache.sdap.mudrod.weblog.pre.*;
 import org.apache.sdap.mudrod.weblog.process.ClickStreamAnalyzer;
 import org.apache.sdap.mudrod.weblog.process.UserHistoryAnalyzer;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -99,11 +99,11 @@ public void preprocess() {
 
     ArrayList<String> inputList = (ArrayList<String>) getFileList(props.getProperty(MudrodConstants.DATA_DIR));
 
-    for (String anInputList : inputList) {
-      timeSuffix = anInputList;
+    for (int i = 0; i < inputList.size(); i++) {
+      timeSuffix = inputList.get(i);
       props.put(MudrodConstants.TIME_SUFFIX, timeSuffix);
       startTime = System.currentTimeMillis();
-      LOG.info("Processing logs dated {}", anInputList);
+      LOG.info("Processing logs dated {}", inputList.get(i));
 
       DiscoveryStepAbstract im = new ImportLogFile(this.props, this.es, this.spark);
       im.execute();
@@ -122,7 +122,7 @@ public void preprocess() {
 
       endTime = System.currentTimeMillis();
 
-      LOG.info("Web log preprocessing for logs dated {} complete. Time elapsed {} seconds.", anInputList, (endTime - startTime) / 1000);
+      LOG.info("Web log preprocessing for logs dated {} complete. Time elapsed {} seconds.", inputList.get(i), (endTime - startTime) / 1000);
     }
 
     DiscoveryStepAbstract hg = new HistoryGenerator(this.props, this.es, this.spark);
@@ -140,8 +140,8 @@ public void preprocess() {
   public void logIngest() {
     LOG.info("Starting Web log ingest.");
     ArrayList<String> inputList = (ArrayList<String>) getFileList(props.getProperty(MudrodConstants.DATA_DIR));
-    for (String anInputList : inputList) {
-      timeSuffix = anInputList;
+    for (int i = 0; i < inputList.size(); i++) {
+      timeSuffix = inputList.get(i);
       props.put("TimeSuffix", timeSuffix);
       DiscoveryStepAbstract im = new ImportLogFile(this.props, this.es, this.spark);
       im.execute();
@@ -157,8 +157,8 @@ public void logIngest() {
   public void sessionRestruct() {
     LOG.info("Starting Session reconstruction.");
     ArrayList<String> inputList = (ArrayList<String>) getFileList(props.getProperty(MudrodConstants.DATA_DIR));
-    for (String anInputList : inputList) {
-      timeSuffix = anInputList; // change timeSuffix dynamically
+    for (int i = 0; i < inputList.size(); i++) {
+      timeSuffix = inputList.get(i); // change timeSuffix dynamically
       props.put(MudrodConstants.TIME_SUFFIX, timeSuffix);
       DiscoveryStepAbstract cd = new CrawlerDetection(this.props, this.es, this.spark);
       cd.execute();
diff --git a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/package-info.java
index a5951be..40e7ead 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/discoveryengine/package-info.java
@@ -16,4 +16,4 @@
  * Workflow classes such as weblogDiscoveryEngine, OntologyDiscoveryEngine, and
  * MetadataDiscoveryEngine are also included here.
  */
-package org.apache.sdap.mudrod.discoveryengine;
+package org.apache.sdap.mudrod.discoveryengine;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/driver/ESDriver.java b/core/src/main/java/org/apache/sdap/mudrod/driver/ESDriver.java
index 54b9128..dcf8091 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/driver/ESDriver.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/driver/ESDriver.java
@@ -15,11 +15,10 @@
 
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.google.gson.GsonBuilder;
-
-import org.apache.commons.lang.StringUtils;
 import org.apache.sdap.mudrod.main.MudrodConstants;
 import org.apache.sdap.mudrod.main.MudrodEngine;
 import org.apache.sdap.mudrod.utils.ESTransportClient;
+import org.apache.commons.lang.StringUtils;
 import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
 import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse.AnalyzeToken;
 import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
@@ -168,15 +167,22 @@ public String customAnalyzing(String indexName, String analyzer, String str) thr
     if (list == null) {
       return list;
     }
+    int size = list.size();
     List<String> customlist = new ArrayList<>();
-    for (String aList : list) {
-      customlist.add(this.customAnalyzing(indexName, aList));
+    for (int i = 0; i < size; i++) {
+      customlist.add(this.customAnalyzing(indexName, list.get(i)));
     }
 
     return customlist;
   }
 
   public void deleteAllByQuery(String index, String type, QueryBuilder query) {
+    ImmutableOpenMap<String, MappingMetaData> mappings = getClient().admin().cluster().prepareState().execute().actionGet()
+        .getState().metaData().index(index).getMappings();
+    
+    //check if the type exists
+    if (!mappings.containsKey(type)) return;
+    
     createBulkProcessor();
     SearchResponse scrollResp = getClient().prepareSearch(index).setSearchType(SearchType.QUERY_AND_FETCH).setTypes(type).setScroll(new TimeValue(60000)).setQuery(query).setSize(10000).execute()
         .actionGet();
@@ -223,7 +229,9 @@ public void deleteType(String index, String type) {
     String[] indices = client.admin().indices().getIndex(new GetIndexRequest()).actionGet().getIndices();
 
     ArrayList<String> indexList = new ArrayList<>();
-    for (String indexName : indices) {
+    int length = indices.length;
+    for (int i = 0; i < length; i++) {
+      String indexName = indices[i];
       if (indexName.startsWith(object.toString())) {
         indexList.add(indexName);
       }
@@ -561,6 +569,9 @@ public int getDocCount(String[] index, String[] type) {
     return this.getDocCount(index, type, search);
   }
 
+  /*
+   * Get the number of docs in a type of an index
+   */
   public int getDocCount(String[] index, String[] type, QueryBuilder filterSearch) {
     SearchRequestBuilder countSrBuilder = getClient().prepareSearch(index).setTypes(type).setQuery(filterSearch).setSize(0);
     SearchResponse countSr = countSrBuilder.execute().actionGet();
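
The new guard in deleteAllByQuery() above makes the scroll-delete a no-op when the target type has no mapping yet, so callers no longer need to pre-check. A minimal usage sketch, assuming an ESDriver built from the loaded properties; the index and type names are illustrative values matching the new constants:

    // Minimal caller sketch ("mudrod" and "rawhttp" are illustrative values)
    ESDriver es = new ESDriver(props);
    es.deleteAllByQuery("mudrod", "rawhttp", QueryBuilders.matchAllQuery());
    // With the mapping check, this returns quietly if the "rawhttp" type
    // was never created, instead of failing on a missing mapping.
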
diff --git a/core/src/main/java/org/apache/sdap/mudrod/driver/SparkDriver.java b/core/src/main/java/org/apache/sdap/mudrod/driver/SparkDriver.java
index 14e7f1a..c6b0aef 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/driver/SparkDriver.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/driver/SparkDriver.java
@@ -19,11 +19,43 @@
 import org.apache.spark.serializer.KryoSerializer;
 import org.apache.spark.sql.SQLContext;
 
+import java.io.File;
 import java.io.Serializable;
+import java.net.URISyntaxException;
 import java.util.Properties;
+//import org.apache.spark.sql.SparkSession;
 
 public class SparkDriver implements Serializable {
 
+  //TODO the commented-out code below is the API upgrade
+  //for Spark 2.0.0. It requires a large upgrade and simplification
+  //across the mudrod codebase so should be done in a separate ticket.
+  //  /**
+  //   *
+  //   */
+  //  private static final long serialVersionUID = 1L;
+  //  private SparkSession builder;
+  //
+  //  public SparkDriver() {
+  //    builder = SparkSession.builder()
+  //        .master("local[2]")
+  //        .config("spark.hadoop.validateOutputSpecs", "false")
+  //        .config("spark.files.overwrite", "true")
+  //        .getOrCreate();
+  //  }
+  //
+  //  public SparkSession getBuilder() {
+  //    return builder;
+  //  }
+  //
+  //  public void setBuilder(SparkSession builder) {
+  //    this.builder = builder;
+  //  }
+  //
+  //  public void close() {
+  //    builder.stop();
+  //  }
+
   /**
    *
    */
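
For reference, the Spark 2.0.0 upgrade sketched in the commented-out block above centers on SparkSession, which replaces the separate SparkContext/SQLContext pair. A self-contained sketch of that pattern, with placeholder app name and master:

    import org.apache.spark.sql.SparkSession;

    public class SparkSessionSketch {
      public static void main(String[] args) {
        // One unified entry point instead of SparkContext + SQLContext.
        SparkSession spark = SparkSession.builder()
            .appName("MudrodSparkApp")   // placeholder app name
            .master("local[2]")
            .config("spark.hadoop.validateOutputSpecs", "false")
            .config("spark.files.overwrite", "true")
            .getOrCreate();
        // ... submit jobs via spark.sql(...) or spark.sparkContext() ...
        spark.stop();
      }
    }
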
diff --git a/core/src/main/java/org/apache/sdap/mudrod/driver/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/driver/package-info.java
index cdffdfa..79ab611 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/driver/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/driver/package-info.java
@@ -15,4 +15,4 @@
  * This package includes commonly used Elasticsearch and Spark related
  * functions
  */
-package org.apache.sdap.mudrod.driver;
+package org.apache.sdap.mudrod.driver;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/integration/LinkageIntegration.java b/core/src/main/java/org/apache/sdap/mudrod/integration/LinkageIntegration.java
index edb97ca..b18a913 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/integration/LinkageIntegration.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/integration/LinkageIntegration.java
@@ -16,10 +16,11 @@
 import com.google.gson.Gson;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
-
 import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
+import org.apache.sdap.mudrod.main.MudrodConstants;
+
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.SearchHit;
@@ -42,7 +43,7 @@
   private static final long serialVersionUID = 1L;
   transient List<LinkedTerm> termList = new ArrayList<>();
   DecimalFormat df = new DecimalFormat("#.00");
-  private static final String INDEX_NAME = "indexName";
+  private static final String INDEX_NAME = MudrodConstants.ES_INDEX_NAME;
   private static final String WEIGHT = "weight";
 
   public LinkageIntegration(Properties props, ESDriver es, SparkDriver spark) {
@@ -54,7 +55,7 @@ public LinkageIntegration(Properties props, ESDriver es, SparkDriver spark) {
    */
   class LinkedTerm {
     String term = null;
-    double weight = 0;
+    double weight = 0.0;
     String model = null;
 
     public LinkedTerm(String str, double w, String m) {
@@ -108,13 +109,10 @@ public Object execute(Object o) {
       }
 
       double finalWeight = tmp + ((sumModelWeight - 2) * 0.05);
-      if (finalWeight < 0) {
-        finalWeight = 0;
-      }
-
-      if (finalWeight > 1) {
-        finalWeight = 1;
-      }
+      
+      if (finalWeight < 0) finalWeight = 0;
+      if (finalWeight > 1) finalWeight = 1;
+      
       termsMap.put(entry.getKey(), Double.parseDouble(df.format(finalWeight)));
     }
 
@@ -173,32 +171,32 @@ public JsonObject getIngeratedListInJson(String input) {
    * the similarities from different sources
    */
   public Map<String, List<LinkedTerm>> aggregateRelatedTermsFromAllmodel(String input) {
-    aggregateRelatedTerms(input, props.getProperty("userHistoryLinkageType"));
-    aggregateRelatedTerms(input, props.getProperty("clickStreamLinkageType"));
-    aggregateRelatedTerms(input, props.getProperty("metadataLinkageType"));
-    aggregateRelatedTermsSWEET(input, props.getProperty("ontologyLinkageType"));
+    aggregateRelatedTerms(input, MudrodConstants.USE_HISTORY_LINKAGE_TYPE);
+    aggregateRelatedTerms(input, MudrodConstants.CLICK_STREAM_LINKAGE_TYPE);
+    aggregateRelatedTerms(input, MudrodConstants.METADATA_LINKAGE_TYPE);
+    aggregateRelatedTermsSWEET(input, MudrodConstants.ONTOLOGY_LINKAGE_TYPE);
 
     return termList.stream().collect(Collectors.groupingBy(w -> w.term));
   }
 
   public int getModelweight(String model) {
-    if (model.equals(props.getProperty("userHistoryLinkageType"))) {
-      return Integer.parseInt(props.getProperty("userHistory_w"));
+    if (model.equals(MudrodConstants.USE_HISTORY_LINKAGE_TYPE)) {
+      return Integer.parseInt(props.getProperty(MudrodConstants.USER_HISTORY_W));
     }
 
-    if (model.equals(props.getProperty("clickStreamLinkageType"))) {
-      return Integer.parseInt(props.getProperty("clickStream_w"));
+    if (model.equals(MudrodConstants.CLICK_STREAM_LINKAGE_TYPE)) {
+      return Integer.parseInt(props.getProperty(MudrodConstants.CLICKSTREAM_W));
     }
 
-    if (model.equals(props.getProperty("metadataLinkageType"))) {
-      return Integer.parseInt(props.getProperty("metadata_w"));
+    if (model.equals(MudrodConstants.METADATA_LINKAGE_TYPE)) {
+      return Integer.parseInt(props.getProperty(MudrodConstants.METADATA_W));
     }
 
-    if (model.equals(props.getProperty("ontologyLinkageType"))) {
-      return Integer.parseInt(props.getProperty("ontology_w"));
+    if (model.equals(MudrodConstants.ONTOLOGY_LINKAGE_TYPE)) {
+      return Integer.parseInt(props.getProperty(MudrodConstants.ONTOLOGY_W));
     }
 
-    return 999999;
+    return Integer.MAX_VALUE;
   }
 
   /**
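
The finalWeight clamp introduced above keeps aggregated linkage weights inside [0, 1]. An equivalent formulation of the same logic, shown only as a sketch, with tmp and sumModelWeight standing in for the locals in execute():

    // Equivalent clamp using Math.min/Math.max
    static double clampWeight(double tmp, int sumModelWeight) {
      double finalWeight = tmp + ((sumModelWeight - 2) * 0.05);
      return Math.max(0.0, Math.min(1.0, finalWeight));
    }
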
diff --git a/core/src/main/java/org/apache/sdap/mudrod/integration/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/integration/package-info.java
index fb2aab5..4ed6a07 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/integration/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/integration/package-info.java
@@ -15,4 +15,4 @@
  * This package includes integration method of web log, ontology, and metdata
  * mining results.
  */
-package org.apache.sdap.mudrod.integration;
+package org.apache.sdap.mudrod.integration;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/main/MudrodConstants.java b/core/src/main/java/org/apache/sdap/mudrod/main/MudrodConstants.java
index d9435d9..afdf04a 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/main/MudrodConstants.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/main/MudrodConstants.java
@@ -13,8 +13,6 @@
  */
 package org.apache.sdap.mudrod.main;
 
-import org.apache.sdap.mudrod.ontology.Ontology;
-
 /**
  * Class contains static constant keys and values relating to Mudrod
  * configuration properties. Property values are read from <a href=
@@ -22,54 +20,66 @@
  */
 public interface MudrodConstants {
 
-  public static final String CLEANUP_TYPE_PREFIX = "Cleanup_type_prefix";
-
-  public static final String CLICK_STREAM_LINKAGE_TYPE = "clickStreamLinkageType";
+  public static final String CLEANUP_TYPE = "cleanupLog";
 
-  public static final String CLICK_STREAM_MATRIX_TYPE = "clickStreamMatrixType";
+  public static final String CLICK_STREAM_LINKAGE_TYPE = "clickStreamLinkage";
 
-  public static final String CLICKSTREAM_SVD_DIM = "clickstreamSVDDimension";
+  public static final String CLICK_STREAM_MATRIX_TYPE = "clickStreamMatrix";
 
-  public static final String CLICKSTREAM_W = "clickStream_w";
+  public static final String CLICKSTREAM_SVD_DIM = "mudrod.clickstream.svd.d";
 
-  public static final String COMMENT_TYPE = "commentType";
+  public static final String CLICKSTREAM_W = "mudrod.clickstream.weight";
+  
+  public static final String CLICKSTREAM_PATH = "mudrod.clickstream.path";
+  
+  public static final String CLICKSTREAM_SVD_PATH = "mudrod.clickstream.svd.path";
 
   /** Defined on CLI */
   public static final String DATA_DIR = "dataDir";
 
-  public static final String DOWNLOAD_F = "downloadf";
+  public static final String DOWNLOAD_WEIGHT = "mudrod.download.weight";
 
-  public static final String DOWNLOAD_WEIGHT = "downloadWeight";
+  public static final String ES_CLUSTER = "mudrod.cluster.name";
 
-  public static final String ES_CLUSTER = "clusterName";
+  public static final String ES_TRANSPORT_TCP_PORT = "mudrod.es.transport.tcp.port";
 
-  public static final String ES_TRANSPORT_TCP_PORT = "ES_Transport_TCP_Port";
+  public static final String ES_UNICAST_HOSTS = "mudrod.es.unicast.hosts";
 
-  public static final String ES_UNICAST_HOSTS = "ES_unicast_hosts";
+  public static final String ES_HTTP_PORT = "mudrod.es.http.port";
 
-  public static final String ES_HTTP_PORT = "ES_HTTP_port";
+  public static final String ES_INDEX_NAME = "mudrod.es.index";
 
-  public static final String ES_INDEX_NAME = "indexName";
+  public static final String FTP_PREFIX = "mudrod.ftp.prefix";
 
-  public static final String FTP_PREFIX = "ftpPrefix";
+  public static final String FTP_TYPE = "rawftp";
+  
+  public static final String FTP_LOG = "ftp";
 
-  public static final String FTP_TYPE_PREFIX = "FTP_type_prefix";
+  public static final String HTTP_PREFIX = "mudrod.http.prefix";
 
-  public static final String HTTP_PREFIX = "httpPrefix";
+  public static final String HTTP_TYPE = "rawhttp";
+  
+  public static final String HTTP_LOG = "http";
+  
+  public static final String BASE_URL = "mudrod.base.url";
+  
+  public static final String BLACK_LIST_REQUEST = "mudrod.black.request.list";
+  
+  public static final String BLACK_LIST_AGENT = "mudrod.black.agent.list";
 
-  public static final String HTTP_TYPE_PREFIX = "HTTP_type_prefix";
+  public static final String LOG_INDEX = "mudrod.log.index";
 
-  public static final String LOG_INDEX = "logIndexName";
+  public static final String METADATA_LINKAGE_TYPE = "MetadataLinkage";
+  
+  public static final String METADATA_DOWNLOAD_URL = "mudrod.metadata.download.url";
 
-  public static final String METADATA_LINKAGE_TYPE = "metadataLinkageType";
+  public static final String METADATA_SVD_DIM = "mudrod.metadata.svd.d";
 
-  public static final String METADATA_SVD_DIM = "metadataSVDDimension";
+  public static final String METADATA_URL = "mudrod.metadata.url";
 
-  public static final String METADATA_URL = "metadataurl";
+  public static final String METADATA_W = "mudrod.metadata.weight";
 
-  public static final String METADATA_W = "metadata_w";
-
-  public static final String MINI_USER_HISTORY = "mini_userHistory";
+  public static final String QUERY_MIN = "mudrod.query.min";
 
   public static final String MUDROD = "mudrod";
 
@@ -80,44 +90,80 @@
    */
   public static final String ONTOLOGY_IMPL = MUDROD + "ontology.implementation";
 
-  public static final String ONTOLOGY_LINKAGE_TYPE = "ontologyLinkageType";
+  public static final String ONTOLOGY_LINKAGE_TYPE = "ontologyLinkage";
 
-  public static final String ONTOLOGY_W = "ontology_w";
+  public static final String ONTOLOGY_W = "mudrod.ontology.weight";
+  
+  public static final String ONTOLOGY_PATH = "mudrod.ontology.path";
+  
+  public static final String ONTOLOGY_INPUT_PATH = "mudrod.ontology.input.path";
 
-  public static final String PROCESS_TYPE = "processingType";
+  public static final String PROCESS_TYPE = "mudrod.processing.type";
 
   /** Defined on CLI */
-  public static final String RAW_METADATA_PATH = "raw_metadataPath";
-
-  public static final String RAW_METADATA_TYPE = "raw_metadataType";
-
-  public static final String SEARCH_F = "searchf";
-
-  public static final String SENDING_RATE = "sendingrate";
-
-  public static final String SESSION_PORT = "SessionPort";
-
-  public static final String SESSION_STATS_PREFIX = "SessionStats_prefix";
-
-  public static final String SESSION_URL = "SessionUrl";
-
-  public static final String SPARK_APP_NAME = "spark.app.name";
-
-  public static final String SPARK_MASTER = "spark.master";
+  public static final String METADATA_DOWNLOAD = "mudrod.metadata.download";
+  
+  public static final String RAW_METADATA_PATH = "mudrod.metadata.path";
+
+  public static final String RAW_METADATA_TYPE = "mudrod.metadata.type";
+  
+  public static final String METADATA_MATRIX_PATH = "mudrod.metadata.matrix.path";
+  
+  public static final String METADATA_SVD_PATH = "mudrod.metadata.svd.path";
+  
+  public static final String RECOM_METADATA_TYPE = "recommedation.metadata";
+  
+  public static final String METADATA_ID = "mudrod.metadata.id";
+  
+  public static final String SEMANTIC_FIELDS = "mudrod.metadata.semantic.fields";
+  
+  public static final String METADATA_WORD_SIM_TYPE = "metadata.word.sim";
+  
+  public static final String METADATA_FEATURE_SIM_TYPE = "metadata.feature.sim";
+  
+  public static final String METADATA_SESSION_SIM_TYPE = "metadata.session.sim";
+  
+  public static final String METADATA_TERM_MATRIX_PATH = "metadata.term.matrix.path";
+  
+  public static final String METADATA_WORD_MATRIX_PATH = "metadata.word.matrix.path";
+  
+  public static final String METADATA_SESSION_MATRIX_PATH = "metadata.session.matrix.path";
+
+  public static final String REQUEST_RATE = "mudrod.request.rate";
+
+  public static final String SESSION_PORT = "mudrod.session.port";
+
+  public static final String SESSION_STATS_TYPE = "sessionstats";
+
+  public static final String SESSION_URL = "mudrod.session.url";
+
+  public static final String SPARK_APP_NAME = "mudrod.spark.app.name";
+
+  public static final String SPARK_MASTER = "mudrod.spark.master";
   /**
    * Absolute local location of javaSVMWithSGDModel directory. This is typically
    * <code>file:///usr/local/mudrod/core/src/main/resources/javaSVMWithSGDModel</code>
    */
-  public static final String SVM_SGD_MODEL = "svmSgdModel";
+  public static final String RANKING_MODEL = "mudrod.ranking.model";
 
-  public static final String TIMEGAP = "timegap";
+  public static final String REQUEST_TIME_GAP = "mudrod.request.time.gap";
 
   public static final String TIME_SUFFIX = "TimeSuffix";
 
-  public static final String USE_HISTORY_LINKAGE_TYPE = "userHistoryLinkageType";
-
-  public static final String USER_HISTORY_W = "userHistory_w";
-
-  public static final String VIEW_F = "viewf";
+  public static final String USE_HISTORY_LINKAGE_TYPE = "userHistoryLinkage";
+
+  public static final String USER_HISTORY_W = "mudrod.user.history.weight";
+  
+  public static final String USER_HISTORY_PATH = "mudrod.user.history.path";
+
+  public static final String VIEW_F = "mudrod.view.freq";
+  
+  public static final String VIEW_MARKER = "mudrod.view.url.marker";
+  
+  public static final String SEARCH_MARKER = "mudrod.search.url.marker";
+  
+  public static final String SEARCH_F = "mudrod.search.freq";
+  
+  public static final String DOWNLOAD_F = "mudrod.download.freq";
 
 }
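
The renamed constants above assume a flat properties file rather than the old XML layout. A hypothetical config.properties excerpt using a few of the new keys; every value shown is illustrative, not a project default:

    # Hypothetical excerpt of config.properties (illustrative values only)
    mudrod.cluster.name = MudrodES
    mudrod.es.unicast.hosts = 127.0.0.1
    mudrod.es.http.port = 9200
    mudrod.es.transport.tcp.port = 9300
    mudrod.es.index = mudrod
    mudrod.spark.app.name = MudrodSparkApp
    mudrod.spark.master = local[4]
    mudrod.clickstream.weight = 2
    mudrod.metadata.weight = 2
    mudrod.ontology.weight = 2
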
diff --git a/core/src/main/java/org/apache/sdap/mudrod/main/MudrodEngine.java b/core/src/main/java/org/apache/sdap/mudrod/main/MudrodEngine.java
index 359ae71..b282ed6 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/main/MudrodEngine.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/main/MudrodEngine.java
@@ -13,6 +13,15 @@
  */
 package org.apache.sdap.mudrod.main;
 
+import org.apache.sdap.mudrod.discoveryengine.DiscoveryEngineAbstract;
+import org.apache.sdap.mudrod.discoveryengine.MetadataDiscoveryEngine;
+import org.apache.sdap.mudrod.discoveryengine.OntologyDiscoveryEngine;
+import org.apache.sdap.mudrod.discoveryengine.RecommendEngine;
+import org.apache.sdap.mudrod.discoveryengine.WeblogDiscoveryEngine;
+import org.apache.sdap.mudrod.driver.ESDriver;
+import org.apache.sdap.mudrod.driver.SparkDriver;
+import org.apache.sdap.mudrod.integration.LinkageIntegration;
+
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
@@ -22,23 +31,9 @@
 import org.apache.commons.cli.Options;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.sdap.mudrod.discoveryengine.DiscoveryEngineAbstract;
-import org.apache.sdap.mudrod.discoveryengine.MetadataDiscoveryEngine;
-import org.apache.sdap.mudrod.discoveryengine.OntologyDiscoveryEngine;
-import org.apache.sdap.mudrod.discoveryengine.RecommendEngine;
-import org.apache.sdap.mudrod.discoveryengine.WeblogDiscoveryEngine;
-import org.apache.sdap.mudrod.driver.ESDriver;
-import org.apache.sdap.mudrod.driver.SparkDriver;
-import org.apache.sdap.mudrod.integration.LinkageIntegration;
-import org.jdom2.Document;
-import org.jdom2.Element;
-import org.jdom2.JDOMException;
-import org.jdom2.input.SAXBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.sdap.mudrod.main.MudrodConstants.DATA_DIR;
-
 import java.io.BufferedOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
@@ -52,11 +47,13 @@
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;
 
+import static org.apache.sdap.mudrod.main.MudrodConstants.DATA_DIR;
+
 /**
  * Main entry point for Running the Mudrod system. Invocation of this class is
  * tightly linked to the primary Mudrod configuration which can be located at
  * <a href=
- * "https://github.com/mudrod/mudrod/blob/master/core/src/main/resources/config.xml">config.xml</a>.
+ * "https://github.com/mudrod/mudrod/blob/master/core/src/main/resources/config.properties">config.properties</a>.
  */
 public class MudrodEngine {
 
@@ -138,13 +135,13 @@ private InputStream locateConfig() {
       LOG.info("Loaded config file from " + configFile.getAbsolutePath());
       return configStream;
     } catch (IOException e) {
-      LOG.info("File specified by environment variable " + MudrodConstants.MUDROD_CONFIG + "=\'" + configLocation + "\' could not be loaded. " + e.getMessage());
+      LOG.info("File specified by environment variable " + MudrodConstants.MUDROD_CONFIG + "=\'" + configLocation + "\' could not be loaded. Default configuration will be used." + e.getMessage());
     }
 
-    InputStream configStream = MudrodEngine.class.getClassLoader().getResourceAsStream("config.xml");
+    InputStream configStream = MudrodEngine.class.getClassLoader().getResourceAsStream("config.properties");
 
     if (configStream != null) {
-      LOG.info("Loaded config file from {}", MudrodEngine.class.getClassLoader().getResource("config.xml").getPath());
+      LOG.info("Loaded config file from {}", MudrodEngine.class.getClassLoader().getResource("config.properties").getPath());
     }
 
     return configStream;
@@ -152,34 +149,24 @@ private InputStream locateConfig() {
 
   /**
    * Load the configuration provided at <a href=
-   * "https://github.com/mudrod/mudrod/blob/master/core/src/main/resources/config.xml">config.xml</a>.
+   * "https://github.com/mudrod/mudrod/blob/master/core/src/main/resources/config.properties">config.properties</a>.
    *
    * @return a populated {@link java.util.Properties} object.
    */
   public Properties loadConfig() {
-    SAXBuilder saxBuilder = new SAXBuilder();
-
     InputStream configStream = locateConfig();
-
-    Document document;
     try {
-      document = saxBuilder.build(configStream);
-      Element rootNode = document.getRootElement();
-      List<Element> paraList = rootNode.getChildren("para");
-
-      for (Element paraNode : paraList) {
-        String attributeName = paraNode.getAttributeValue("name");
-        if (MudrodConstants.SVM_SGD_MODEL.equals(attributeName)) {
-          props.put(attributeName, decompressSVMWithSGDModel(paraNode.getTextTrim()));
-        } else {
-          props.put(attributeName, paraNode.getTextTrim());
-        }
+      props.load(configStream);
+      for(String key : props.stringPropertyNames()) {
+        props.put(key, props.getProperty(key).trim());
       }
-    } catch (JDOMException | IOException e) {
-      LOG.error("Exception whilst retrieving or processing XML contained within 'config.xml'!", e);
+      String rankingModelPath = props.getProperty(MudrodConstants.RANKING_MODEL);
+      props.put(MudrodConstants.RANKING_MODEL, decompressSVMWithSGDModel(rankingModelPath));
+    } catch (IOException e) {
+      LOG.info("Fail to load the sytem config file");
     }
+    
     return getConfig();
-
   }
 
   private String decompressSVMWithSGDModel(String archiveName) throws IOException {
@@ -267,6 +254,7 @@ public void startFullIngest() {
   /**
    * Only preprocess various {@link DiscoveryEngineAbstract} implementations for
    * weblog, ontology and metadata, linkage discovery and integration.
+   * This command does not perform log preprocessing.
    */
   public void startProcessing() {
     DiscoveryEngineAbstract wd = new WeblogDiscoveryEngine(props, es, spark);
@@ -383,7 +371,7 @@ public static void main(String[] args) {
 
       me.es = new ESDriver(me.getConfig());
       me.spark = new SparkDriver(me.getConfig());
-      loadFullConfig(me, dataDir);
+      loadPathConfig(me, dataDir);
       if (processingType != null) {
         switch (processingType) {
         case PROCESSING:
@@ -410,27 +398,19 @@ public static void main(String[] args) {
     }
   }
 
-  private static void loadFullConfig(MudrodEngine me, String dataDir) {
-    //TODO all of the properties defined below, which are determined are
-    //runtime need to be added to MudrodConstants.java and referenced 
-    //accordingly and consistently from Properties.getProperty(MudrodConstant...);
-    me.props.put("ontologyInputDir", dataDir + "SWEET_ocean/");
-    me.props.put("oceanTriples", dataDir + "Ocean_triples.csv");
-    me.props.put("userHistoryMatrix", dataDir + "UserHistoryMatrix.csv");
-    me.props.put("clickstreamMatrix", dataDir + "ClickstreamMatrix.csv");
-    me.props.put("metadataMatrix", dataDir + "MetadataMatrix.csv");
-    me.props.put("clickstreamSVDMatrix_tmp", dataDir + "clickstreamSVDMatrix_tmp.csv");
-    me.props.put("metadataSVDMatrix_tmp", dataDir + "metadataSVDMatrix_tmp.csv");
-    me.props.put("raw_metadataPath", dataDir + me.props.getProperty(MudrodConstants.RAW_METADATA_TYPE));
-
-    me.props.put("jtopia", dataDir + "jtopiaModel");
-    me.props.put("metadata_term_tfidf_matrix", dataDir + "metadata_term_tfidf.csv");
-    me.props.put("metadata_word_tfidf_matrix", dataDir + "metadata_word_tfidf.csv");
-    me.props.put("session_metadata_Matrix", dataDir + "metadata_session_coocurrence_matrix.csv");
-
-    me.props.put("metadataOBCode", dataDir + "MetadataOHCode");
-    me.props.put("metadata_topic", dataDir + "metadata_topic");
-    me.props.put("metadata_topic_matrix", dataDir + "metadata_topic_matrix.csv");
+  private static void loadPathConfig(MudrodEngine me, String dataDir) {
+    me.props.put(MudrodConstants.ONTOLOGY_INPUT_PATH, dataDir + "SWEET_ocean/");
+    me.props.put(MudrodConstants.ONTOLOGY_PATH, dataDir + "ocean_triples.csv");
+    me.props.put(MudrodConstants.USER_HISTORY_PATH, dataDir + "userhistorymatrix.csv");
+    me.props.put(MudrodConstants.CLICKSTREAM_PATH, dataDir + "clickstreammatrix.csv");
+    me.props.put(MudrodConstants.METADATA_MATRIX_PATH, dataDir + "metadatamatrix.csv");
+    me.props.put(MudrodConstants.CLICKSTREAM_SVD_PATH, dataDir + "clickstreamsvdmatrix_tmp.csv");
+    me.props.put(MudrodConstants.METADATA_SVD_PATH, dataDir + "metadatasvdMatrix_tmp.csv");
+    me.props.put(MudrodConstants.RAW_METADATA_PATH, dataDir + me.props.getProperty(MudrodConstants.RAW_METADATA_TYPE));
+
+    me.props.put(MudrodConstants.METADATA_TERM_MATRIX_PATH, dataDir + "metadata_term_tfidf.csv");
+    me.props.put(MudrodConstants.METADATA_WORD_MATRIX_PATH, dataDir + "metadata_word_tfidf.csv");
+    me.props.put(MudrodConstants.METADATA_SESSION_MATRIX_PATH, dataDir + "metadata_session_coocurrence_matrix.csv");
   }
 
   /**
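
The XML-to-properties switch in loadConfig() above reduces to java.util.Properties plus a trim pass over the values. A standalone sketch of that pattern, assuming config.properties is on the classpath:

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    public class ConfigLoadSketch {
      public static Properties load() throws IOException {
        Properties props = new Properties();
        try (InputStream in = ConfigLoadSketch.class.getClassLoader()
            .getResourceAsStream("config.properties")) {
          if (in == null) {
            throw new IOException("config.properties not found on classpath");
          }
          props.load(in);
        }
        // Trim whitespace from every value, mirroring loadConfig()
        for (String key : props.stringPropertyNames()) {
          props.setProperty(key, props.getProperty(key).trim());
        }
        return props;
      }
    }
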
diff --git a/core/src/main/java/org/apache/sdap/mudrod/main/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/main/package-info.java
index 2f4496a..8a3af67 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/main/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/main/package-info.java
@@ -14,4 +14,4 @@
 /**
  * This package includes Main entry point for Running the Mudrod system.
  */
-package org.apache.sdap.mudrod.main;
+package org.apache.sdap.mudrod.main;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/metadata/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/metadata/package-info.java
index 4efe779..b7b6258 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/metadata/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/metadata/package-info.java
@@ -15,4 +15,4 @@
  * This package includes metadata pre-processing, processing, and data structure
  * classes.
  */
-package org.apache.sdap.mudrod.metadata;
+package org.apache.sdap.mudrod.metadata;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/ApiHarvester.java b/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/ApiHarvester.java
index 32b4609..d66711e 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/ApiHarvester.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/ApiHarvester.java
@@ -17,13 +17,12 @@
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
 import com.google.gson.JsonParser;
-
-import org.apache.commons.io.IOUtils;
 import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
 import org.apache.sdap.mudrod.main.MudrodConstants;
 import org.apache.sdap.mudrod.utils.HttpRequest;
+import org.apache.commons.io.IOUtils;
 import org.elasticsearch.action.index.IndexRequest;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -57,7 +56,8 @@ public Object execute() {
     //remove old metadata from ES
     es.deleteType(props.getProperty(MudrodConstants.ES_INDEX_NAME), props.getProperty(MudrodConstants.RAW_METADATA_TYPE));
     //harvest new metadata using PO.DAAC web services
-    harvestMetadatafromWeb();
+    if(props.getProperty(MudrodConstants.METADATA_DOWNLOAD).equals("1")) 
+      harvestMetadatafromWeb();
     es.createBulkProcessor();
     addMetadataMapping();
     importToES();
@@ -125,7 +125,9 @@ private void harvestMetadatafromWeb() {
     int doc_length = 0;
     JsonParser parser = new JsonParser();
     do {
-      String searchAPI = "https://podaac.jpl.nasa.gov/api/dataset?startIndex=" + Integer.toString(startIndex) + "&entries=10&sortField=Dataset-AllTimePopularity&sortOrder=asc&id=&value=&search=";
+      //String searchAPI = "https://podaac.jpl.nasa.gov/api/dataset?startIndex=" + Integer.toString(startIndex) + "&entries=10&sortField=Dataset-AllTimePopularity&sortOrder=asc&id=&value=&search=";
+      String searchAPI = props.getProperty(MudrodConstants.METADATA_DOWNLOAD_URL);
+      searchAPI = searchAPI.replace("$startIndex", Integer.toString(startIndex));
       HttpRequest http = new HttpRequest();
       String response = http.getRequest(searchAPI);
 
@@ -148,7 +150,7 @@ private void harvestMetadatafromWeb() {
         int docId = startIndex + i;
         File itemfile = new File(props.getProperty(MudrodConstants.RAW_METADATA_PATH) + "/" + docId + ".json");
 
-        try (FileWriter fw = new FileWriter(itemfile.getAbsoluteFile()); BufferedWriter bw = new BufferedWriter(fw)) {
+        try (FileWriter fw = new FileWriter(itemfile.getAbsoluteFile()); BufferedWriter bw = new BufferedWriter(fw);) {
           itemfile.createNewFile();
           bw.write(item.toString());
         } catch (IOException e) {
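
With the harvest endpoint now read from configuration, the $startIndex token is substituted on each page request. One plausible property pair driving the new code path; the URL is the PO.DAAC endpoint that was previously hard-coded (see the commented-out line above):

    # Enable harvesting and point at the paged search endpoint
    mudrod.metadata.download = 1
    mudrod.metadata.download.url = https://podaac.jpl.nasa.gov/api/dataset?startIndex=$startIndex&entries=10&sortField=Dataset-AllTimePopularity&sortOrder=asc&id=&value=&search=
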
diff --git a/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/MatrixGenerator.java b/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/MatrixGenerator.java
index 63565b2..e4a6320 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/MatrixGenerator.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/MatrixGenerator.java
@@ -61,7 +61,7 @@ public Object execute() {
     LOG.info("Metadata matrix started");
     startTime = System.currentTimeMillis();
 
-    String metadataMatrixFile = props.getProperty("metadataMatrix");
+    String metadataMatrixFile = props.getProperty(MudrodConstants.METADATA_MATRIX_PATH);
     try {
       MetadataExtractor extractor = new MetadataExtractor();
       JavaPairRDD<String, List<String>> metadataTermsRDD = extractor.loadMetadata(this.es, this.spark.sc, props.getProperty(MudrodConstants.ES_INDEX_NAME), props.getProperty(MudrodConstants.RAW_METADATA_TYPE));
diff --git a/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/package-info.java
index f6ac25e..ffecbc8 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/metadata/pre/package-info.java
@@ -14,4 +14,4 @@
 /**
  * This package includes metadata pre-processing functions.
  */
-package org.apache.sdap.mudrod.metadata.pre;
+package org.apache.sdap.mudrod.metadata.pre;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/metadata/process/MetadataAnalyzer.java b/core/src/main/java/org/apache/sdap/mudrod/metadata/process/MetadataAnalyzer.java
index 80e23c1..493a055 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/metadata/process/MetadataAnalyzer.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/metadata/process/MetadataAnalyzer.java
@@ -16,6 +16,7 @@
 import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
+import org.apache.sdap.mudrod.main.MudrodConstants;
 import org.apache.sdap.mudrod.semantics.SVDAnalyzer;
 import org.apache.sdap.mudrod.utils.LinkageTriple;
 import org.slf4j.Logger;
@@ -67,14 +68,14 @@ public Object execute() {
       startTime = System.currentTimeMillis();
 
       SVDAnalyzer analyzer = new SVDAnalyzer(props, es, spark);
-      int svdDimension = Integer.parseInt(props.getProperty("metadataSVDDimension"));
-      String metadataMatrixFile = props.getProperty("metadataMatrix");
-      String svdMatrixFileName = props.getProperty("metadataSVDMatrix_tmp");
+      int svdDimension = Integer.parseInt(props.getProperty(MudrodConstants.METADATA_SVD_DIM));
+      String metadataMatrixFile = props.getProperty(MudrodConstants.METADATA_MATRIX_PATH);
+      String svdMatrixFileName = props.getProperty(MudrodConstants.METADATA_SVD_PATH);
 
       analyzer.getSVDMatrix(metadataMatrixFile, svdDimension, svdMatrixFileName);
       List<LinkageTriple> triples = analyzer.calTermSimfromMatrix(svdMatrixFileName);
 
-      analyzer.saveToES(triples, props.getProperty("indexName"), props.getProperty("metadataLinkageType"));
+      analyzer.saveToES(triples, props.getProperty(MudrodConstants.ES_INDEX_NAME), MudrodConstants.METADATA_LINKAGE_TYPE);
 
     } catch (Exception e) {
       e.printStackTrace();
diff --git a/core/src/main/java/org/apache/sdap/mudrod/metadata/process/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/metadata/process/package-info.java
index 4531ad8..a0c0799 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/metadata/process/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/metadata/process/package-info.java
@@ -14,4 +14,4 @@
 /**
  * This package includes metadata processing classes.
  */
-package org.apache.sdap.mudrod.metadata.process;
+package org.apache.sdap.mudrod.metadata.process;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/Metadata.java b/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/Metadata.java
new file mode 100644
index 0000000..016fd16
--- /dev/null
+++ b/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/Metadata.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License"); you
+ * may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sdap.mudrod.metadata.structure;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+
+import org.apache.sdap.mudrod.driver.ESDriver;
+
+/**
+ * ClassName: Metadata Function: generic metadata getter and setter methods
+ */
+public abstract class Metadata implements Serializable  {
+
+  private static final long serialVersionUID = 1L;
+  // shortname: data set short name
+  protected String shortname;
+  
+  public Metadata() {
+    // Default constructor
+  }
+
+  /**
+   * Creates a new instance of Metadata.
+   *
+   * @param shortname data set short name
+   */
+  public Metadata(String shortname) {
+    this.shortname = shortname;
+  }
+
+  /**
+   * getShortName:get short name of data set
+   *
+   * @return data set short name
+   */
+  public String getShortName() {
+    return this.shortname;
+  }
+
+  /**
+   * getAllTermList: get all terms of the data set
+   *
+   * @return list of all terms for the data set
+   */
+  public abstract List<String> getAllTermList();
+}
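
Since MetadataExtractor (next file) now works against this Metadata abstraction, supporting a catalog other than PO.DAAC would mean adding a subclass. A hypothetical minimal subclass; the source field names here are invented for illustration:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    // Hypothetical subclass for a non-PO.DAAC catalog
    public class ExampleCatalogMetadata extends Metadata {
      private static final long serialVersionUID = 1L;
      private final List<String> terms = new ArrayList<>();

      public ExampleCatalogMetadata(Map<String, Object> source) {
        super((String) source.get("shortName")); // assumed source field
        Object t = source.get("terms");          // assumed source field
        if (t instanceof List) {
          for (Object o : (List<?>) t) {
            terms.add(String.valueOf(o));
          }
        }
      }

      @Override
      public List<String> getAllTermList() {
        return terms;
      }
    }
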
diff --git a/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/MetadataExtractor.java b/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/MetadataExtractor.java
index 379d5b9..c620c73 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/MetadataExtractor.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/MetadataExtractor.java
@@ -54,7 +54,7 @@ public MetadataExtractor() {
    * list extracted from metadata variables.
    */
   public JavaPairRDD<String, List<String>> loadMetadata(ESDriver es, JavaSparkContext sc, String index, String type) {
-    List<PODAACMetadata> metadatas = this.loadMetadataFromES(es, index, type);
+    List<Metadata> metadatas = this.loadMetadataFromES(es, index, type);
     JavaPairRDD<String, List<String>> metadataTermsRDD = this.buildMetadataRDD(es, sc, index, metadatas);
     return metadataTermsRDD;
   }
@@ -67,15 +67,15 @@ public MetadataExtractor() {
    * @param type  metadata type name
    * @return metadata list
    */
-  protected List<PODAACMetadata> loadMetadataFromES(ESDriver es, String index, String type) {
+  protected List<Metadata> loadMetadataFromES(ESDriver es, String index, String type) {
 
-    List<PODAACMetadata> metadatas = new ArrayList<PODAACMetadata>();
+    List<Metadata> metadatas = new ArrayList<Metadata>();
     SearchResponse scrollResp = es.getClient().prepareSearch(index).setTypes(type).setQuery(QueryBuilders.matchAllQuery()).setScroll(new TimeValue(60000)).setSize(100).execute().actionGet();
 
     while (true) {
       for (SearchHit hit : scrollResp.getHits().getHits()) {
         Map<String, Object> result = hit.getSource();
-        String shortname = (String) result.get("Dataset-ShortName");
+        /*String shortname = (String) result.get("Dataset-ShortName");
         List<String> topic = (List<String>) result.get("DatasetParameter-Topic");
         List<String> term = (List<String>) result.get("DatasetParameter-Term");
         List<String> keyword = (List<String>) result.get("Dataset-Metadata");
@@ -91,7 +91,10 @@ public MetadataExtractor() {
         } catch (InterruptedException | ExecutionException e) {
           e.printStackTrace();
 
-        }
+        }*/
+        
+        // NOTE: substitute another Metadata subclass here to support other kinds of metadata
+        Metadata metadata = new PODAACMetadata(result, es, index);
         metadatas.add(metadata);
       }
       scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
@@ -113,16 +116,16 @@ public MetadataExtractor() {
    * @return PairRDD, in each pair key is metadata short name and value is term
    * list extracted from metadata variables.
    */
-  protected JavaPairRDD<String, List<String>> buildMetadataRDD(ESDriver es, JavaSparkContext sc, String index, List<PODAACMetadata> metadatas) {
-    JavaRDD<PODAACMetadata> metadataRDD = sc.parallelize(metadatas);
-    JavaPairRDD<String, List<String>> metadataTermsRDD = metadataRDD.mapToPair(new PairFunction<PODAACMetadata, String, List<String>>() {
+  protected JavaPairRDD<String, List<String>> buildMetadataRDD(ESDriver es, JavaSparkContext sc, String index, List<Metadata> metadatas) {
+    JavaRDD<Metadata> metadataRDD = sc.parallelize(metadatas);
+    JavaPairRDD<String, List<String>> metadataTermsRDD = metadataRDD.mapToPair(new PairFunction<Metadata, String, List<String>>() {
       /**
        *
        */
       private static final long serialVersionUID = 1L;
 
       @Override
-      public Tuple2<String, List<String>> call(PODAACMetadata metadata) throws Exception {
+      public Tuple2<String, List<String>> call(Metadata metadata) throws Exception {
         return new Tuple2<String, List<String>>(metadata.getShortName(), metadata.getAllTermList());
       }
     }).reduceByKey(new Function2<List<String>, List<String>, List<String>>() {
diff --git a/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/PODAACMetadata.java b/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/PODAACMetadata.java
index 4c3070b..3624d1d 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/PODAACMetadata.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/PODAACMetadata.java
@@ -16,322 +16,368 @@
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+
+import org.apache.sdap.mudrod.driver.ESDriver;
 
 /**
  * ClassName: PODAACMetadata Function: PODAACMetadata setter and getter methods
  */
-public class PODAACMetadata implements Serializable {
-
-  /**
-   *
-   */
-  private static final long serialVersionUID = 1L;
-  // shortname: data set short name
-  private String shortname;
-  // abstractStr: data set abstract
-  private String abstractStr;
-  // isoTopic: data set topic
-  private String isoTopic;
-  // sensor: sensor
-  private String sensor;
-  // source: data source
-  private String source;
-  // project: data project
-  private String project;
-  // hasAbstarct: whether data set has abstract
-  boolean hasAbstarct;
-
-  // longnameList: data set long name list
-  private List<String> longnameList;
-  // keywordList:data set key word list
-  private List<String> keywordList;
-  // termList: data set term list
-  private List<String> termList;
-  // topicList: data set topic list
-  private List<String> topicList;
-  // variableList: data set variable list
-  private List<String> variableList;
-  // abstractList: data set abstract term list
-  private List<String> abstractList;
-  // isotopicList: data set iso topic list
-  private List<String> isotopicList;
-  // sensorList: data set sensor list
-  private List<String> sensorList;
-  // sourceList: data set source list
-  private List<String> sourceList;
-  // projectList: data set project list
-  private List<String> projectList;
-  // regionList: data set region list
-  private List<String> regionList;
-
-  public PODAACMetadata() {
-    // Default constructor
-  }
-
-  /**
-   * Creates a new instance of PODAACMetadata.
-   *
-   * @param shortname data set short name
-   * @param longname  data set long name
-   * @param topics    data set topics
-   * @param terms     data set terms
-   * @param variables data set variables
-   * @param keywords  data set keywords
-   * @param region    list of regions
-   */
-  public PODAACMetadata(String shortname, List<String> longname, List<String> topics, List<String> terms, List<String> variables, List<String> keywords, List<String> region) {
-    this.shortname = shortname;
-    this.longnameList = longname;
-    this.keywordList = keywords;
-    this.termList = terms;
-    this.topicList = topics;
-    this.variableList = variables;
-    this.regionList = region;
-  }
-
-  /**
-   * setTerms: set term of data set
-   *
-   * @param termstr data set terms
-   */
-  public void setTerms(String termstr) {
-    this.splitString(termstr, this.termList);
-  }
-
-  /**
-   * setKeywords: set key word of data set
-   *
-   * @param keywords data set keywords
-   */
-  public void setKeywords(String keywords) {
-    this.splitString(keywords, this.keywordList);
-  }
-
-  /**
-   * setTopicList: set topic of data set
-   *
-   * @param topicStr data set topics
-   */
-  public void setTopicList(String topicStr) {
-    this.splitString(topicStr, this.topicList);
-  }
-
-  /**
-   * setVaraliableList: set varilable of data set
-   *
-   * @param varilableStr data set variables
-   */
-  public void setVaraliableList(String varilableStr) {
-    this.splitString(varilableStr, this.variableList);
-  }
-
-  /**
-   * setProjectList:set project of data set
-   *
-   * @param project data set projects
-   */
-  public void setProjectList(String project) {
-    this.splitString(project, this.projectList);
-  }
-
-  /**
-   * setSourceList: set source of data set
-   *
-   * @param source data set sources
-   */
-  public void setSourceList(String source) {
-    this.splitString(source, this.sourceList);
-  }
-
-  /**
-   * setSensorList: set sensor of data set
-   *
-   * @param sensor data set sensors
-   */
-  public void setSensorList(String sensor) {
-    this.splitString(sensor, this.sensorList);
-  }
-
-  /**
-   * setISOTopicList:set iso topic of data set
-   *
-   * @param isoTopic data set iso topics
-   */
-  public void setISOTopicList(String isoTopic) {
-    this.splitString(isoTopic, this.isotopicList);
-  }
-
-  /**
-   * getKeywordList: get key word of data set
-   *
-   * @return data set keyword list
-   */
-  public List<String> getKeywordList() {
-    return this.keywordList;
-  }
-
-  /**
-   * getTermList:get term list of data set
-   *
-   * @return data set term list
-   */
-  public List<String> getTermList() {
-    return this.termList;
-  }
-
-  /**
-   * getShortName:get short name of data set
-   *
-   * @return data set short name
-   */
-  public String getShortName() {
-    return this.shortname;
-  }
-
-  /**
-   * getKeyword:get key word of data set
-   *
-   * @return data set keyword string
-   */
-  public String getKeyword() {
-    return String.join(",", this.keywordList);
-  }
-
-  /**
-   * getTerm:get term of data set
-   *
-   * @return data set term string
-   */
-  public String getTerm() {
-    return String.join(",", this.termList);
-  }
-
-  /**
-   * getTopic:get topic of data set
-   *
-   * @return data set topic string
-   */
-  public String getTopic() {
-    return String.join(",", this.topicList);
-  }
-
-  /**
-   * getVariable:get variable of data set
-   *
-   * @return data set variable string
-   */
-  public String getVariable() {
-    return String.join(",", this.variableList);
-  }
-
-  /**
-   * getAbstract:get abstract of data set
-   *
-   * @return data set abstract
-   */
-  public String getAbstract() {
-    return this.abstractStr;
-  }
-
-  /**
-   * getProject:get project of data set
-   *
-   * @return data set project string
-   */
-  public String getProject() {
-    return this.project;
-  }
-
-  /**
-   * getSource:get source of data set
-   *
-   * @return data set source string
-   */
-  public String getSource() {
-    return this.source;
-  }
-
-  /**
-   * getSensor:get sensor of data set
-   *
-   * @return data set sensor string
-   */
-  public String getSensor() {
-    return this.sensor;
-  }
-
-  /**
-   * getISOTopic:get iso topic of data set
-   *
-   * @return data set ISO topic string
-   */
-  public String getISOTopic() {
-    return this.isoTopic;
-  }
-
-  /**
-   * getAllTermList: get all term list of data set
-   *
-   * @return data set term list
-   */
-  public List<String> getAllTermList() {
-    List<String> allterms = new ArrayList<>();
-
-    if (this.termList != null && !this.termList.isEmpty()) {
-      allterms.addAll(this.termList);
-    }
-
-    if (this.keywordList != null && !this.keywordList.isEmpty()) {
-      allterms.addAll(this.keywordList);
-    }
-
-    if (this.topicList != null && !this.topicList.isEmpty()) {
-      allterms.addAll(this.topicList);
-    }
-
-    if (this.variableList != null && !this.variableList.isEmpty()) {
-      allterms.addAll(this.variableList);
-    }
-
-    if (this.regionList != null && !this.regionList.isEmpty()) {
-      allterms.addAll(this.regionList);
-    }
-    return allterms;
-  }
-
-  /**
-   * splitString: split value of fields of data set
-   *
-   * @param oristr original string
-   * @param list   result after splitting
-   */
-  private void splitString(String oristr, List<String> list) {
-    if (oristr == null) {
-      return;
-    }
-
-    if (oristr.startsWith("\"")) {
-      oristr = oristr.substring(1);
-    }
-    if (oristr.endsWith("\"")) {
-      oristr = oristr.substring(0, oristr.length() - 1);
-    }
-
-    String strs[] = oristr.trim().split(",");
-    if (strs != null) {
-      for (String str1 : strs) {
-        String str = str1.trim();
-        if (str.startsWith(",") || str.startsWith("\"")) {
-          str = str.substring(1);
-        }
-        if (str.endsWith(",") || str.endsWith("\"")) {
-          str = str.substring(0, str.length() - 1);
-        }
-        if ("".equals(str)) {
-          continue;
-        }
-        list.add(str);
-      }
-    }
-  }
+public class PODAACMetadata extends Metadata {
+
+	/**
+	 *
+	 */
+	private static final long serialVersionUID = 1L;
+	// shortname: data set short name
+	// private String shortname;
+	// abstractStr: data set abstract
+	private String abstractStr;
+	// isoTopic: data set topic
+	private String isoTopic;
+	// sensor: sensor
+	private String sensor;
+	// source: data source
+	private String source;
+	// project: data project
+	private String project;
+	// hasAbstarct: whether data set has abstract
+	boolean hasAbstarct;
+
+	// longnameList: data set long name list
+	private List<String> longnameList;
+	// keywordList:data set key word list
+	private List<String> keywordList;
+	// termList: data set term list
+	private List<String> termList;
+	// topicList: data set topic list
+	private List<String> topicList;
+	// variableList: data set variable list
+	private List<String> variableList;
+	// abstractList: data set abstract term list
+	private List<String> abstractList;
+	// isotopicList: data set iso topic list
+	private List<String> isotopicList;
+	// sensorList: data set sensor list
+	private List<String> sensorList;
+	// sourceList: data set source list
+	private List<String> sourceList;
+	// projectList: data set project list
+	private List<String> projectList;
+	// regionList: data set region list
+	private List<String> regionList;
+
+	public PODAACMetadata() {
+		// Default constructor
+	}
+
+	/**
+	 * Creates a new instance of PODAACMetadata.
+	 *
+	 * @param shortname
+	 *            data set short name
+	 * @param longname
+	 *            data set long name
+	 * @param topics
+	 *            data set topics
+	 * @param terms
+	 *            data set terms
+	 * @param variables
+	 *            data set variables
+	 * @param keywords
+	 *            data set keywords
+	 * @param region
+	 *            list of regions
+	 */
+	public PODAACMetadata(String shortname, List<String> longname, List<String> topics, List<String> terms,
+			List<String> variables, List<String> keywords, List<String> region) {
+		this.shortname = shortname;
+		this.longnameList = longname;
+		this.keywordList = keywords;
+		this.termList = terms;
+		this.topicList = topics;
+		this.variableList = variables;
+		this.regionList = region;
+	}
+
+	public PODAACMetadata(Map<String, Object> result, ESDriver es, String index) {
+
+		String shortname = (String) result.get("Dataset-ShortName");
+		List<String> topic = (List<String>) result.get("DatasetParameter-Topic");
+		List<String> term = (List<String>) result.get("DatasetParameter-Term");
+		List<String> keyword = (List<String>) result.get("Dataset-Metadata");
+		List<String> variable = (List<String>) result.get("DatasetParameter-Variable");
+		List<String> longname = (List<String>) result.get("DatasetProject-Project-LongName");
+		List<String> region = (List<String>) result.get("DatasetRegion-Region");
+
+		this.shortname = shortname;
+		this.longnameList = longname;
+		try {
+			this.keywordList = es.customAnalyzing(index, keyword);
+			this.termList = es.customAnalyzing(index, term);
+			this.topicList = es.customAnalyzing(index, topic);
+			this.variableList = es.customAnalyzing(index, variable);
+			this.regionList = es.customAnalyzing(index, region);
+		} catch (InterruptedException | ExecutionException e) {
+			// custom analysis failed; the affected term lists stay unset
+			e.printStackTrace();
+		}
+	}
+
+	/**
+	 * setTerms: set term of data set
+	 *
+	 * @param termstr
+	 *            data set terms
+	 */
+	public void setTerms(String termstr) {
+		this.splitString(termstr, this.termList);
+	}
+
+	/**
+	 * setKeywords: set key word of data set
+	 *
+	 * @param keywords
+	 *            data set keywords
+	 */
+	public void setKeywords(String keywords) {
+		this.splitString(keywords, this.keywordList);
+	}
+
+	/**
+	 * setTopicList: set topic of data set
+	 *
+	 * @param topicStr
+	 *            data set topics
+	 */
+	public void setTopicList(String topicStr) {
+		this.splitString(topicStr, this.topicList);
+	}
+
+	/**
+	 * setVaraliableList: set variable of data set
+	 *
+	 * @param varilableStr
+	 *            data set variables
+	 */
+	public void setVaraliableList(String varilableStr) {
+		this.splitString(varilableStr, this.variableList);
+	}
+
+	/**
+	 * setProjectList:set project of data set
+	 *
+	 * @param project
+	 *            data set projects
+	 */
+	public void setProjectList(String project) {
+		this.splitString(project, this.projectList);
+	}
+
+	/**
+	 * setSourceList: set source of data set
+	 *
+	 * @param source
+	 *            data set sources
+	 */
+	public void setSourceList(String source) {
+		this.splitString(source, this.sourceList);
+	}
+
+	/**
+	 * setSensorList: set sensor of data set
+	 *
+	 * @param sensor
+	 *            data set sensors
+	 */
+	public void setSensorList(String sensor) {
+		this.splitString(sensor, this.sensorList);
+	}
+
+	/**
+	 * setISOTopicList:set iso topic of data set
+	 *
+	 * @param isoTopic
+	 *            data set iso topics
+	 */
+	public void setISOTopicList(String isoTopic) {
+		this.splitString(isoTopic, this.isotopicList);
+	}
+
+	/**
+	 * getKeywordList: get key word of data set
+	 *
+	 * @return data set keyword list
+	 */
+	public List<String> getKeywordList() {
+		return this.keywordList;
+	}
+
+	/**
+	 * getTermList:get term list of data set
+	 *
+	 * @return data set term list
+	 */
+	public List<String> getTermList() {
+		return this.termList;
+	}
+
+	/**
+	 * getShortName:get short name of data set
+	 *
+	 * @return data set short name
+	 */
+	/*
+	 * public String getShortName() { return this.shortname; }
+	 */
+
+	/**
+	 * getKeyword:get key word of data set
+	 *
+	 * @return data set keyword string
+	 */
+	public String getKeyword() {
+		return String.join(",", this.keywordList);
+	}
+
+	/**
+	 * getTerm:get term of data set
+	 *
+	 * @return data set term string
+	 */
+	public String getTerm() {
+		return String.join(",", this.termList);
+	}
+
+	/**
+	 * getTopic:get topic of data set
+	 *
+	 * @return data set topic string
+	 */
+	public String getTopic() {
+		return String.join(",", this.topicList);
+	}
+
+	/**
+	 * getVariable:get variable of data set
+	 *
+	 * @return data set variable string
+	 */
+	public String getVariable() {
+		return String.join(",", this.variableList);
+	}
+
+	/**
+	 * getAbstract:get abstract of data set
+	 *
+	 * @return data set abstract
+	 */
+	public String getAbstract() {
+		return this.abstractStr;
+	}
+
+	/**
+	 * getProject:get project of data set
+	 *
+	 * @return data set project string
+	 */
+	public String getProject() {
+		return this.project;
+	}
+
+	/**
+	 * getSource:get source of data set
+	 *
+	 * @return data set source string
+	 */
+	public String getSource() {
+		return this.source;
+	}
+
+	/**
+	 * getSensor:get sensor of data set
+	 *
+	 * @return data set sensor string
+	 */
+	public String getSensor() {
+		return this.sensor;
+	}
+
+	/**
+	 * getISOTopic:get iso topic of data set
+	 *
+	 * @return data set ISO topic string
+	 */
+	public String getISOTopic() {
+		return this.isoTopic;
+	}
+
+	/**
+	 * getAllTermList: get all term list of data set
+	 *
+	 * @return data set term list
+	 */
+	public List<String> getAllTermList() {
+		List<String> allterms = new ArrayList<>();
+
+		if (this.termList != null && !this.termList.isEmpty()) {
+			allterms.addAll(this.termList);
+		}
+
+		if (this.keywordList != null && !this.keywordList.isEmpty()) {
+			allterms.addAll(this.keywordList);
+		}
+
+		if (this.topicList != null && !this.topicList.isEmpty()) {
+			allterms.addAll(this.topicList);
+		}
+
+		if (this.variableList != null && !this.variableList.isEmpty()) {
+			allterms.addAll(this.variableList);
+		}
+
+		if (this.regionList != null && !this.regionList.isEmpty()) {
+			allterms.addAll(this.regionList);
+		}
+		return allterms;
+	}
+
+	/**
+	 * splitString: split value of fields of data set
+	 *
+	 * @param oristr
+	 *            original string
+	 * @param list
+	 *            result after splitting
+	 */
+	private void splitString(String oristr, List<String> list) {
+		if (oristr == null) {
+			return;
+		}
+
+		if (oristr.startsWith("\"")) {
+			oristr = oristr.substring(1);
+		}
+		if (oristr.endsWith("\"")) {
+			oristr = oristr.substring(0, oristr.length() - 1);
+		}
+
+		String strs[] = oristr.trim().split(",");
+		if (strs != null) {
+			for (int i = 0; i < strs.length; i++) {
+				String str = strs[i].trim();
+				if (str.startsWith(",") || str.startsWith("\"")) {
+					str = str.substring(1);
+				}
+				if (str.endsWith(",") || str.endsWith("\"")) {
+					str = str.substring(0, str.length() - 1);
+				}
+				if ("".equals(str)) {
+					continue;
+				}
+				list.add(str);
+			}
+		}
+	}
 }
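
For reference, a minimal sketch of driving the new map-based constructor above
from an Elasticsearch scroll; the field names are the ones read in the
constructor, while the index name "mudrod" and the surrounding scroll loop are
illustrative assumptions:

  // hypothetical caller; "mudrod" is an assumed index name
  Map<String, Object> result = hit.getSource();   // hit: an Elasticsearch SearchHit
  PODAACMetadata meta = new PODAACMetadata(result, es, "mudrod");
  List<String> allTerms = meta.getAllTermList();  // terms + keywords + topics + variables + regions
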
diff --git a/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/package-info.java
index 1fa99c9..938b4ac 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/metadata/structure/package-info.java
@@ -14,4 +14,4 @@
 /**
  * This package includes classes needed for metadata analysis
  */
-package org.apache.sdap.mudrod.metadata.structure;
+package org.apache.sdap.mudrod.metadata.structure;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ontology/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/ontology/package-info.java
index ff51407..44596e3 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ontology/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ontology/package-info.java
@@ -14,4 +14,4 @@
 /**
  * This package includes ontology pre-processing and processing classes.
  */
-package org.apache.sdap.mudrod.ontology;
+package org.apache.sdap.mudrod.ontology;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ontology/pre/AggregateTriples.java b/core/src/main/java/org/apache/sdap/mudrod/ontology/pre/AggregateTriples.java
index e988d15..90883b0 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ontology/pre/AggregateTriples.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ontology/pre/AggregateTriples.java
@@ -13,10 +13,12 @@
  */
 package org.apache.sdap.mudrod.ontology.pre;
 
-import org.apache.commons.io.FilenameUtils;
 import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
+import org.apache.sdap.mudrod.main.MudrodConstants;
+
+import org.apache.commons.io.FilenameUtils;
 import org.jdom2.Document;
 import org.jdom2.Element;
 import org.jdom2.JDOMException;
@@ -51,7 +53,7 @@ public AggregateTriples(Properties props, ESDriver es, SparkDriver spark) {
    */
   @Override
   public Object execute() {
-    File file = new File(this.props.getProperty("oceanTriples"));
+    File file = new File(this.props.getProperty(MudrodConstants.ONTOLOGY_PATH));
     if (file.exists()) {
       file.delete();
     }
@@ -69,7 +71,7 @@ public Object execute() {
       e.printStackTrace();
     }
 
-    File[] files = new File(this.props.getProperty("ontologyInputDir")).listFiles();
+    File[] files = new File(this.props.getProperty(MudrodConstants.ONTOLOGY_INPUT_PATH)).listFiles();
     for (File file_in : files) {
       String ext = FilenameUtils.getExtension(file_in.getAbsolutePath());
       if ("owl".equals(ext)) {
@@ -167,8 +169,8 @@ public Element findChild(String str, Element ele) {
   public void getAllClass() throws IOException {
     List<?> classElements = rootNode.getChildren("Class", Namespace.getNamespace("owl", owl_namespace));
 
-    for (Object classElement1 : classElements) {
-      Element classElement = (Element) classElement1;
+    for (int i = 0; i < classElements.size(); i++) {
+      Element classElement = (Element) classElements.get(i);
       String className = classElement.getAttributeValue("about", Namespace.getNamespace("rdf", rdf_namespace));
 
       if (className == null) {
@@ -176,8 +178,8 @@ public void getAllClass() throws IOException {
       }
 
       List<?> subclassElements = classElement.getChildren("subClassOf", Namespace.getNamespace("rdfs", rdfs_namespace));
-      for (Object subclassElement1 : subclassElements) {
-        Element subclassElement = (Element) subclassElement1;
+      for (int j = 0; j < subclassElements.size(); j++) {
+        Element subclassElement = (Element) subclassElements.get(j);
         String subclassName = subclassElement.getAttributeValue("resource", Namespace.getNamespace("rdf", rdf_namespace));
         if (subclassName == null) {
           Element allValuesFromEle = findChild("allValuesFrom", subclassElement);
@@ -192,8 +194,8 @@ public void getAllClass() throws IOException {
       }
 
       List equalClassElements = classElement.getChildren("equivalentClass", Namespace.getNamespace("owl", owl_namespace));
-      for (Object equalClassElement1 : equalClassElements) {
-        Element equalClassElement = (Element) equalClassElement1;
+      for (int k = 0; k < equalClassElements.size(); k++) {
+        Element equalClassElement = (Element) equalClassElements.get(k);
         String equalClassElementName = equalClassElement.getAttributeValue("resource", Namespace.getNamespace("rdf", rdf_namespace));
 
         if (equalClassElementName != null) {
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ontology/pre/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/ontology/pre/package-info.java
index 1ba2f4a..3f7c87e 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ontology/pre/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ontology/pre/package-info.java
@@ -14,4 +14,4 @@
 /**
  * This package includes ontology pre-processing classes.
  */
-package org.apache.sdap.mudrod.ontology.pre;
+package org.apache.sdap.mudrod.ontology.pre;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/EsipCOROntology.java b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/EsipCOROntology.java
index 45d04a8..f775937 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/EsipCOROntology.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/EsipCOROntology.java
@@ -13,10 +13,10 @@
  */
 package org.apache.sdap.mudrod.ontology.process;
 
-import java.util.Iterator;
-
 import org.apache.sdap.mudrod.ontology.Ontology;
 
+import java.util.Iterator;
+
 /**
  * @author lewismc
  */
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/EsipPortalOntology.java b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/EsipPortalOntology.java
index c989a29..a36a013 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/EsipPortalOntology.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/EsipPortalOntology.java
@@ -13,10 +13,10 @@
  */
 package org.apache.sdap.mudrod.ontology.process;
 
-import java.util.Iterator;
-
 import org.apache.sdap.mudrod.ontology.Ontology;
 
+import java.util.Iterator;
+
 /**
  * @author lewismc
  */
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/LocalOntology.java b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/LocalOntology.java
index f106594..df32643 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/LocalOntology.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/LocalOntology.java
@@ -13,6 +13,8 @@
  */
 package org.apache.sdap.mudrod.ontology.process;
 
+import org.apache.sdap.mudrod.ontology.Ontology;
+
 import org.apache.jena.ontology.Individual;
 import org.apache.jena.ontology.OntClass;
 import org.apache.jena.ontology.OntModel;
@@ -24,7 +26,6 @@
 import org.apache.jena.rdf.model.ModelFactory;
 import org.apache.jena.rdf.model.Resource;
 import org.apache.jena.shared.PrefixMapping;
-import org.apache.sdap.mudrod.ontology.Ontology;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -116,10 +117,12 @@ public void load() {
    */
   @Override
   public void load(String[] urls) {
-    for (String url1 : urls) {
-      String url = url1.trim();
-      if (!"".equals(url) && LOG.isInfoEnabled())
-        LOG.info("Reading and processing {}", url);
+    for (int i = 0; i < urls.length; i++) {
+      String url = urls[i].trim();
+      if (!"".equals(url))
+        if (LOG.isInfoEnabled()) {
+          LOG.info("Reading and processing {}", url);
+        }
       load(ontologyModel, url);
     }
     parser.parse(ontology, ontologyModel);
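
Note that in both the old and the new version of load(String[]) the emptiness
check guards only the log statement; load(ontologyModel, url) still runs for
blank entries. A sketch that skips them outright, if that is the intent
(slf4j's {} placeholders also make the isInfoEnabled() guard unnecessary):

  for (String u : urls) {
    String url = u.trim();
    if ("".equals(url)) {
      continue;  // skip blank entries entirely
    }
    LOG.info("Reading and processing {}", url);
    load(ontologyModel, url);
  }
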
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OntologyLinkCal.java b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OntologyLinkCal.java
index eb6aeff..1e220a6 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OntologyLinkCal.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OntologyLinkCal.java
@@ -16,6 +16,8 @@
 import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
+import org.apache.sdap.mudrod.main.MudrodConstants;
+
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
@@ -35,7 +37,7 @@
 
   public OntologyLinkCal(Properties props, ESDriver es, SparkDriver spark) {
     super(props, es, spark);
-    es.deleteAllByQuery(props.getProperty("indexName"), props.getProperty("ontologyLinkageType"), QueryBuilders.matchAllQuery());
+    es.deleteAllByQuery(props.getProperty(MudrodConstants.ES_INDEX_NAME), MudrodConstants.ONTOLOGY_LINKAGE_TYPE, QueryBuilders.matchAllQuery());
     addSWEETMapping();
   }
 
@@ -45,12 +47,13 @@ public OntologyLinkCal(Properties props, ESDriver es, SparkDriver spark) {
   public void addSWEETMapping() {
     XContentBuilder Mapping;
     try {
-      Mapping = jsonBuilder().startObject().startObject(props.getProperty("ontologyLinkageType")).startObject("properties").startObject("concept_A").field("type", "string")
+      Mapping = jsonBuilder().startObject().startObject(MudrodConstants.ONTOLOGY_LINKAGE_TYPE).startObject("properties").startObject("concept_A").field("type", "string")
           .field("index", "not_analyzed").endObject().startObject("concept_B").field("type", "string").field("index", "not_analyzed").endObject()
 
           .endObject().endObject().endObject();
 
-      es.getClient().admin().indices().preparePutMapping(props.getProperty("indexName")).setType(props.getProperty("ontologyLinkageType")).setSource(Mapping).execute().actionGet();
+      es.getClient().admin().indices().preparePutMapping(props.getProperty(MudrodConstants.ES_INDEX_NAME))
+      .setType(MudrodConstants.ONTOLOGY_LINKAGE_TYPE).setSource(Mapping).execute().actionGet();
     } catch (IOException e) {
       e.printStackTrace();
     }
@@ -61,7 +64,7 @@ public void addSWEETMapping() {
    */
   @Override
   public Object execute() {
-    es.deleteType(props.getProperty("indexName"), props.getProperty("ontologyLinkageType"));
+    es.deleteType(props.getProperty(MudrodConstants.ES_INDEX_NAME), MudrodConstants.ONTOLOGY_LINKAGE_TYPE);
     es.createBulkProcessor();
 
     BufferedReader br = null;
@@ -69,7 +72,7 @@ public Object execute() {
     double weight = 0;
 
     try {
-      br = new BufferedReader(new FileReader(props.getProperty("oceanTriples")));
+      br = new BufferedReader(new FileReader(props.getProperty(MudrodConstants.ONTOLOGY_PATH)));
       while ((line = br.readLine()) != null) {
         String[] strList = line.toLowerCase().split(",");
         if (strList[1].equals("subclassof")) {
@@ -78,9 +81,9 @@ public Object execute() {
           weight = 0.9;
         }
 
-        IndexRequest ir = new IndexRequest(props.getProperty("indexName"), props.getProperty("ontologyLinkageType")).source(
-            jsonBuilder().startObject().field("concept_A", es.customAnalyzing(props.getProperty("indexName"), strList[2]))
-                .field("concept_B", es.customAnalyzing(props.getProperty("indexName"), strList[0])).field("weight", weight).endObject());
+        IndexRequest ir = new IndexRequest(props.getProperty(MudrodConstants.ES_INDEX_NAME), MudrodConstants.ONTOLOGY_LINKAGE_TYPE).source(
+            jsonBuilder().startObject().field("concept_A", es.customAnalyzing(props.getProperty(MudrodConstants.ES_INDEX_NAME), strList[2]))
+                .field("concept_B", es.customAnalyzing(props.getProperty(MudrodConstants.ES_INDEX_NAME), strList[0])).field("weight", weight).endObject());
         es.getBulkProcessor().add(ir);
 
       }
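
For context, execute() above reads the aggregated triple file one
comma-separated, lower-cased line at a time: strList[1] is the relation, and
the indexed document stores strList[2] as concept_A and strList[0] as
concept_B. An illustrative line (the concepts are invented; only the layout is
implied by the code):

  sea surface temperature,subclassof,ocean temperature
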
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OntologyParser.java b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OntologyParser.java
index 6e2a5f2..9eabb7d 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OntologyParser.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OntologyParser.java
@@ -15,6 +15,7 @@
 
 import org.apache.jena.ontology.OntClass;
 import org.apache.jena.ontology.OntModel;
+
 import org.apache.sdap.mudrod.ontology.Ontology;
 
 import java.util.Iterator;
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OwlParser.java b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OwlParser.java
index 3f12cd6..d2c57a3 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OwlParser.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/OwlParser.java
@@ -17,13 +17,15 @@
 import org.apache.jena.ontology.OntClass;
 import org.apache.jena.ontology.OntModel;
 import org.apache.jena.rdf.model.Literal;
-import org.apache.sdap.mudrod.ontology.Ontology;
 
 import com.esotericsoftware.minlog.Log;
 
+import org.apache.sdap.mudrod.ontology.Ontology;
+
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Set;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/package-info.java
index c9b380c..832bfbd 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ontology/process/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ontology/process/package-info.java
@@ -14,4 +14,4 @@
 /**
  * This package includes ontology processing classes.
  */
-package org.apache.sdap.mudrod.ontology.process;
+package org.apache.sdap.mudrod.ontology.process;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/package-info.java
index dac3ca7..4c6bbb1 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/recommendation/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/package-info.java
@@ -15,4 +15,4 @@
  * This package includes the preprocessing, processing, and data structure used
  * by recommendation module.
  */
-package org.apache.sdap.mudrod.recommendation;
+package org.apache.sdap.mudrod.recommendation;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/ImportMetadata.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/ImportMetadata.java
index 7bb1d22..9e54859 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/ImportMetadata.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/ImportMetadata.java
@@ -15,13 +15,12 @@
 
 import com.google.gson.JsonElement;
 import com.google.gson.JsonParser;
-
-import org.apache.commons.io.IOUtils;
 import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
 import org.apache.sdap.mudrod.main.MudrodConstants;
 import org.apache.sdap.mudrod.metadata.pre.ApiHarvester;
+import org.apache.commons.io.IOUtils;
 import org.elasticsearch.action.index.IndexRequest;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -65,8 +64,8 @@ public void addMetadataMapping() {
     String mappingJson = "{\r\n   \"dynamic_templates\": " + "[\r\n      " + "{\r\n         \"strings\": " + "{\r\n            \"match_mapping_type\": \"string\","
         + "\r\n            \"mapping\": {\r\n               \"type\": \"string\"," + "\r\n               \"analyzer\": \"csv\"\r\n            }" + "\r\n         }\r\n      }\r\n   ]\r\n}";
 
-    es.getClient().admin().indices().preparePutMapping(props.getProperty(MudrodConstants.ES_INDEX_NAME)).setType(props.getProperty("recom_metadataType")).setSource(mappingJson).execute().actionGet();
-
+    es.getClient().admin().indices().preparePutMapping(props.getProperty(MudrodConstants.ES_INDEX_NAME))
+    .setType(MudrodConstants.RECOM_METADATA_TYPE).setSource(mappingJson).execute().actionGet();
   }
 
   /**
@@ -75,7 +74,7 @@ public void addMetadataMapping() {
    * invoking this method.
    */
   private void importToES() {
-    es.deleteType(props.getProperty("indexName"), props.getProperty("recom_metadataType"));
+    es.deleteType(props.getProperty(MudrodConstants.ES_INDEX_NAME), MudrodConstants.RECOM_METADATA_TYPE);
 
     es.createBulkProcessor();
     File directory = new File(props.getProperty(MudrodConstants.RAW_METADATA_PATH));
@@ -88,9 +87,8 @@ private void importToES() {
           String jsonTxt = IOUtils.toString(is);
           JsonParser parser = new JsonParser();
           JsonElement item = parser.parse(jsonTxt);
-          IndexRequest ir = new IndexRequest(props.getProperty(MudrodConstants.ES_INDEX_NAME), props.getProperty("recom_metadataType")).source(item.toString());
-
-          // preprocessdata
+          IndexRequest ir = new IndexRequest(props.getProperty(MudrodConstants.ES_INDEX_NAME), 
+              MudrodConstants.RECOM_METADATA_TYPE).source(item.toString());
 
           es.getBulkProcessor().add(ir);
         } catch (IOException e) {
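
A pattern worth noting in these changes: values that stay configurable are
still resolved through props.getProperty(...), while names that used to come
from the configuration, such as the recommendation metadata type, are now used
as compile-time constants directly. A sketch of the distinction, with the
caveat that the actual definitions live in
org.apache.sdap.mudrod.main.MudrodConstants and are not shown in this diff:

  String index = props.getProperty(MudrodConstants.ES_INDEX_NAME);  // constant is a property *key*
  String type  = MudrodConstants.RECOM_METADATA_TYPE;               // constant is the literal type name
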
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/MetadataTFIDFGenerator.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/MetadataTFIDFGenerator.java
index eb22630..72f0bb9 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/MetadataTFIDFGenerator.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/MetadataTFIDFGenerator.java
@@ -1,8 +1,8 @@
 /*
- * Licensed under the Apache License, Version 2.0 (the "License"); you
- * may not use this file except in compliance with the License.
+ * Licensed under the Apache License, Version 2.0 (the "License"); you 
+ * may not use this file except in compliance with the License. 
  * You may obtain a copy of the License at
- *
+ * 
  * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
@@ -11,12 +11,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+/**
+ * This package includes the preprocessing, processing, and data structure used
+ * by recommendation module.
+ */
+
 package org.apache.sdap.mudrod.recommendation.pre;
 
 import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
-import org.apache.sdap.mudrod.recommendation.structure.MetadataOpt;
+import org.apache.sdap.mudrod.main.MudrodConstants;
+import org.apache.sdap.mudrod.recommendation.structure.MetadataTokenizer;
 import org.apache.sdap.mudrod.utils.LabeledRowMatrix;
 import org.apache.sdap.mudrod.utils.MatrixUtil;
 import org.apache.spark.api.java.JavaPairRDD;
@@ -24,6 +30,7 @@
 import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
 
@@ -70,35 +77,35 @@ public Object execute(Object o) {
 
   public LabeledRowMatrix generateWordBasedTFIDF() throws Exception {
 
-    MetadataOpt opt = new MetadataOpt(props);
+    MetadataTokenizer opt = new MetadataTokenizer(props);
 
-    JavaPairRDD<String, String> metadataContents = opt.loadAll(es, spark);
+    String metadataName = props.getProperty(MudrodConstants.METADATA_ID);
+    JavaPairRDD<String, String> metadataContents = opt.loadAll(es, spark, metadataName);
 
     JavaPairRDD<String, List<String>> metadataWords = opt.tokenizeData(metadataContents, " ");
 
     LabeledRowMatrix wordtfidfMatrix = opt.tFIDFTokens(metadataWords, spark);
 
-    MatrixUtil.exportToCSV(wordtfidfMatrix.rowMatrix, wordtfidfMatrix.rowkeys, wordtfidfMatrix.colkeys, props.getProperty("metadata_word_tfidf_matrix"));
+    MatrixUtil.exportToCSV(wordtfidfMatrix.rowMatrix, wordtfidfMatrix.rowkeys, wordtfidfMatrix.colkeys, props.getProperty(MudrodConstants.METADATA_WORD_MATRIX_PATH));
 
     return wordtfidfMatrix;
   }
 
   public LabeledRowMatrix generateTermBasedTFIDF() throws Exception {
 
-    MetadataOpt opt = new MetadataOpt(props);
+    MetadataTokenizer opt = new MetadataTokenizer(props);
 
-    List<String> variables = new ArrayList<>();
-    variables.add("DatasetParameter-Term");
-    variables.add("DatasetParameter-Variable");
-    variables.add("Dataset-ExtractTerm");
+    String source = props.getProperty(MudrodConstants.SEMANTIC_FIELDS);
+    List<String> variables = new ArrayList<String>(Arrays.asList(source.split(",")));
 
-    JavaPairRDD<String, String> metadataContents = opt.loadAll(es, spark, variables);
+    String metadataName = props.getProperty(MudrodConstants.METADATA_ID);
+    JavaPairRDD<String, String> metadataContents = opt.loadAll(es, spark, variables, metadataName);
 
     JavaPairRDD<String, List<String>> metadataTokens = opt.tokenizeData(metadataContents, ",");
 
     LabeledRowMatrix tokentfidfMatrix = opt.tFIDFTokens(metadataTokens, spark);
 
-    MatrixUtil.exportToCSV(tokentfidfMatrix.rowMatrix, tokentfidfMatrix.rowkeys, tokentfidfMatrix.colkeys, props.getProperty("metadata_term_tfidf_matrix"));
+    MatrixUtil.exportToCSV(tokentfidfMatrix.rowMatrix, tokentfidfMatrix.rowkeys, tokentfidfMatrix.colkeys, props.getProperty(MudrodConstants.METADATA_TERM_MATRIX_PATH));
 
     return tokentfidfMatrix;
   }
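
The term-based TF-IDF path now reads its field list from configuration instead
of the hard-coded three fields it replaces. A sketch of supplying that list
programmatically, using exactly the field names the deleted code listed; the
actual property key behind MudrodConstants.SEMANTIC_FIELDS is not shown in
this diff:

  props.setProperty(MudrodConstants.SEMANTIC_FIELDS,
      "DatasetParameter-Term,DatasetParameter-Variable,Dataset-ExtractTerm");
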
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/NormalizeFeatures.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/NormalizeFeatures.java
new file mode 100644
index 0000000..7a58484
--- /dev/null
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/NormalizeFeatures.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License"); you 
+ * may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * This package includes the preprocessing, processing, and data structure used
+ * by recommendation module.
+ */
+package org.apache.sdap.mudrod.recommendation.pre;
+
+import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
+import org.apache.sdap.mudrod.driver.ESDriver;
+import org.apache.sdap.mudrod.driver.SparkDriver;
+import org.apache.sdap.mudrod.main.MudrodConstants;
+import org.apache.sdap.mudrod.recommendation.structure.MetadataFeature;
+import org.apache.sdap.mudrod.recommendation.structure.PODAACMetadataFeature;
+
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.update.UpdateRequest;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.search.SearchHit;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import java.util.regex.Pattern;
+
+public class NormalizeFeatures extends DiscoveryStepAbstract {
+
+  /**
+   *
+   */
+  private static final long serialVersionUID = 1L;
+  private static final Logger LOG = LoggerFactory.getLogger(NormalizeFeatures.class);
+  // index name
+  private String indexName;
+  // type name of metadata in ES
+  private String metadataType;
+
+  /**
+   * Creates a new instance of NormalizeFeatures.
+   *
+   * @param props the Mudrod configuration
+   * @param es    an instantiated {@link ESDriver}
+   * @param spark an instantiated {@link SparkDriver}
+   */
+  public NormalizeFeatures(Properties props, ESDriver es, SparkDriver spark) {
+    super(props, es, spark);
+    indexName = props.getProperty(MudrodConstants.ES_INDEX_NAME);
+    metadataType = MudrodConstants.RECOM_METADATA_TYPE;
+  }
+
+  @Override
+  public Object execute() {
+    LOG.info("*****************proprocessing metadata feature starts******************");
+    startTime = System.currentTimeMillis();
+
+    normalizeMetadataVariables(es);
+
+    endTime = System.currentTimeMillis();
+    LOG.info("*****************proprocessing metadata feature ends******************Took {}s", (endTime - startTime) / 1000);
+
+    return null;
+  }
+
+  @Override
+  public Object execute(Object o) {
+    return null;
+  }
+
+  public void normalizeMetadataVariables(ESDriver es) {
+
+    es.createBulkProcessor();
+
+    SearchResponse scrollResp = es.getClient().prepareSearch(indexName).setTypes(metadataType).setScroll(new TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery()).setSize(100).execute()
+        .actionGet();
+    while (true) {
+      for (SearchHit hit : scrollResp.getHits().getHits()) {
+        Map<String, Object> metadata = hit.getSource();
+        Map<String, Object> updatedValues = new HashMap<>();
+
+        // !!! important: swap in another MetadataFeature implementation when using other metadata
+        MetadataFeature normalizer = new PODAACMetadataFeature();
+        normalizer.normalizeMetadataVariables(metadata, updatedValues);
+
+        UpdateRequest ur = es.generateUpdateRequest(indexName, metadataType, hit.getId(), updatedValues);
+        es.getBulkProcessor().add(ur);
+      }
+
+      scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
+      if (scrollResp.getHits().getHits().length == 0) {
+        break;
+      }
+    }
+
+    es.destroyBulkProcessor();
+  }
+}
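
As the in-code comment says, PODAACMetadataFeature is hard-wired here, so
supporting another metadata source means swapping in a different
MetadataFeature implementation. A minimal sketch, assuming a hypothetical
CustomMetadataFeature subclass with the same callbacks:

  MetadataFeature normalizer = new CustomMetadataFeature();  // hypothetical subclass
  normalizer.normalizeMetadataVariables(metadata, updatedValues);
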
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/NormalizeVariables.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/NormalizeVariables.java
deleted file mode 100644
index 28ffd5d..0000000
--- a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/NormalizeVariables.java
+++ /dev/null
@@ -1,223 +0,0 @@
-package org.apache.sdap.mudrod.recommendation.pre;
-
-import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
-import org.apache.sdap.mudrod.driver.ESDriver;
-import org.apache.sdap.mudrod.driver.SparkDriver;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.update.UpdateRequest;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.search.SearchHit;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-import java.util.regex.Pattern;
-
-public class NormalizeVariables extends DiscoveryStepAbstract {
-
-  /**
-   *
-   */
-  private static final long serialVersionUID = 1L;
-  private static final Logger LOG = LoggerFactory.getLogger(NormalizeVariables.class);
-  // index name
-  private String indexName;
-  // type name of metadata in ES
-  private String metadataType;
-
-  /**
-   * Creates a new instance of OHEncoder.
-   *
-   * @param props the Mudrod configuration
-   * @param es    an instantiated {@link ESDriver}
-   * @param spark an instantiated {@link SparkDriver}
-   */
-  public NormalizeVariables(Properties props, ESDriver es, SparkDriver spark) {
-    super(props, es, spark);
-    indexName = props.getProperty("indexName");
-    metadataType = props.getProperty("recom_metadataType");
-  }
-
-  @Override
-  public Object execute() {
-    LOG.info("*****************processing metadata variables starts******************");
-    startTime = System.currentTimeMillis();
-
-    normalizeMetadataVariables(es);
-
-    endTime = System.currentTimeMillis();
-    LOG.info("*****************processing metadata variables ends******************Took {}s", (endTime - startTime) / 1000);
-
-    return null;
-  }
-
-  @Override
-  public Object execute(Object o) {
-    return null;
-  }
-
-  public void normalizeMetadataVariables(ESDriver es) {
-
-    es.createBulkProcessor();
-
-    SearchResponse scrollResp = es.getClient().prepareSearch(indexName).setTypes(metadataType).setScroll(new TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery()).setSize(100).execute()
-        .actionGet();
-    while (true) {
-      for (SearchHit hit : scrollResp.getHits().getHits()) {
-        Map<String, Object> metadata = hit.getSource();
-        Map<String, Object> updatedValues = new HashMap<>();
-
-        this.normalizeSpatialVariables(metadata, updatedValues);
-        this.normalizeTemporalVariables(metadata, updatedValues);
-        this.normalizeOtherVariables(metadata, updatedValues);
-
-        UpdateRequest ur = es.generateUpdateRequest(indexName, metadataType, hit.getId(), updatedValues);
-        es.getBulkProcessor().add(ur);
-      }
-
-      scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
-      if (scrollResp.getHits().getHits().length == 0) {
-        break;
-      }
-    }
-
-    es.destroyBulkProcessor();
-  }
-
-  private void normalizeOtherVariables(Map<String, Object> metadata, Map<String, Object> updatedValues) {
-    String shortname = (String) metadata.get("Dataset-ShortName");
-    double versionNUm = getVersionNum(shortname);
-    updatedValues.put("Dataset-Derivative-VersionNum", versionNUm);
-
-  }
-
-  private Double getVersionNum(String version) {
-    if (version == null) {
-      return 0.0;
-    }
-    Double versionNum;
-    Pattern p = Pattern.compile(".*[a-zA-Z].*");
-    if ("Operational/Near-Real-Time".equals(version)) {
-      versionNum = 2.0;
-    } else if (version.matches("[0-9]{1}[a-zA-Z]{1}")) {
-      versionNum = Double.parseDouble(version.substring(0, 1));
-    } else if (p.matcher(version).find()) {
-      versionNum = 0.0;
-    } else {
-      versionNum = Double.parseDouble(version);
-      if (versionNum >= 5) {
-        versionNum = 20.0;
-      }
-    }
-    return versionNum;
-  }
-
-  private void normalizeSpatialVariables(Map<String, Object> metadata, Map<String, Object> updatedValues) {
-
-    // get spatial resolution
-    Double spatialR;
-    if (metadata.get("Dataset-SatelliteSpatialResolution") != null) {
-      spatialR = (Double) metadata.get("Dataset-SatelliteSpatialResolution");
-    } else {
-      Double gridR = (Double) metadata.get("Dataset-GridSpatialResolution");
-      if (gridR != null) {
-        spatialR = 111 * gridR;
-      } else {
-        spatialR = 25.0;
-      }
-    }
-    updatedValues.put("Dataset-Derivative-SpatialResolution", spatialR);
-
-    // Transform Longitude and calculate coverage area
-    double top = parseDouble((String) metadata.get("DatasetCoverage-NorthLat"));
-    double bottom = parseDouble((String) metadata.get("DatasetCoverage-SouthLat"));
-    double left = parseDouble((String) metadata.get("DatasetCoverage-WestLon"));
-    double right = parseDouble((String) metadata.get("DatasetCoverage-EastLon"));
-
-    if (left > 180) {
-      left = left - 360;
-    }
-
-    if (right > 180) {
-      right = right - 360;
-    }
-
-    if (left == right) {
-      left = -180;
-      right = 180;
-    }
-
-    double area = (top - bottom) * (right - left);
-
-    updatedValues.put("DatasetCoverage-Derivative-EastLon", right);
-    updatedValues.put("DatasetCoverage-Derivative-WestLon", left);
-    updatedValues.put("DatasetCoverage-Derivative-NorthLat", top);
-    updatedValues.put("DatasetCoverage-Derivative-SouthLat", bottom);
-    updatedValues.put("DatasetCoverage-Derivative-Area", area);
-
-    // get processing level
-    String processingLevel = (String) metadata.get("Dataset-ProcessingLevel");
-    double dProLevel = this.getProLevelNum(processingLevel);
-    updatedValues.put("Dataset-Derivative-ProcessingLevel", dProLevel);
-  }
-
-  private void normalizeTemporalVariables(Map<String, Object> metadata, Map<String, Object> updatedValues) {
-
-    String trStr = (String) metadata.get("Dataset-TemporalResolution");
-    if ("".equals(trStr)) {
-      trStr = (String) metadata.get("Dataset-TemporalRepeat");
-    }
-
-    updatedValues.put("Dataset-Derivative-TemporalResolution", covertTimeUnit(trStr));
-  }
-
-  private Double covertTimeUnit(String str) {
-    Double timeInHour;
-    if (str.contains("Hour")) {
-      timeInHour = Double.parseDouble(str.split(" ")[0]);
-    } else if (str.contains("Day")) {
-      timeInHour = Double.parseDouble(str.split(" ")[0]) * 24;
-    } else if (str.contains("Week")) {
-      timeInHour = Double.parseDouble(str.split(" ")[0]) * 24 * 7;
-    } else if (str.contains("Month")) {
-      timeInHour = Double.parseDouble(str.split(" ")[0]) * 24 * 7 * 30;
-    } else if (str.contains("Year")) {
-      timeInHour = Double.parseDouble(str.split(" ")[0]) * 24 * 7 * 30 * 365;
-    } else {
-      timeInHour = 0.0;
-    }
-
-    return timeInHour;
-  }
-
-  public Double getProLevelNum(String pro) {
-    if (pro == null) {
-      return 1.0;
-    }
-    Double proNum;
-    Pattern p = Pattern.compile(".*[a-zA-Z].*");
-    if (pro.matches("[0-9]{1}[a-zA-Z]{1}")) {
-      proNum = Double.parseDouble(pro.substring(0, 1));
-    } else if (p.matcher(pro).find()) {
-      proNum = 1.0;
-    } else {
-      proNum = Double.parseDouble(pro);
-    }
-
-    return proNum;
-  }
-
-  private double parseDouble(String strNumber) {
-    if (strNumber != null && strNumber.length() > 0) {
-      try {
-        return Double.parseDouble(strNumber);
-      } catch (Exception e) {
-        return -1;
-      }
-    } else
-      return 0;
-  }
-}
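
One detail of the deleted covertTimeUnit above: the week factor was carried
into the month and year branches (24 * 7 * 30 and 24 * 7 * 30 * 365 hours).
The direct conversions would look like the sketch below; whether the
replacement logic in PODAACMetadataFeature corrects this is not visible in
this diff:

  private static double timeUnitToHours(String str) {
    double n = Double.parseDouble(str.split(" ")[0]);  // leading number, as in the deleted code
    if (str.contains("Hour"))  return n;
    if (str.contains("Day"))   return n * 24;
    if (str.contains("Week"))  return n * 24 * 7;
    if (str.contains("Month")) return n * 24 * 30;     // not 24 * 7 * 30
    if (str.contains("Year"))  return n * 24 * 365;    // not 24 * 7 * 30 * 365
    return 0.0;
  }
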
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/SessionCooccurence.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/SessionCooccurence.java
index e2b1f38..f4f13a9 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/SessionCooccurence.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/SessionCooccurence.java
@@ -1,8 +1,8 @@
 /*
- * Licensed under the Apache License, Version 2.0 (the "License"); you
- * may not use this file except in compliance with the License.
+ * Licensed under the Apache License, Version 2.0 (the "License"); you 
+ * may not use this file except in compliance with the License. 
  * You may obtain a copy of the License at
- *
+ * 
  * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
@@ -11,6 +11,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+/**
+ * This package includes the preprocessing, processing, and data structure used
+ * by recommendation module.
+ */
+
 package org.apache.sdap.mudrod.recommendation.pre;
 
 import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
@@ -68,11 +73,11 @@ public Object execute() {
     JavaPairRDD<String, List<String>> sessionDatasetRDD = extractor.bulidSessionDatasetRDD(props, es, spark);
 
     // remove retired datasets
-    JavaPairRDD<String, List<String>> sessionFiltedDatasetsRDD = removeRetiredDataset(es, sessionDatasetRDD);
-    LabeledRowMatrix datasetSessionMatrix = MatrixUtil.createWordDocMatrix(sessionFiltedDatasetsRDD);
+    // JavaPairRDD<String, List<String>> sessionFiltedDatasetsRDD = removeRetiredDataset(es, sessionDatasetRDD);
+    LabeledRowMatrix datasetSessionMatrix = MatrixUtil.createWordDocMatrix(sessionDatasetRDD);
 
     // export
-    MatrixUtil.exportToCSV(datasetSessionMatrix.rowMatrix, datasetSessionMatrix.rowkeys, datasetSessionMatrix.colkeys, props.getProperty("session_metadata_Matrix"));
+    MatrixUtil.exportToCSV(datasetSessionMatrix.rowMatrix, datasetSessionMatrix.rowkeys, datasetSessionMatrix.colkeys, props.getProperty(MudrodConstants.METADATA_SESSION_MATRIX_PATH));
 
     endTime = System.currentTimeMillis();
 
@@ -109,7 +114,9 @@ public Object execute(Object o) {
       public Tuple2<String, List<String>> call(Tuple2<String, List<String>> arg0) throws Exception {
         List<String> oriDatasets = arg0._2;
         List<String> newDatasets = new ArrayList<>();
-        for (String name : oriDatasets) {
+        int size = oriDatasets.size();
+        for (int i = 0; i < size; i++) {
+          String name = oriDatasets.get(i);
           if (nameMap.containsKey(name)) {
             newDatasets.add(nameMap.get(name));
           }
@@ -131,7 +138,7 @@ public Object execute(Object o) {
   private Map<String, String> getOnServiceMetadata(ESDriver es) {
 
     String indexName = props.getProperty(MudrodConstants.ES_INDEX_NAME);
-    String metadataType = props.getProperty("recom_metadataType");
+    String metadataType = MudrodConstants.RECOM_METADATA_TYPE;
 
     Map<String, String> shortnameMap = new HashMap<>();
     SearchResponse scrollResp = es.getClient().prepareSearch(indexName).setTypes(metadataType).setScroll(new TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery()).setSize(100).execute()
@@ -139,7 +146,8 @@ public Object execute(Object o) {
     while (true) {
       for (SearchHit hit : scrollResp.getHits().getHits()) {
         Map<String, Object> metadata = hit.getSource();
-        String shortName = (String) metadata.get("Dataset-ShortName");
+        //String shortName = (String) metadata.get("Dataset-ShortName");
+        String shortName = (String) metadata.get(props.getProperty(MudrodConstants.METADATA_ID));
         shortnameMap.put(shortName.toLowerCase(), shortName);
       }
 
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/package-info.java
index cb528f6..4c95ade 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/pre/package-info.java
@@ -14,4 +14,4 @@
 /**
  * This package includes the preprocessing required by recommendation module.
  */
-package org.apache.sdap.mudrod.recommendation.pre;
+package org.apache.sdap.mudrod.recommendation.pre;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/AbstractBasedSimilarity.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/AbstractBasedSimilarity.java
index 7a288a4..08850da 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/AbstractBasedSimilarity.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/AbstractBasedSimilarity.java
@@ -1,8 +1,8 @@
 /*
- * Licensed under the Apache License, Version 2.0 (the "License"); you
- * may not use this file except in compliance with the License.
+ * Licensed under the Apache License, Version 2.0 (the "License"); you 
+ * may not use this file except in compliance with the License. 
  * You may obtain a copy of the License at
- *
+ * 
  * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
@@ -11,11 +11,17 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+/**
+ * This package includes the preprocessing, processing, and data structure used
+ * by recommendation module.
+ */
+
 package org.apache.sdap.mudrod.recommendation.process;
 
 import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
+import org.apache.sdap.mudrod.main.MudrodConstants;
 import org.apache.sdap.mudrod.semantics.SVDAnalyzer;
 import org.apache.sdap.mudrod.utils.LinkageTriple;
 import org.slf4j.Logger;
@@ -49,18 +55,11 @@ public Object execute() {
     startTime = System.currentTimeMillis();
 
     try {
-      /*String topicMatrixFile = props.getProperty("metadata_term_tfidf_matrix");
-      SemanticAnalyzer analyzer = new SemanticAnalyzer(props, es, spark);
-      List<LinkageTriple> triples = analyzer
-          .calTermSimfromMatrix(topicMatrixFile);
-      analyzer.saveToES(triples, props.getProperty("indexName"),
-          props.getProperty("metadataTermTFIDFSimType"), true, true);*/
-
       // for comparison
       SVDAnalyzer svd = new SVDAnalyzer(props, es, spark);
-      svd.getSVDMatrix(props.getProperty("metadata_word_tfidf_matrix"), 150, props.getProperty("metadata_word_tfidf_matrix"));
-      List<LinkageTriple> tripleList = svd.calTermSimfromMatrix(props.getProperty("metadata_word_tfidf_matrix"));
-      svd.saveToES(tripleList, props.getProperty("indexName"), props.getProperty("metadataWordTFIDFSimType"), true, true);
+      svd.getSVDMatrix(props.getProperty(MudrodConstants.METADATA_WORD_MATRIX_PATH), 150, props.getProperty(MudrodConstants.METADATA_WORD_MATRIX_PATH));
+      List<LinkageTriple> tripleList = svd.calTermSimfromMatrix(props.getProperty(MudrodConstants.METADATA_WORD_MATRIX_PATH));
+      svd.saveToES(tripleList, props.getProperty(MudrodConstants.ES_INDEX_NAME), MudrodConstants.METADATA_WORD_SIM_TYPE, true, true);
 
     } catch (Exception e) {
       e.printStackTrace();
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/FeatureBasedSimilarity.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/FeatureBasedSimilarity.java
new file mode 100644
index 0000000..9429dc6
--- /dev/null
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/FeatureBasedSimilarity.java
@@ -0,0 +1,211 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License"); you 
+ * may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * This package includes the preprocessing, processing, and data structure used
+ * by recommendation module.
+ */
+package org.apache.sdap.mudrod.recommendation.process;
+
+import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
+import org.apache.sdap.mudrod.driver.ESDriver;
+import org.apache.sdap.mudrod.driver.SparkDriver;
+import org.apache.sdap.mudrod.main.MudrodConstants;
+import org.apache.sdap.mudrod.recommendation.structure.MetadataFeature;
+import org.apache.sdap.mudrod.recommendation.structure.PODAACMetadataFeature;
+
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.update.UpdateRequest;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.search.SearchHit;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.text.DecimalFormat;
+import java.util.*;
+
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+
+public class FeatureBasedSimilarity extends DiscoveryStepAbstract implements Serializable {
+
+  /**
+   *
+   */
+  private static final long serialVersionUID = 1L;
+
+  private static final Logger LOG = LoggerFactory.getLogger(FeatureBasedSimilarity.class);
+
+  private DecimalFormat df = new DecimalFormat("#.000");
+  // a map from variable to its type
+  MetadataFeature metadata = null;
+  public Map<String, Integer> variableTypes;
+  public Map<String, Integer> variableWeights;
+
+
+  // index name
+  private String indexName;
+  // type name of metadata in ES
+  private String metadataType;
+  private String variableSimType;
+
+  /**
+   * Creates a new instance of FeatureBasedSimilarity.
+   *
+   * @param props the Mudrod configuration
+   * @param es    an instantiated {@link ESDriver}
+   * @param spark an instantiated {@link SparkDriver}
+   */
+  public FeatureBasedSimilarity(Properties props, ESDriver es, SparkDriver spark) {
+    super(props, es, spark);
+
+    indexName = props.getProperty(MudrodConstants.ES_INDEX_NAME);
+    metadataType = MudrodConstants.RECOM_METADATA_TYPE;
+    variableSimType = MudrodConstants.METADATA_FEATURE_SIM_TYPE;
+ 
+    // !!! important: change to another MetadataFeature implementation when using other metadata
+    metadata = new PODAACMetadataFeature();
+    metadata.inital();
+    variableTypes = metadata.featureTypes;
+    variableWeights = metadata.featureWeights;
+  }
+
+  @Override
+  public Object execute() {
+    LOG.info("*****************calculating metadata feature based similarity starts******************");
+    startTime = System.currentTimeMillis();
+    es.deleteType(indexName, variableSimType);
+    addMapping(es, indexName, variableSimType);
+
+    featureSimilarity(es);
+    es.refreshIndex();
+    normalizeVariableWeight(es);
+    es.refreshIndex();
+    endTime = System.currentTimeMillis();
+    LOG.info("*****************calculating metadata feature based similarity ends******************Took {}s", (endTime - startTime) / 1000);
+    return null;
+  }
+
+  @Override
+  public Object execute(Object o) {
+    return null;
+  }
+
+  public void featureSimilarity(ESDriver es) {
+
+    es.createBulkProcessor();
+
+    List<Map<String, Object>> metadatas = new ArrayList<>();
+    SearchResponse scrollResp = es.getClient().prepareSearch(indexName).setTypes(metadataType).setScroll(new TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery()).setSize(100).execute()
+        .actionGet();
+    while (true) {
+      for (SearchHit hit : scrollResp.getHits().getHits()) {
+        Map<String, Object> metadataA = hit.getSource();
+        metadatas.add(metadataA);
+      }
+
+      scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
+      if (scrollResp.getHits().getHits().length == 0) {
+        break;
+      }
+    }
+
+    int size = metadatas.size();
+
+    for (int i = 0; i < size; i++) {
+      Map<String, Object> metadataA = metadatas.get(i);
+      String shortNameA = (String) metadataA.get(props.getProperty(MudrodConstants.METADATA_ID));
+      for (int j = 0; j < size; j++) {
+        Map<String, Object> metadataB = metadatas.get(j);
+        String shortNameB = (String) metadataB.get(props.getProperty(MudrodConstants.METADATA_ID));
+
+        try {
+          XContentBuilder contentBuilder = jsonBuilder().startObject();
+          contentBuilder.field("concept_A", shortNameA);
+          contentBuilder.field("concept_B", shortNameB);
+
+          // feature similarity
+          metadata.featureSimilarity(metadataA, metadataB, contentBuilder);
+
+          contentBuilder.endObject();
+
+          IndexRequest ir = new IndexRequest(indexName, variableSimType).source(contentBuilder);
+          es.getBulkProcessor().add(ir);
+
+        } catch (IOException e1) {
+          LOG.error("Error building feature similarity document", e1);
+        }
+
+      }
+    }
+
+    es.destroyBulkProcessor();
+  }
+
+  public static void addMapping(ESDriver es, String index, String type) {
+    XContentBuilder mapping;
+    try {
+      mapping = jsonBuilder().startObject().startObject(type).startObject("properties")
+          .startObject("concept_A").field("type", "string").field("index", "not_analyzed").endObject()
+          .startObject("concept_B").field("type", "string").field("index", "not_analyzed").endObject()
+          .endObject().endObject().endObject();
+
+      es.getClient().admin().indices().preparePutMapping(index).setType(type).setSource(mapping).execute().actionGet();
+    } catch (IOException e) {
+      LOG.error("Error creating mapping for {}/{}", index, type, e);
+    }
+  }
+
+  public void normalizeVariableWeight(ESDriver es) {
+
+    es.createBulkProcessor();
+
+    double totalWeight = 0.0;
+    for (String variable : variableWeights.keySet()) {
+      totalWeight += variableWeights.get(variable);
+    }
+
+    SearchResponse scrollResp = es.getClient().prepareSearch(indexName).setTypes(variableSimType).setScroll(new TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery()).setSize(100).execute()
+        .actionGet();
+    while (true) {
+      for (SearchHit hit : scrollResp.getHits().getHits()) {
+        Map<String, Object> similarities = hit.getSource();
+
+        double totalSim = 0.0;
+        for (String variable : variableWeights.keySet()) {
+          if (similarities.containsKey(variable + "_Sim")) {
+            double value = (double) similarities.get(variable + "_Sim");
+            double weight = variableWeights.get(variable);
+            totalSim += weight * value;
+          }
+        }
+
+        double weight = totalSim / totalWeight;
+        UpdateRequest ur = es.generateUpdateRequest(indexName, variableSimType, hit.getId(), "weight", weight);
+        es.getBulkProcessor().add(ur);
+      }
+
+      scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
+      if (scrollResp.getHits().getHits().length == 0) {
+        break;
+      }
+    }
+
+    es.destroyBulkProcessor();
+  }
+}
\ No newline at end of file
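
A note on the normalization step above: normalizeVariableWeight() folds the
per-feature *_Sim fields written by featureSimilarity() into a single "weight"
score as a weighted average, weight = sum(w_i * sim_i) / sum(w_i), where the
denominator runs over all configured feature weights. A minimal, self-contained
sketch of that aggregation (class name hypothetical; the two weights are taken
from PODAACMetadataFeature below):

    import java.util.HashMap;
    import java.util.Map;

    public class WeightedSimDemo {
      public static void main(String[] args) {
        Map<String, Integer> featureWeights = new HashMap<>();
        featureWeights.put("DatasetParameter-Variable", 5);
        featureWeights.put("DatasetRegion-Region", 4);

        // per-pair similarity values, keyed the same way the *_Sim fields are
        Map<String, Double> sims = new HashMap<>();
        sims.put("DatasetParameter-Variable_Sim", 1.0);
        sims.put("DatasetRegion-Region_Sim", 0.5);

        double totalWeight = 0.0;
        double totalSim = 0.0;
        for (Map.Entry<String, Integer> e : featureWeights.entrySet()) {
          totalWeight += e.getValue();
          Double sim = sims.get(e.getKey() + "_Sim");
          if (sim != null) {
            totalSim += e.getValue() * sim;
          }
        }
        System.out.println(totalSim / totalWeight); // (5*1.0 + 4*0.5) / 9 = 0.777...
      }
    }
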
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/SessionBasedCF.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/SessionBasedCF.java
index 5ea461b..1877b1c 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/SessionBasedCF.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/SessionBasedCF.java
@@ -1,8 +1,8 @@
 /*
- * Licensed under the Apache License, Version 2.0 (the "License"); you
- * may not use this file except in compliance with the License.
+ * Licensed under the Apache License, Version 2.0 (the "License"); you 
+ * may not use this file except in compliance with the License. 
  * You may obtain a copy of the License at
- *
+ * 
  * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
@@ -11,11 +11,17 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+/**
+ * This package includes the preprocessing, processing, and data structures used
+ * by the recommendation module.
+ */
+
 package org.apache.sdap.mudrod.recommendation.process;
 
 import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
+import org.apache.sdap.mudrod.main.MudrodConstants;
 import org.apache.sdap.mudrod.semantics.SemanticAnalyzer;
 import org.apache.sdap.mudrod.utils.LinkageTriple;
 import org.apache.sdap.mudrod.utils.SimilarityUtil;
@@ -29,12 +35,12 @@
 /**
  * ClassName: Recommend metedata based on session level co-occurrence
  */
-public class SessionBasedCF extends DiscoveryStepAbstract {
+public class SessionBasedCF extends DiscoveryStepAbstract {
 
-  private static final Logger LOG = LoggerFactory.getLogger(SessionBasedCF.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SessionBasedCF.class);
 
   /**
-   * Creates a new instance of SessionBasedCF.
+   * Creates a new instance of SessionBasedCF.
    *
    * @param props
    *          the Mudrod configuration
@@ -43,7 +49,7 @@
    * @param spark
    *          the spark drive
    */
-  public SessionBasedCF(Properties props, ESDriver es, SparkDriver spark) {
+  public SessionBasedCF(Properties props, ESDriver es, SparkDriver spark) {
     super(props, es, spark);
   }
 
@@ -53,12 +59,12 @@ public Object execute() {
     startTime = System.currentTimeMillis();
 
     try {
-      String session_metadatFile = props.getProperty("session_metadata_Matrix");
+      String session_metadatFile = props.getProperty(MudrodConstants.METADATA_SESSION_MATRIX_PATH);
       File f = new File(session_metadatFile);
       if (f.exists()) {
         SemanticAnalyzer analyzer = new SemanticAnalyzer(props, es, spark);
         List<LinkageTriple> triples = analyzer.calTermSimfromMatrix(session_metadatFile, SimilarityUtil.SIM_PEARSON, 1);
-        analyzer.saveToES(triples, props.getProperty("indexName"), props.getProperty("metadataSessionBasedSimType"), true, false);
+        analyzer.saveToES(triples, props.getProperty(MudrodConstants.ES_INDEX_NAME), MudrodConstants.METADATA_SESSION_SIM_TYPE, true, false);
       }
 
     } catch (Exception e) {
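
For reference, calTermSimfromMatrix() with SimilarityUtil.SIM_PEARSON scores each
dataset pair by the Pearson correlation of their rows in the session/metadata
matrix. A standalone sketch of that measure, not the actual SimilarityUtil
implementation:

    public class PearsonDemo {
      // Pearson correlation of two equal-length vectors
      public static double pearson(double[] x, double[] y) {
        int n = x.length;
        double sx = 0, sy = 0, sxx = 0, syy = 0, sxy = 0;
        for (int i = 0; i < n; i++) {
          sx += x[i];
          sy += y[i];
          sxx += x[i] * x[i];
          syy += y[i] * y[i];
          sxy += x[i] * y[i];
        }
        double cov = sxy - sx * sy / n;
        double varX = sxx - sx * sx / n;
        double varY = syy - sy * sy / n;
        return cov / Math.sqrt(varX * varY);
      }

      public static void main(String[] args) {
        double[] a = { 1, 0, 2, 3 };
        double[] b = { 2, 0, 4, 6 };
        System.out.println(pearson(a, b)); // 1.0 for perfectly correlated rows
      }
    }
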
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/VariableBasedSimilarity.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/VariableBasedSimilarity.java
deleted file mode 100644
index 9bf0884..0000000
--- a/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/VariableBasedSimilarity.java
+++ /dev/null
@@ -1,375 +0,0 @@
-package org.apache.sdap.mudrod.recommendation.process;
-
-import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
-import org.apache.sdap.mudrod.driver.ESDriver;
-import org.apache.sdap.mudrod.driver.SparkDriver;
-import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.update.UpdateRequest;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.search.SearchHit;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-
-public class VariableBasedSimilarity extends DiscoveryStepAbstract implements Serializable {
-
-  /**
-   *
-   */
-  private static final long serialVersionUID = 1L;
-
-  private static final Logger LOG = LoggerFactory.getLogger(VariableBasedSimilarity.class);
-
-  // a map from variable to its type
-  public Map<String, Integer> variableTypes;
-  public Map<String, Integer> variableWeights;
-
-  private static final Integer VAR_CATEGORICAL = 3;
-  private static final Integer VAR_ORDINAL = 4;
-
-  // index name
-  private String indexName;
-  // type name of metadata in ES
-  private String metadataType;
-  private String variableSimType;
-
-  /**
-   * Creates a new instance of OHEncoder.
-   *
-   * @param props the Mudrod configuration
-   * @param es    an instantiated {@link ESDriver}
-   * @param spark an instantiated {@link SparkDriver}
-   */
-  public VariableBasedSimilarity(Properties props, ESDriver es, SparkDriver spark) {
-    super(props, es, spark);
-
-    indexName = props.getProperty("indexName");
-    metadataType = props.getProperty("recom_metadataType");
-    variableSimType = props.getProperty("metadataCodeSimType");
-    this.inital();
-  }
-
-  @Override
-  public Object execute() {
-    LOG.info("*****************calculating metadata variables based similarity starts******************");
-    startTime = System.currentTimeMillis();
-    es.deleteType(indexName, variableSimType);
-    addMapping(es, indexName, variableSimType);
-
-    VariableBasedSimilarity(es);
-    es.refreshIndex();
-    normalizeVariableWeight(es);
-    es.refreshIndex();
-    endTime = System.currentTimeMillis();
-    LOG.info("*****************calculating metadata variables based similarity ends******************Took {}s", (endTime - startTime) / 1000);
-    return null;
-  }
-
-  @Override
-  public Object execute(Object o) {
-    return null;
-  }
-
-  public void inital() {
-    this.initVariableType();
-    this.initVariableWeight();
-  }
-
-  private void initVariableType() {
-    variableTypes = new HashMap<>();
-
-    variableTypes.put("DatasetParameter-Variable", VAR_CATEGORICAL);
-    variableTypes.put("DatasetRegion-Region", VAR_CATEGORICAL);
-    variableTypes.put("Dataset-ProjectionType", VAR_CATEGORICAL);
-    variableTypes.put("Dataset-ProcessingLevel", VAR_CATEGORICAL);
-    variableTypes.put("DatasetParameter-Topic", VAR_CATEGORICAL);
-    variableTypes.put("DatasetParameter-Term", VAR_CATEGORICAL);
-    variableTypes.put("DatasetParameter-Category", VAR_CATEGORICAL);
-    variableTypes.put("DatasetPolicy-DataFormat", VAR_CATEGORICAL);
-    variableTypes.put("Collection-ShortName", VAR_CATEGORICAL);
-    variableTypes.put("DatasetSource-Source-Type", VAR_CATEGORICAL);
-    variableTypes.put("DatasetSource-Source-ShortName", VAR_CATEGORICAL);
-    variableTypes.put("DatasetSource-Sensor-ShortName", VAR_CATEGORICAL);
-    variableTypes.put("DatasetPolicy-Availability", VAR_CATEGORICAL);
-    variableTypes.put("Dataset-Provider-ShortName", VAR_CATEGORICAL);
-
-    variableTypes.put("Dataset-Derivative-ProcessingLevel", VAR_ORDINAL);
-    variableTypes.put("Dataset-Derivative-TemporalResolution", VAR_ORDINAL);
-    variableTypes.put("Dataset-Derivative-SpatialResolution", VAR_ORDINAL);
-  }
-
-  private void initVariableWeight() {
-    variableWeights = new HashMap<>();
-
-    variableWeights.put("Dataset-Derivative-ProcessingLevel", 5);
-    variableWeights.put("DatasetParameter-Category", 5);
-    variableWeights.put("DatasetParameter-Variable", 5);
-    variableWeights.put("DatasetSource-Sensor-ShortName", 5);
-
-    variableWeights.put("DatasetPolicy-Availability", 4);
-    variableWeights.put("DatasetRegion-Region", 4);
-    variableWeights.put("DatasetSource-Source-Type", 4);
-    variableWeights.put("DatasetSource-Source-ShortName", 4);
-    variableWeights.put("DatasetParameter-Term", 4);
-    variableWeights.put("DatasetPolicy-DataFormat", 4);
-    variableWeights.put("Dataset-Derivative-SpatialResolution", 4);
-    variableWeights.put("Temporal_Covergae", 4);
-
-    variableWeights.put("DatasetParameter-Topic", 3);
-    variableWeights.put("Collection-ShortName", 3);
-    variableWeights.put("Dataset-Derivative-TemporalResolution", 3);
-    variableWeights.put("Spatial_Covergae", 3);
-
-    variableWeights.put("Dataset-ProjectionType", 1);
-    variableWeights.put("Dataset-Provider-ShortName", 1);
-  }
-
-  public void VariableBasedSimilarity(ESDriver es) {
-
-    es.createBulkProcessor();
-
-    List<Map<String, Object>> metadatas = new ArrayList<>();
-    SearchResponse scrollResp = es.getClient().prepareSearch(indexName).setTypes(metadataType).setScroll(new TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery()).setSize(100).execute()
-        .actionGet();
-    while (true) {
-      for (SearchHit hit : scrollResp.getHits().getHits()) {
-        Map<String, Object> metadataA = hit.getSource();
-        metadatas.add(metadataA);
-      }
-
-      scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
-      if (scrollResp.getHits().getHits().length == 0) {
-        break;
-      }
-    }
-
-    for (Map<String, Object> metadataA : metadatas) {
-      String shortNameA = (String) metadataA.get("Dataset-ShortName");
-
-      for (Map<String, Object> metadataB : metadatas) {
-        String shortNameB = (String) metadataB.get("Dataset-ShortName");
-
-        try {
-          XContentBuilder contentBuilder = jsonBuilder().startObject();
-          contentBuilder.field("concept_A", shortNameA);
-          contentBuilder.field("concept_B", shortNameB);
-
-          // spatial similarity
-          this.spatialSimilarity(metadataA, metadataB, contentBuilder);
-          // temporal similarity
-          this.temporalSimilarity(metadataA, metadataB, contentBuilder);
-          // categorical variables similarity
-          this.categoricalVariablesSimilarity(metadataA, metadataB, contentBuilder);
-          // ordinal variables similarity
-          this.ordinalVariablesSimilarity(metadataA, metadataB, contentBuilder);
-
-          contentBuilder.endObject();
-
-          IndexRequest ir = new IndexRequest(indexName, variableSimType).source(contentBuilder);
-          es.getBulkProcessor().add(ir);
-
-        } catch (IOException e1) {
-          e1.printStackTrace();
-        }
-
-      }
-    }
-
-    es.destroyBulkProcessor();
-  }
-
-  /*
-   * refer to P. Frontiera, R. Larson, and J. Radke (2008) A comparison of
-     geometric approaches to assessing spatial similarity for GIR.
-     International Journal of Geographical Information Science,
-     22(3)
-   */
-  public void spatialSimilarity(Map<String, Object> metadataA, Map<String, Object> metadataB, XContentBuilder contentBuilder) throws IOException {
-
-    double topA = (double) metadataA.get("DatasetCoverage-Derivative-NorthLat");
-    double bottomA = (double) metadataA.get("DatasetCoverage-Derivative-SouthLat");
-    double leftA = (double) metadataA.get("DatasetCoverage-Derivative-WestLon");
-    double rightA = (double) metadataA.get("DatasetCoverage-Derivative-EastLon");
-    double areaA = (double) metadataA.get("DatasetCoverage-Derivative-Area");
-
-    double topB = (double) metadataB.get("DatasetCoverage-Derivative-NorthLat");
-    double bottomB = (double) metadataB.get("DatasetCoverage-Derivative-SouthLat");
-    double leftB = (double) metadataB.get("DatasetCoverage-Derivative-WestLon");
-    double rightB = (double) metadataB.get("DatasetCoverage-Derivative-EastLon");
-    double areaB = (double) metadataB.get("DatasetCoverage-Derivative-Area");
-
-    // Intersect area
-    double xOverlap = Math.max(0, Math.min(rightA, rightB) - Math.max(leftA, leftB));
-    double yOverlap = Math.max(0, Math.min(topA, topB) - Math.max(bottomA, bottomB));
-    double overlapArea = xOverlap * yOverlap;
-
-    // Calculate coverage similarity
-    double similarity = 0.0;
-    if (areaA > 0 && areaB > 0) {
-      similarity = (overlapArea / areaA + overlapArea / areaB) * 0.5;
-    }
-
-    contentBuilder.field("Spatial_Covergae_Sim", similarity);
-  }
-
-  public void temporalSimilarity(Map<String, Object> metadataA, Map<String, Object> metadataB, XContentBuilder contentBuilder) throws IOException {
-
-    double similarity;
-    double startTimeA = Double.parseDouble((String) metadataA.get("Dataset-DatasetCoverage-StartTimeLong"));
-    String endTimeAStr = (String) metadataA.get("Dataset-DatasetCoverage-StopTimeLong");
-    double endTimeA;
-    if ("".equals(endTimeAStr)) {
-      endTimeA = System.currentTimeMillis();
-    } else {
-      endTimeA = Double.parseDouble(endTimeAStr);
-    }
-    double timespanA = endTimeA - startTimeA;
-
-    double startTimeB = Double.parseDouble((String) metadataB.get("Dataset-DatasetCoverage-StartTimeLong"));
-    String endTimeBStr = (String) metadataB.get("Dataset-DatasetCoverage-StopTimeLong");
-    double endTimeB;
-    if ("".equals(endTimeBStr)) {
-      endTimeB = System.currentTimeMillis();
-    } else {
-      endTimeB = Double.parseDouble(endTimeBStr);
-    }
-    double timespanB = endTimeB - startTimeB;
-
-    double intersect;
-    if (startTimeB >= endTimeA || endTimeB <= startTimeA) {
-      intersect = 0.0;
-    } else if (startTimeB >= startTimeA && endTimeB <= endTimeA) {
-      intersect = timespanB;
-    } else if (startTimeA >= startTimeB && endTimeA <= endTimeB) {
-      intersect = timespanA;
-    } else {
-      intersect = (startTimeA > startTimeB) ? (endTimeB - startTimeA) : (endTimeA - startTimeB);
-    }
-
-    similarity = intersect / (Math.sqrt(timespanA) * Math.sqrt(timespanB));
-    contentBuilder.field("Temporal_Covergae_Sim", similarity);
-  }
-
-  public void categoricalVariablesSimilarity(Map<String, Object> metadataA, Map<String, Object> metadataB, XContentBuilder contentBuilder) throws IOException {
-
-    for (String variable : variableTypes.keySet()) {
-      Integer type = variableTypes.get(variable);
-      if (type != VAR_CATEGORICAL) {
-        continue;
-      }
-
-      double similarity = 0.0;
-      Object valueA = metadataA.get(variable);
-      Object valueB = metadataB.get(variable);
-      if (valueA instanceof ArrayList) {
-        ArrayList<String> aList = (ArrayList<String>) valueA;
-        ArrayList<String> bList = (ArrayList<String>) valueB;
-        if (aList != null && bList != null) {
-
-          int lengthA = aList.size();
-          List<String> newAList = new ArrayList<>(aList);
-          List<String> newBList = new ArrayList<>(bList);
-          newAList.retainAll(newBList);
-          similarity = newAList.size() / lengthA;
-        }
-
-      } else if (valueA instanceof String) {
-        if (valueA.equals(valueB)) {
-          similarity = 1.0;
-        }
-      }
-
-      contentBuilder.field(variable + "_Sim", similarity);
-    }
-  }
-
-  public void ordinalVariablesSimilarity(Map<String, Object> metadataA, Map<String, Object> metadataB, XContentBuilder contentBuilder) throws IOException {
-    for (String variable : variableTypes.keySet()) {
-      Integer type = variableTypes.get(variable);
-      if (type != VAR_ORDINAL) {
-        continue;
-      }
-
-      double similarity = 0.0;
-      Object valueA = metadataA.get(variable);
-      Object valueB = metadataB.get(variable);
-      if (valueA != null && valueB != null) {
-
-        double a = (double) valueA;
-        double b = (double) valueB;
-        if (a != 0.0) {
-          similarity = 1 - Math.abs(b - a) / a;
-          if (similarity < 0) {
-            similarity = 0.0;
-          }
-        }
-      }
-
-      contentBuilder.field(variable + "_Sim", similarity);
-    }
-  }
-
-  public static void addMapping(ESDriver es, String index, String type) {
-    XContentBuilder Mapping;
-    try {
-      Mapping = jsonBuilder().startObject().startObject(type).startObject("properties").startObject("concept_A").field("type", "string").field("index", "not_analyzed").endObject()
-          .startObject("concept_B").field("type", "string").field("index", "not_analyzed").endObject()
-
-          .endObject().endObject().endObject();
-
-      es.getClient().admin().indices().preparePutMapping(index).setType(type).setSource(Mapping).execute().actionGet();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
-
-  public void normalizeVariableWeight(ESDriver es) {
-
-    es.createBulkProcessor();
-
-    double totalWeight = 0.0;
-    for (String variable : variableWeights.keySet()) {
-      totalWeight += variableWeights.get(variable);
-    }
-
-    SearchResponse scrollResp = es.getClient().prepareSearch(indexName).setTypes(variableSimType).setScroll(new TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery()).setSize(100).execute()
-        .actionGet();
-    while (true) {
-      for (SearchHit hit : scrollResp.getHits().getHits()) {
-        Map<String, Object> similarities = hit.getSource();
-
-        double totalSim = 0.0;
-        for (String variable : variableWeights.keySet()) {
-          if (similarities.containsKey(variable + "_Sim")) {
-            double value = (double) similarities.get(variable + "_Sim");
-            double weight = variableWeights.get(variable);
-            totalSim += weight * value;
-          }
-        }
-
-        double weight = totalSim / totalWeight;
-        UpdateRequest ur = es.generateUpdateRequest(indexName, variableSimType, hit.getId(), "weight", weight);
-        es.getBulkProcessor().add(ur);
-      }
-
-      scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
-      if (scrollResp.getHits().getHits().length == 0) {
-        break;
-      }
-    }
-
-    es.destroyBulkProcessor();
-  }
-}
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/package-info.java
index 557c53b..7b1aeac 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/process/package-info.java
@@ -14,4 +14,4 @@
 /**
  * This package includes the processing required by recommendation module.
  */
-package org.apache.sdap.mudrod.recommendation.process;
+package org.apache.sdap.mudrod.recommendation.process;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/HybridRecommendation.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/HybridRecommendation.java
index f38f8ed..fbdaec5 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/HybridRecommendation.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/HybridRecommendation.java
@@ -16,10 +16,10 @@
 import com.google.gson.Gson;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
-
 import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
+import org.apache.sdap.mudrod.main.MudrodConstants;
 import org.apache.sdap.mudrod.main.MudrodEngine;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
@@ -45,7 +45,7 @@
   // format decimal
   DecimalFormat df = new DecimalFormat("#.00");
   // index name
-  protected static final String INDEX_NAME = "indexName";
+  protected static final String INDEX_NAME = MudrodConstants.ES_INDEX_NAME;
   private static final String WEIGHT = "weight";
 
   /**
@@ -87,13 +87,13 @@ public Object execute(Object o) {
   public JsonObject getRecomDataInJson(String input, int num) {
     JsonObject resultJson = new JsonObject();
 
-    String type = props.getProperty("metadataCodeSimType");
+    String type = MudrodConstants.METADATA_FEATURE_SIM_TYPE;
     Map<String, Double> sortedVariableSimMap = getRelatedData(type, input, num + 10);
 
-    type = props.getProperty("metadataWordTFIDFSimType");
+    type = MudrodConstants.METADATA_WORD_SIM_TYPE;
     Map<String, Double> sortedAbstractSimMap = getRelatedData(type, input, num + 10);
 
-    type = props.getProperty("metadataSessionBasedSimType");
+    type = MudrodConstants.METADATA_SESSION_SIM_TYPE;
     Map<String, Double> sortedSessionSimMap = getRelatedData(type, input, num + 10);
 
     JsonElement variableSimJson = mapToJson(sortedVariableSimMap, num);
@@ -183,8 +183,9 @@ protected JsonElement mapToJson(Map<String, Double> wordweights, int num) {
     Map<String, Double> sortedMap = new HashMap<>();
     try {
       List<LinkedTerm> links = getRelatedDataFromES(type, input, num);
-      for (LinkedTerm link : links) {
-        termsMap.put(link.term, link.weight);
+      int size = links.size();
+      for (int i = 0; i < size; i++) {
+        termsMap.put(links.get(i).term, links.get(i).weight);
       }
 
       sortedMap = sortMapByValue(termsMap); // terms_map will be empty
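
The three maps above (feature-based, word TF-IDF based, and session-based
similarity) are what getRecomDataInJson() merges into the final recommendation
payload. A hedged usage sketch, mirroring the main() of the deleted RecomData
class further below; the HybridRecommendation constructor signature is assumed
from DiscoveryStepAbstract:

    import com.google.gson.JsonObject;
    import org.apache.sdap.mudrod.driver.ESDriver;
    import org.apache.sdap.mudrod.main.MudrodEngine;
    import org.apache.sdap.mudrod.recommendation.structure.HybridRecommendation;

    import java.util.Properties;

    public class HybridRecomDemo {
      public static void main(String[] args) {
        MudrodEngine me = new MudrodEngine();
        Properties props = me.loadConfig();
        ESDriver es = new ESDriver(me.getConfig());

        HybridRecommendation recom = new HybridRecommendation(props, es, null);
        JsonObject json = recom.getRecomDataInJson("AQUARIUS_L3_SSS_SMIA_MONTHLY-CLIMATOLOGY_V4", 10);
        System.out.println(json.toString());
      }
    }
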
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/MetadataFeature.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/MetadataFeature.java
new file mode 100644
index 0000000..0f1a119
--- /dev/null
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/MetadataFeature.java
@@ -0,0 +1,71 @@
+package org.apache.sdap.mudrod.recommendation.structure;
+
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
+
+public abstract class MetadataFeature implements Serializable {
+	
+	protected static final Integer VAR_SPATIAL = 1;
+	protected static final Integer VAR_TEMPORAL = 2;
+	protected static final Integer VAR_CATEGORICAL = 3;
+	protected static final Integer VAR_ORDINAL = 4;
+	
+	public Map<String, Integer> featureTypes = new HashMap<>();
+	public Map<String, Integer> featureWeights = new HashMap<>();
+
+	public void normalizeMetadataVariables(Map<String, Object> metadata, Map<String, Object> updatedValues) {
+
+		this.normalizeSpatialVariables(metadata, updatedValues);
+		this.normalizeTemporalVariables(metadata, updatedValues);
+		this.normalizeOtherVariables(metadata, updatedValues);
+	}
+	
+	public void inital() {
+	    this.initFeatureType();
+	    this.initFeatureWeight();
+	}
+	
+	public void featureSimilarity(Map<String, Object> metadataA, Map<String, Object> metadataB, XContentBuilder contentBuilder) {
+		 this.spatialSimilarity(metadataA, metadataB, contentBuilder);
+		 this.temporalSimilarity(metadataA, metadataB, contentBuilder);
+		 this.categoricalVariablesSimilarity(metadataA, metadataB, contentBuilder);
+		 this.ordinalVariablesSimilarity(metadataA, metadataB, contentBuilder);
+	}
+
+	/* for normalization */
+	public abstract void normalizeSpatialVariables(Map<String, Object> metadata, Map<String, Object> updatedValues);
+
+	public abstract void normalizeTemporalVariables(Map<String, Object> metadata, Map<String, Object> updatedValues);
+
+	public abstract void normalizeOtherVariables(Map<String, Object> metadata, Map<String, Object> updatedValues);
+
+	/* for similarity */
+	public abstract void initFeatureType();
+
+	public abstract void initFeatureWeight();
+	
+	public abstract void spatialSimilarity(Map<String, Object> metadataA, Map<String, Object> metadataB, XContentBuilder contentBuilder);
+  
+	public abstract void temporalSimilarity(Map<String, Object> metadataA, Map<String, Object> metadataB, XContentBuilder contentBuilder);
+
+	public abstract void categoricalVariablesSimilarity(Map<String, Object> metadataA, Map<String, Object> metadataB, XContentBuilder contentBuilder);
+  
+	public abstract void ordinalVariablesSimilarity(Map<String, Object> metadataA, Map<String, Object> metadataB, XContentBuilder contentBuilder);
+}
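
MetadataFeature is effectively a template class: inital() fills
featureTypes/featureWeights through the two abstract init methods, and
featureSimilarity() chains the four abstract per-category similarity callbacks.
A hypothetical minimal subclass, only to illustrate the contract (not part of
this PR):

    import org.apache.sdap.mudrod.recommendation.structure.MetadataFeature;
    import org.elasticsearch.common.xcontent.XContentBuilder;

    import java.util.Map;

    public class MinimalMetadataFeature extends MetadataFeature {
      @Override
      public void initFeatureType() {
        featureTypes.put("Dataset-Topic", VAR_CATEGORICAL);
      }

      @Override
      public void initFeatureWeight() {
        featureWeights.put("Dataset-Topic", 1);
      }

      @Override
      public void normalizeSpatialVariables(Map<String, Object> metadata, Map<String, Object> updatedValues) {
        // no spatial fields in this hypothetical metadata source
      }

      @Override
      public void normalizeTemporalVariables(Map<String, Object> metadata, Map<String, Object> updatedValues) {
      }

      @Override
      public void normalizeOtherVariables(Map<String, Object> metadata, Map<String, Object> updatedValues) {
      }

      @Override
      public void spatialSimilarity(Map<String, Object> a, Map<String, Object> b, XContentBuilder cb) {
      }

      @Override
      public void temporalSimilarity(Map<String, Object> a, Map<String, Object> b, XContentBuilder cb) {
      }

      @Override
      public void categoricalVariablesSimilarity(Map<String, Object> a, Map<String, Object> b, XContentBuilder cb) {
        // driven by the featureTypes/featureWeights filled in the init methods above
      }

      @Override
      public void ordinalVariablesSimilarity(Map<String, Object> a, Map<String, Object> b, XContentBuilder cb) {
      }
    }
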
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/MetadataOpt.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/MetadataTokenizer.java
similarity index 85%
rename from core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/MetadataOpt.java
rename to core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/MetadataTokenizer.java
index cda8d6f..d55f4fa 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/MetadataOpt.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/MetadataTokenizer.java
@@ -18,7 +18,7 @@
 import java.io.Serializable;
 import java.util.*;
 
-public class MetadataOpt implements Serializable {
+public class MetadataTokenizer implements Serializable {
 
   /**
    * 
@@ -31,24 +31,21 @@
   public static final String SPLIT_BLANK = " ";
   public static final String SPLIT_COMMA = ",";
 
-  public MetadataOpt(Properties props) {
+  public MetadataTokenizer(Properties props) {
     indexName = props.getProperty(MudrodConstants.ES_INDEX_NAME);
-    metadataType = props.getProperty("recom_metadataType");
-
-    variables = new ArrayList<>();
-    variables.add("DatasetParameter-Term");
-    variables.add("DatasetParameter-Variable");
-    variables.add("Dataset-Description");
-    variables.add("Dataset-LongName");
+    metadataType = MudrodConstants.RECOM_METADATA_TYPE;
+    
+    String source = props.getProperty(MudrodConstants.SEMANTIC_FIELDS);
+    variables = new ArrayList<>(Arrays.asList(source.split(",")));
   }
 
-  public JavaPairRDD<String, String> loadAll(ESDriver es, SparkDriver spark) throws Exception {
-    List<Tuple2<String, String>> datasetsTokens = this.loadMetadataFromES(es, variables);
+  public JavaPairRDD<String, String> loadAll(ESDriver es, SparkDriver spark, String metadataName) throws Exception {
+    List<Tuple2<String, String>> datasetsTokens = this.loadMetadataFromES(es, variables, metadataName);
     return this.parallizeData(spark, datasetsTokens);
   }
 
-  public JavaPairRDD<String, String> loadAll(ESDriver es, SparkDriver spark, List<String> variables) throws Exception {
-    List<Tuple2<String, String>> datasetsTokens = this.loadMetadataFromES(es, variables);
+  public JavaPairRDD<String, String> loadAll(ESDriver es, SparkDriver spark, List<String> variables, String metadataName) throws Exception {
+    List<Tuple2<String, String>> datasetsTokens = this.loadMetadataFromES(es, variables, metadataName);
     return this.parallizeData(spark, datasetsTokens);
   }
 
@@ -99,7 +96,7 @@ public MetadataOpt(Properties props) {
     return java.util.Arrays.asList(tokens);
   }
 
-  public List<Tuple2<String, String>> loadMetadataFromES(ESDriver es, List<String> variables) throws Exception {
+  public List<Tuple2<String, String>> loadMetadataFromES(ESDriver es, List<String> variables, String metadataName) throws Exception {
 
     SearchResponse scrollResp = es.getClient().prepareSearch(indexName).setTypes(metadataType).setQuery(QueryBuilders.matchAllQuery()).setScroll(new TimeValue(60000)).setSize(100).execute()
         .actionGet();
@@ -109,10 +106,12 @@ public MetadataOpt(Properties props) {
 
       for (SearchHit hit : scrollResp.getHits().getHits()) {
         Map<String, Object> result = hit.getSource();
-        String shortName = (String) result.get("Dataset-ShortName");
+        String shortName = (String) result.get(metadataName);
 
         String filedStr = "";
-        for (String filed : variables) {
+        int size = variables.size();
+        for (int i = 0; i < size; i++) {
+          String filed = variables.get(i);
           Object filedValue = result.get(filed);
 
           if (filedValue != null) {
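
With this change the tokenized fields come from configuration instead of the
hard-coded list. A sketch of how the comma-separated property is consumed (the
literal key is illustrative; the real one sits behind
MudrodConstants.SEMANTIC_FIELDS, and the field names are the ones previously
hard-coded):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.Properties;

    public class SemanticFieldsDemo {
      public static void main(String[] args) {
        Properties props = new Properties();
        // illustrative key/value pair
        props.setProperty("mudrod.metadata.semantic.fields",
            "DatasetParameter-Term,DatasetParameter-Variable,Dataset-Description,Dataset-LongName");

        String source = props.getProperty("mudrod.metadata.semantic.fields");
        List<String> variables = new ArrayList<>(Arrays.asList(source.split(",")));
        System.out.println(variables);
      }
    }
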
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/PODAACMetadataFeature.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/PODAACMetadataFeature.java
new file mode 100644
index 0000000..5c0e20f
--- /dev/null
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/PODAACMetadataFeature.java
@@ -0,0 +1,360 @@
+package org.apache.sdap.mudrod.recommendation.structure;
+
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Pattern;
+
+public class PODAACMetadataFeature extends MetadataFeature {
+
+	public void normalizeSpatialVariables(Map<String, Object> metadata, Map<String, Object> updatedValues) {
+
+		// get spatial resolution
+		Double spatialR;
+		if (metadata.get("Dataset-SatelliteSpatialResolution") != null) {
+			spatialR = (Double) metadata.get("Dataset-SatelliteSpatialResolution");
+		} else {
+			Double gridR = (Double) metadata.get("Dataset-GridSpatialResolution");
+			if (gridR != null) {
+				spatialR = 111 * gridR;
+			} else {
+				spatialR = 25.0;
+			}
+		}
+		updatedValues.put("Dataset-Derivative-SpatialResolution", spatialR);
+
+		// Transform Longitude and calculate coverage area
+		double top = parseDouble((String) metadata.get("DatasetCoverage-NorthLat"));
+		double bottom = parseDouble((String) metadata.get("DatasetCoverage-SouthLat"));
+		double left = parseDouble((String) metadata.get("DatasetCoverage-WestLon"));
+		double right = parseDouble((String) metadata.get("DatasetCoverage-EastLon"));
+
+		if (left > 180) {
+			left = left - 360;
+		}
+
+		if (right > 180) {
+			right = right - 360;
+		}
+
+		if (left == right) {
+			left = -180;
+			right = 180;
+		}
+
+		double area = (top - bottom) * (right - left);
+
+		updatedValues.put("DatasetCoverage-Derivative-EastLon", right);
+		updatedValues.put("DatasetCoverage-Derivative-WestLon", left);
+		updatedValues.put("DatasetCoverage-Derivative-NorthLat", top);
+		updatedValues.put("DatasetCoverage-Derivative-SouthLat", bottom);
+		updatedValues.put("DatasetCoverage-Derivative-Area", area);
+
+		// get processing level
+		String processingLevel = (String) metadata.get("Dataset-ProcessingLevel");
+		double dProLevel = this.getProLevelNum(processingLevel);
+		updatedValues.put("Dataset-Derivative-ProcessingLevel", dProLevel);
+	}
+
+	public void normalizeTemporalVariables(Map<String, Object> metadata, Map<String, Object> updatedValues) {
+		String trStr = (String) metadata.get("Dataset-TemporalResolution");
+		if ("".equals(trStr)) {
+			trStr = (String) metadata.get("Dataset-TemporalRepeat");
+		}
+
+		updatedValues.put("Dataset-Derivative-TemporalResolution", covertTimeUnit(trStr));
+	}
+
+	public void normalizeOtherVariables(Map<String, Object> metadata, Map<String, Object> updatedValues) {
+		String shortname = (String) metadata.get("Dataset-ShortName");
+		double versionNUm = getVersionNum(shortname);
+		updatedValues.put("Dataset-Derivative-VersionNum", versionNUm);
+	}
+
+	private Double covertTimeUnit(String str) {
+		Double timeInHour;
+		if (str.contains("Hour")) {
+			timeInHour = Double.parseDouble(str.split(" ")[0]);
+		} else if (str.contains("Day")) {
+			timeInHour = Double.parseDouble(str.split(" ")[0]) * 24;
+		} else if (str.contains("Week")) {
+			timeInHour = Double.parseDouble(str.split(" ")[0]) * 24 * 7;
+		} else if (str.contains("Month")) {
+			timeInHour = Double.parseDouble(str.split(" ")[0]) * 24 * 30;
+		} else if (str.contains("Year")) {
+			timeInHour = Double.parseDouble(str.split(" ")[0]) * 24 * 365;
+		} else {
+			timeInHour = 0.0;
+		}
+
+		return timeInHour;
+	}
+
+	private double parseDouble(String strNumber) {
+		if (strNumber != null && strNumber.length() > 0) {
+			try {
+				return Double.parseDouble(strNumber);
+			} catch (Exception e) {
+				return -1;
+			}
+		} else
+			return 0;
+	}
+
+	public Double getProLevelNum(String pro) {
+		if (pro == null) {
+			return 1.0;
+		}
+		Double proNum = 0.0;
+		Pattern p = Pattern.compile(".*[a-zA-Z].*");
+		if (pro.matches("[0-9]{1}[a-zA-Z]{1}")) {
+			proNum = Double.parseDouble(pro.substring(0, 1));
+		} else if (p.matcher(pro).find()) {
+			proNum = 1.0;
+		} else {
+			proNum = Double.parseDouble(pro);
+		}
+
+		return proNum;
+	}
+
+	private Double getVersionNum(String version) {
+		if (version == null) {
+			return 0.0;
+		}
+		Double versionNum = 0.0;
+		Pattern p = Pattern.compile(".*[a-zA-Z].*");
+		if ("Operational/Near-Real-Time".equals(version)) {
+			versionNum = 2.0;
+		} else if (version.matches("[0-9]{1}[a-zA-Z]{1}")) {
+			versionNum = Double.parseDouble(version.substring(0, 1));
+		} else if (p.matcher(version).find()) {
+			versionNum = 0.0;
+		} else {
+			versionNum = Double.parseDouble(version);
+			if (versionNum >= 5) {
+				versionNum = 20.0;
+			}
+		}
+		return versionNum;
+	}
+
+	@Override
+	public void initFeatureType() {
+		featureTypes.put("DatasetParameter-Variable", VAR_CATEGORICAL);
+		featureTypes.put("DatasetRegion-Region", VAR_CATEGORICAL);
+		featureTypes.put("Dataset-ProjectionType", VAR_CATEGORICAL);
+		featureTypes.put("Dataset-ProcessingLevel", VAR_CATEGORICAL);
+		featureTypes.put("DatasetParameter-Topic", VAR_CATEGORICAL);
+		featureTypes.put("DatasetParameter-Term", VAR_CATEGORICAL);
+		featureTypes.put("DatasetParameter-Category", VAR_CATEGORICAL);
+		featureTypes.put("DatasetPolicy-DataFormat", VAR_CATEGORICAL);
+		featureTypes.put("Collection-ShortName", VAR_CATEGORICAL);
+		featureTypes.put("DatasetSource-Source-Type", VAR_CATEGORICAL);
+		featureTypes.put("DatasetSource-Source-ShortName", VAR_CATEGORICAL);
+		featureTypes.put("DatasetSource-Sensor-ShortName", VAR_CATEGORICAL);
+		featureTypes.put("DatasetPolicy-Availability", VAR_CATEGORICAL);
+		featureTypes.put("Dataset-Provider-ShortName", VAR_CATEGORICAL);
+		featureTypes.put("Dataset-Derivative-ProcessingLevel", VAR_ORDINAL);
+		featureTypes.put("Dataset-Derivative-TemporalResolution", VAR_ORDINAL);
+		featureTypes.put("Dataset-Derivative-SpatialResolution", VAR_ORDINAL);
+	}
+
+	@Override
+	public void initFeatureWeight() {
+		featureWeights.put("Dataset-Derivative-ProcessingLevel", 5);
+		featureWeights.put("DatasetParameter-Category", 5);
+		featureWeights.put("DatasetParameter-Variable", 5);
+		featureWeights.put("DatasetSource-Sensor-ShortName", 5);
+		featureWeights.put("DatasetPolicy-Availability", 4);
+		featureWeights.put("DatasetRegion-Region", 4);
+		featureWeights.put("DatasetSource-Source-Type", 4);
+		featureWeights.put("DatasetSource-Source-ShortName", 4);
+		featureWeights.put("DatasetParameter-Term", 4);
+		featureWeights.put("DatasetPolicy-DataFormat", 4);
+		featureWeights.put("Dataset-Derivative-SpatialResolution", 4);
+		featureWeights.put("Temporal_Covergae", 4);
+		featureWeights.put("DatasetParameter-Topic", 3);
+		featureWeights.put("Collection-ShortName", 3);
+		featureWeights.put("Dataset-Derivative-TemporalResolution", 3);
+		featureWeights.put("Spatial_Covergae", 3);
+		featureWeights.put("Dataset-ProjectionType", 1);
+		featureWeights.put("Dataset-Provider-ShortName", 1);
+	}
+
+	@Override
+	public void spatialSimilarity(Map<String, Object> metadataA, Map<String, Object> metadataB,
+			XContentBuilder contentBuilder) {
+		double topA = (double) metadataA.get("DatasetCoverage-Derivative-NorthLat");
+		double bottomA = (double) metadataA.get("DatasetCoverage-Derivative-SouthLat");
+		double leftA = (double) metadataA.get("DatasetCoverage-Derivative-WestLon");
+		double rightA = (double) metadataA.get("DatasetCoverage-Derivative-EastLon");
+		double areaA = (double) metadataA.get("DatasetCoverage-Derivative-Area");
+
+		double topB = (double) metadataB.get("DatasetCoverage-Derivative-NorthLat");
+		double bottomB = (double) metadataB.get("DatasetCoverage-Derivative-SouthLat");
+		double leftB = (double) metadataB.get("DatasetCoverage-Derivative-WestLon");
+		double rightB = (double) metadataB.get("DatasetCoverage-Derivative-EastLon");
+		double areaB = (double) metadataB.get("DatasetCoverage-Derivative-Area");
+
+		// Intersect area
+		double xOverlap = Math.max(0, Math.min(rightA, rightB) - Math.max(leftA, leftB));
+		double yOverlap = Math.max(0, Math.min(topA, topB) - Math.max(bottomA, bottomB));
+		double overlapArea = xOverlap * yOverlap;
+
+		// Calculate coverage similarity
+		double similarity = 0.0;
+		if (areaA > 0 && areaB > 0) {
+			similarity = (overlapArea / areaA + overlapArea / areaB) * 0.5;
+		}
+
+		try {
+			contentBuilder.field("Spatial_Covergae_Sim", similarity);
+		} catch (IOException e) {
+			e.printStackTrace();
+		}
+	}
+
+	@Override
+	public void temporalSimilarity(Map<String, Object> metadataA, Map<String, Object> metadataB,
+			XContentBuilder contentBuilder) {
+		double similarity = 0.0;
+		double startTimeA = Double.parseDouble((String) metadataA.get("Dataset-DatasetCoverage-StartTimeLong"));
+		String endTimeAStr = (String) metadataA.get("Dataset-DatasetCoverage-StopTimeLong");
+		double endTimeA = 0.0;
+		if ("".equals(endTimeAStr)) {
+			endTimeA = System.currentTimeMillis();
+		} else {
+			endTimeA = Double.parseDouble(endTimeAStr);
+		}
+		double timespanA = endTimeA - startTimeA;
+
+		double startTimeB = Double.parseDouble((String) metadataB.get("Dataset-DatasetCoverage-StartTimeLong"));
+		String endTimeBStr = (String) metadataB.get("Dataset-DatasetCoverage-StopTimeLong");
+		double endTimeB = 0.0;
+		if ("".equals(endTimeBStr)) {
+			endTimeB = System.currentTimeMillis();
+		} else {
+			endTimeB = Double.parseDouble(endTimeBStr);
+		}
+		double timespanB = endTimeB - startTimeB;
+
+		double intersect = 0.0;
+		if (startTimeB >= endTimeA || endTimeB <= startTimeA) {
+			intersect = 0.0;
+		} else if (startTimeB >= startTimeA && endTimeB <= endTimeA) {
+			intersect = timespanB;
+		} else if (startTimeA >= startTimeB && endTimeA <= endTimeB) {
+			intersect = timespanA;
+		} else {
+			intersect = (startTimeA > startTimeB) ? (endTimeB - startTimeA) : (endTimeA - startTimeB);
+		}
+
+		similarity = intersect / (Math.sqrt(timespanA) * Math.sqrt(timespanB));
+		try {
+			contentBuilder.field("Temporal_Covergae_Sim", similarity);
+		} catch (IOException e) {
+			e.printStackTrace();
+		}
+	}
+
+	@Override
+	public void categoricalVariablesSimilarity(Map<String, Object> metadataA, Map<String, Object> metadataB,
+			XContentBuilder contentBuilder) {
+		for (String variable : featureTypes.keySet()) {
+			Integer type = featureTypes.get(variable);
+			if (type != VAR_CATEGORICAL) {
+				continue;
+			}
+
+			double similarity = 0.0;
+			Object valueA = metadataA.get(variable);
+			Object valueB = metadataB.get(variable);
+			if (valueA instanceof ArrayList) {
+				ArrayList<String> aList = (ArrayList<String>) valueA;
+				ArrayList<String> bList = (ArrayList<String>) valueB;
+				if (aList != null && bList != null) {
+
+					int lengthA = aList.size();
+					List<String> newAList = new ArrayList<>(aList);
+					List<String> newBList = new ArrayList<>(bList);
+					newAList.retainAll(newBList);
+					similarity = (double) newAList.size() / lengthA;
+				}
+
+			} else if (valueA instanceof String) {
+				if (valueA.equals(valueB)) {
+					similarity = 1.0;
+				}
+			}
+
+			try {
+				contentBuilder.field(variable + "_Sim", similarity);
+			} catch (IOException e) {
+				e.printStackTrace();
+			}
+		}
+	}
+
+	@Override
+	public void ordinalVariablesSimilarity(Map<String, Object> metadataA, Map<String, Object> metadataB,
+			XContentBuilder contentBuilder) {
+		for (String variable : featureTypes.keySet()) {
+			Integer type = featureTypes.get(variable);
+			if (type != VAR_ORDINAL) {
+				continue;
+			}
+
+			double similarity = 0.0;
+			Object valueA = metadataA.get(variable);
+			Object valueB = metadataB.get(variable);
+			if (valueA != null && valueB != null) {
+
+				double a = (double) valueA;
+				double b = (double) valueB;
+				if (a != 0.0) {
+					similarity = 1 - Math.abs(b - a) / a;
+					if (similarity < 0) {
+						similarity = 0.0;
+					}
+				}
+			}
+
+			try {
+				contentBuilder.field(variable + "_Sim", similarity);
+			} catch (IOException e) {
+				e.printStackTrace();
+			}
+		}
+	}
+}
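
The spatial measure above is overlap-area based: sim = 0.5 * (I/areaA + I/areaB),
where I is the intersection area of the two bounding boxes (the approach follows
the Frontiera, Larson and Radke (2008) comparison cited in the deleted
VariableBasedSimilarity). A small worked example using the same arithmetic:

    public class SpatialSimDemo {
      public static void main(String[] args) {
        // box A: lon 0..10, lat 0..10 ; box B: lon 5..15, lat 5..15
        double leftA = 0, rightA = 10, bottomA = 0, topA = 10, areaA = 100;
        double leftB = 5, rightB = 15, bottomB = 5, topB = 15, areaB = 100;

        double xOverlap = Math.max(0, Math.min(rightA, rightB) - Math.max(leftA, leftB)); // 5
        double yOverlap = Math.max(0, Math.min(topA, topB) - Math.max(bottomA, bottomB)); // 5
        double overlapArea = xOverlap * yOverlap; // 25

        double similarity = (overlapArea / areaA + overlapArea / areaB) * 0.5;
        System.out.println(similarity); // 0.25
      }
    }
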
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/RecomData.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/RecomData.java
deleted file mode 100644
index 7bdbd0d..0000000
--- a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/RecomData.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License"); you 
- * may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sdap.mudrod.recommendation.structure;
-
-import com.google.gson.Gson;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-
-import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
-import org.apache.sdap.mudrod.driver.ESDriver;
-import org.apache.sdap.mudrod.driver.SparkDriver;
-import org.apache.sdap.mudrod.main.MudrodEngine;
-import org.elasticsearch.action.search.SearchRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.sort.SortOrder;
-
-import java.io.IOException;
-import java.text.DecimalFormat;
-import java.util.*;
-
-/**
- * This class is used to test recommendation result similarity and session-level
- * similarity
- */
-public class RecomData extends DiscoveryStepAbstract {
-
-  /**
-   *
-   */
-  private static final long serialVersionUID = 1L;
-  protected transient List<LinkedTerm> termList = new ArrayList<>();
-  DecimalFormat df = new DecimalFormat("#.00");
-  protected static final String INDEX_NAME = "indexName";
-  private static final String WEIGHT = "weight";
-
-  class LinkedTerm {
-    public String term = null;
-    public double weight = 0;
-    public String model = null;
-
-    public LinkedTerm(String str, double w, String m) {
-      term = str;
-      weight = w;
-      model = m;
-    }
-  }
-
-  public RecomData(Properties props, ESDriver es, SparkDriver spark) {
-    super(props, es, spark);
-  }
-
-  @Override
-  public Object execute() {
-    return null;
-  }
-
-  @Override
-  public Object execute(Object o) {
-    return null;
-  }
-
-  public JsonObject getRecomDataInJson(String input, int num) {
-    String type = props.getProperty("metadataTermTFIDFSimType");
-    Map<String, Double> sortedOBSimMap = getRelatedData(type, input, num + 5);
-    JsonElement linkedJson = mapToJson(sortedOBSimMap, num);
-
-    // type = props.getProperty("metadataTermTFIDFSimType");
-    type = props.getProperty("metadataCodeSimType");
-
-    Map<String, Double> sortedMBSimMap = getRelatedData(type, input, num + 5);
-    JsonElement relatedJson = mapToJson(sortedMBSimMap, num);
-
-    JsonObject json = new JsonObject();
-
-    json.add("TFIDFSim", linkedJson);
-    json.add("TopicSim", relatedJson);
-
-    return json;
-  }
-
-  protected JsonElement mapToJson(Map<String, Double> wordweights, int num) {
-    Gson gson = new Gson();
-
-    List<JsonObject> nodes = new ArrayList<>();
-    Set<String> words = wordweights.keySet();
-    int i = 0;
-    for (String wordB : words) {
-      JsonObject node = new JsonObject();
-      node.addProperty("name", wordB);
-      node.addProperty("weight", wordweights.get(wordB));
-      nodes.add(node);
-
-      i += 1;
-      if (i >= num) {
-        break;
-      }
-    }
-
-    String nodesJson = gson.toJson(nodes);
-    JsonElement nodesElement = gson.fromJson(nodesJson, JsonElement.class);
-
-    return nodesElement;
-  }
-
-  public Map<String, Double> getRelatedData(String type, String input, int num) {
-    termList = new ArrayList<>();
-    Map<String, Double> termsMap = new HashMap<>();
-    Map<String, Double> sortedMap = new HashMap<>();
-    try {
-      List<LinkedTerm> links = getRelatedDataFromES(type, input, num);
-      for (LinkedTerm link : links) {
-        termsMap.put(link.term, link.weight);
-      }
-
-      sortedMap = sortMapByValue(termsMap); // terms_map will be empty
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-
-    return sortedMap;
-  }
-
-  public List<LinkedTerm> getRelatedDataFromES(String type, String input, int num) {
-    SearchRequestBuilder builder = es.getClient().prepareSearch(props.getProperty(INDEX_NAME)).setTypes(type).setQuery(QueryBuilders.termQuery("concept_A", input)).addSort(WEIGHT, SortOrder.DESC)
-            .setSize(num);
-
-    SearchResponse usrhis = builder.execute().actionGet();
-
-    for (SearchHit hit : usrhis.getHits().getHits()) {
-      Map<String, Object> result = hit.getSource();
-      String conceptB = (String) result.get("concept_B");
-
-      if (!conceptB.equals(input)) {
-        LinkedTerm lTerm = new LinkedTerm(conceptB, (double) result.get(WEIGHT), type);
-        termList.add(lTerm);
-      }
-    }
-
-    return termList;
-  }
-
-  public Map<String, Double> sortMapByValue(Map<String, Double> passedMap) {
-    List<String> mapKeys = new ArrayList<>(passedMap.keySet());
-    List<Double> mapValues = new ArrayList<>(passedMap.values());
-    Collections.sort(mapValues, Collections.reverseOrder());
-    Collections.sort(mapKeys, Collections.reverseOrder());
-
-    LinkedHashMap<String, Double> sortedMap = new LinkedHashMap<>();
-
-    Iterator<Double> valueIt = mapValues.iterator();
-    while (valueIt.hasNext()) {
-      Object val = valueIt.next();
-      Iterator<String> keyIt = mapKeys.iterator();
-
-      while (keyIt.hasNext()) {
-        Object key = keyIt.next();
-        String comp1 = passedMap.get(key).toString();
-        String comp2 = val.toString();
-
-        if (comp1.equals(comp2)) {
-          passedMap.remove(key);
-          mapKeys.remove(key);
-          sortedMap.put((String) key, (Double) val);
-          break;
-        }
-      }
-    }
-    return sortedMap;
-  }
-
-  public static void main(String[] args) throws IOException {
-
-    MudrodEngine me = new MudrodEngine();
-    Properties props = me.loadConfig();
-    ESDriver es = new ESDriver(me.getConfig());
-    RecomData test = new RecomData(props, es, null);
-
-    String input = "AQUARIUS_L3_SSS_SMIA_MONTHLY-CLIMATOLOGY_V4";
-    JsonObject json = test.getRecomDataInJson(input, 10);
-
-    System.out.println(json.toString());
-  }
-}
diff --git a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/package-info.java
index 88ec378..c9095c7 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/recommendation/structure/package-info.java
@@ -14,4 +14,4 @@
 /**
  * This package includes the data structure required by recommendation module.
  */
-package org.apache.sdap.mudrod.recommendation.structure;
+package org.apache.sdap.mudrod.recommendation.structure;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/semantics/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/semantics/package-info.java
index 86912f2..d543e98 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/semantics/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/semantics/package-info.java
@@ -15,4 +15,4 @@
  * This package includes SVD transformation function, methods of calculating
  * similarity from CSV, and saving triples into Elasticsearch
  */
-package org.apache.sdap.mudrod.semantics;
+package org.apache.sdap.mudrod.semantics;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ClickstreamImporter.java b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ClickstreamImporter.java
index 779105f..7630075 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ClickstreamImporter.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ClickstreamImporter.java
@@ -17,6 +17,7 @@
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
 import org.apache.sdap.mudrod.main.MudrodConstants;
+
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 
@@ -78,7 +79,7 @@ public void importfromCSVtoES() {
     String cvsSplitBy = ",";
 
     try {
-      br = new BufferedReader(new FileReader(props.getProperty("clickstreamMatrix")));
+      br = new BufferedReader(new FileReader(props.getProperty(MudrodConstants.CLICKSTREAM_PATH)));
       String line = br.readLine();
       // first item needs to be skipped
       String[] dataList = line.split(cvsSplitBy);
@@ -110,4 +111,4 @@ public void importfromCSVtoES() {
     }
   }
 
-}
+}
\ No newline at end of file
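
importfromCSVtoES() expects the clickstream matrix CSV to carry dataset names in
its header row, with the leading corner cell skipped. A minimal sketch of that
header parse (file path hypothetical; Mudrod resolves it via
props.getProperty(MudrodConstants.CLICKSTREAM_PATH)):

    import java.io.BufferedReader;
    import java.io.FileReader;
    import java.io.IOException;
    import java.util.Arrays;

    public class ClickstreamHeaderDemo {
      public static void main(String[] args) throws IOException {
        try (BufferedReader br = new BufferedReader(new FileReader("clickstream_matrix.csv"))) {
          String header = br.readLine();
          String[] dataList = header.split(",");
          // the first item is the corner cell and needs to be skipped
          String[] datasets = Arrays.copyOfRange(dataList, 1, dataList.length);
          System.out.println(datasets.length + " datasets in the matrix header");
        }
      }
    }
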
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ssearch/Ranker.java b/core/src/main/java/org/apache/sdap/mudrod/ssearch/Ranker.java
index 98522b4..a4f7be9 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/Ranker.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/Ranker.java
@@ -42,7 +42,7 @@
   public Ranker(Properties props, ESDriver es, SparkDriver spark, String learnerType) {
     super(props, es, spark);
     this.learnerType = learnerType;
-    le = new Learner(learnerType, spark, props.getProperty(MudrodConstants.SVM_SGD_MODEL));
+    le = new Learner(learnerType, spark, props.getProperty(MudrodConstants.RANKING_MODEL));
   }
 
   /**
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ssearch/Searcher.java b/core/src/main/java/org/apache/sdap/mudrod/ssearch/Searcher.java
index 8791bf4..520e911 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/Searcher.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/Searcher.java
@@ -16,17 +16,18 @@
 import com.google.gson.Gson;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
-
 import org.apache.sdap.mudrod.discoveryengine.MudrodAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
 import org.apache.sdap.mudrod.ssearch.structure.SResult;
+
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.sort.SortBuilder;
 import org.elasticsearch.search.sort.SortOrder;
 
 import java.io.Serializable;
@@ -105,7 +106,6 @@ public Double exists(ArrayList<String> strList, String query) {
    * @param type           type name in Elasticsearch
    * @param query          regular query string
    * @param queryOperator query mode- query, or, and
-   * @param rankOption a keyword used to dertermine the ElasticSearch SortOrder 
    * @return a list of search result
    */
   @SuppressWarnings("unchecked")
@@ -186,7 +186,7 @@ public Double exists(ArrayList<String> strList, String query) {
       }
 
       ArrayList<String> longdate = (ArrayList<String>) result.get("DatasetCitation-ReleaseDateLong");
-      Date date = new Date(Long.valueOf(longdate.get(0)));
+      Date date = new Date(Long.valueOf(longdate.get(0)).longValue());
       SimpleDateFormat df2 = new SimpleDateFormat("MM/dd/yyyy");
       String dateText = df2.format(date);
 
@@ -248,7 +248,6 @@ public Double exists(ArrayList<String> strList, String query) {
    * @param type           type name in Elasticsearch
    * @param query          regular query string
    * @param queryOperator query mode- query, or, and
-   * @param rankOption a keyword used to dertermine the ElasticSearch SortOrder 
    * @param rr             selected ranking method
    * @return search results
    */
@@ -260,19 +259,19 @@ public String ssearch(String index, String type, String query, String queryOpera
     Gson gson = new Gson();
     List<JsonObject> fileList = new ArrayList<>();
 
-    for (SResult aLi : li) {
+    for (int i = 0; i < li.size(); i++) {
       JsonObject file = new JsonObject();
-      file.addProperty("Short Name", (String) SResult.get(aLi, "shortName"));
-      file.addProperty("Long Name", (String) SResult.get(aLi, "longName"));
-      file.addProperty("Topic", (String) SResult.get(aLi, "topic"));
-      file.addProperty("Description", (String) SResult.get(aLi, "description"));
-      file.addProperty("Release Date", (String) SResult.get(aLi, "relase_date"));
+      file.addProperty("Short Name", (String) SResult.get(li.get(i), "shortName"));
+      file.addProperty("Long Name", (String) SResult.get(li.get(i), "longName"));
+      file.addProperty("Topic", (String) SResult.get(li.get(i), "topic"));
+      file.addProperty("Description", (String) SResult.get(li.get(i), "description"));
+      file.addProperty("Release Date", (String) SResult.get(li.get(i), "relase_date"));
       fileList.add(file);
 
-      file.addProperty("Start/End Date", (String) SResult.get(aLi, "startDate") + " - " + (String) SResult.get(aLi, "endDate"));
-      file.addProperty("Processing Level", (String) SResult.get(aLi, "processingLevel"));
+      file.addProperty("Start/End Date", (String) SResult.get(li.get(i), "startDate") + " - " + (String) SResult.get(li.get(i), "endDate"));
+      file.addProperty("Processing Level", (String) SResult.get(li.get(i), "processingLevel"));
 
-      file.addProperty("Sensor", (String) SResult.get(aLi, "sensors"));
+      file.addProperty("Sensor", (String) SResult.get(li.get(i), "sensors"));
     }
     JsonElement fileListElement = gson.toJsonTree(fileList);
 
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ssearch/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/ssearch/package-info.java
index b635b64..70d05f4 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/package-info.java
@@ -15,4 +15,4 @@
  * This package includes classes for semantic search, such as click stream importer,
  * query dispatcher, semantic searcher, and ranker (ranksvm, ordinal/linear regression)
  */
-package org.apache.sdap.mudrod.ssearch;
+package org.apache.sdap.mudrod.ssearch;
\ No newline at end of file
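
The Searcher.ssearch() hunk a little further up assembles its response by
filling Gson JsonObjects and serializing the list with toJsonTree(). A
minimal self-contained sketch of that assembly; the dataset values are
invented:

    import com.google.gson.Gson;
    import com.google.gson.JsonElement;
    import com.google.gson.JsonObject;
    import java.util.ArrayList;
    import java.util.List;

    public class GsonListDemo {
      public static void main(String[] args) {
        Gson gson = new Gson();
        List<JsonObject> fileList = new ArrayList<>();

        JsonObject file = new JsonObject();
        file.addProperty("Short Name", "DEMO_DATASET_V1"); // sample value
        file.addProperty("Release Date", "03/07/2018");    // sample value
        fileList.add(file);

        // toJsonTree turns the accumulated objects into one JSON array.
        JsonElement fileListElement = gson.toJsonTree(fileList);
        System.out.println(fileListElement.toString());
      }
    }
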
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/DataGenerator.java b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/DataGenerator.java
index 4e43ec8..ba3c88e 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/DataGenerator.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/DataGenerator.java
@@ -20,7 +20,6 @@
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 
 /**
  * SVMData is a program designed to create appropriate input data for the RankSVM
@@ -34,10 +33,10 @@
   private static boolean isMultFiles;
 
   private static String[] myHeader;
-  private static List<List<String>> myMasterList = new ArrayList<>();
+  private static List<List<String>> myMasterList = new ArrayList<List<String>>();
 
   // HashMap used for comparing evaluation classes
-  public static final Map<String, Integer> map1 = new HashMap<>();
+  public static final HashMap<String, Integer> map1 = new HashMap<String, Integer>();
 
   static {
     map1.put("Excellent", 7);
@@ -83,7 +82,7 @@ public static void parseFile() {
     try {
       String sourceDir = mySourceDir;
 
-      if (isMultFiles) // Case where multiple files have to be processed
+      if (isMultFiles == true) // Case where multiple files have to be processed
       {
         // Iterate over files in directory 
         File directory = new File(sourceDir);
@@ -136,7 +135,7 @@ public static void parseFile() {
    * @param arr the parsed contents of the original CSV file
    */
   public static void calculateVec(String[][] arr) {
-    List<List<String>> listofLists = new ArrayList<>(); // Holds calculations 
+    List<List<String>> listofLists = new ArrayList<List<String>>(); // Holds calculations 
 
     int rowStart = 1;
     for (int row = rowStart; row < arr.length; row++) // Start at row 1 because row 0 is heading lol
@@ -145,6 +144,7 @@ public static void calculateVec(String[][] arr) {
         List<String> colList = new ArrayList<String>(); // create vector to store all values inside of a column, which is stored inside 2D vector
         for (int col = 0; col < arr[0].length - 1; col++) // Columns go until the next to last column
         {
+          //System.out.println(col + " " + arr[row][col]);
           // Extract double value from each cell
           double x1 = Double.parseDouble(arr[row][col]);
           double x2 = Double.parseDouble(arr[row + i][col]);
@@ -209,8 +209,8 @@ public static int compareEvaluation(String eval1, String eval2) {
    */
   public static List<List<String>> equalizeList(List<List<String>> rawList) {
     // Create two sets - one containing row index for +1 and the other for -1
-    List<Integer> pos1List = new ArrayList<>();
-    List<Integer> neg1List = new ArrayList<>();
+    List<Integer> pos1List = new ArrayList<Integer>();
+    List<Integer> neg1List = new ArrayList<Integer>();
 
     for (int i = 0; i < rawList.size(); i++) // Iterate through all rows to get indexes
     {
@@ -276,7 +276,9 @@ public static int compareEvaluation(String eval1, String eval2) {
   public static void storeHead(String[][] arr) {
     myHeader = new String[arr[0].length]; // Reside private variable
 
-    System.arraycopy(arr[0], 0, myHeader, 0, arr[0].length);
+    for (int col = 0; col < arr[0].length; col++) {
+      myHeader[col] = arr[0][col];
+    }
   }
 
   /**
@@ -294,9 +296,10 @@ public static void writeCSVfile(List<List<String>> list) {
       if (!alreadyExists) {
         csvOutput.writeNext(myHeader); // Write the text headers first before data
 
-        for (List<String> aList : list) { // Iterate through all rows in 2D array
-          String[] temp = new String[aList.size()]; // Convert row array list in 2D array to regular string array
-          temp = aList.toArray(temp);
+        for (int i = 0; i < list.size(); i++) // Iterate through all rows in 2D array
+        {
+          String[] temp = new String[list.get(i).size()]; // Convert row array list in 2D array to regular string array
+          temp = list.get(i).toArray(temp);
           csvOutput.writeNext(temp); // Write this array to the file
         }
       }
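
DataGenerator turns graded search results into pairwise RankSVM training
rows: for two results with different evaluation grades it emits the feature
difference labeled +1 or -1, and equalizeList() then balances the two
classes. A standalone sketch of the pairwise step, assuming each row is
(feature1, feature2, grade) with grades as in map1 above:

    import java.util.ArrayList;
    import java.util.List;

    public class PairwiseDemo {
      public static void main(String[] args) {
        double[][] rows = { {0.9, 0.1, 7},   // "Excellent" -> grade 7
                            {0.4, 0.3, 3} }; // "OK"        -> grade 3
        List<double[]> pairs = new ArrayList<>();
        for (int a = 0; a < rows.length; a++) {
          for (int b = a + 1; b < rows.length; b++) {
            double ga = rows[a][2], gb = rows[b][2];
            if (ga == gb) continue;          // equal grades carry no order
            double label = ga > gb ? 1 : -1;
            pairs.add(new double[] { rows[a][0] - rows[b][0],
                                     rows[a][1] - rows[b][1], label });
          }
        }
        for (double[] p : pairs)
          System.out.printf("%+.0f : (%.2f, %.2f)%n", p[2], p[0], p[1]);
      }
    }
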
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/Evaluator.java b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/Evaluator.java
index 0efb82f..ad7f159 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/Evaluator.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/Evaluator.java
@@ -28,12 +28,12 @@
    *
    * @param list a list of integer with each integer element indicating
    *             the performance at its position
-   * @param k    the number of elements needed to be included in the calculation
+   * @param K    the number of elements needed to be included in the calculation
    * @return NDCG score
    */
-  public double getNDCG(int[] list, int k) {
-    double dcg = this.getDCG(list, k);
-    double idcg = this.getIDCG(list, k);
+  public double getNDCG(int[] list, int K) {
+    double dcg = this.getDCG(list, K);
+    double idcg = this.getIDCG(list, K);
     double ndcg = 0.0;
     if (idcg > 0.0) {
       ndcg = dcg / idcg;
@@ -46,21 +46,22 @@ public double getNDCG(int[] list, int k) {
    *
    * @param list a list of integer with each integer element indicating
    *             the performance at its position
-   * @param k    the number of elements needed to be included in the calculation
+   * @param K    the number of elements needed to be included in the calculation
    * @return precision at K
    */
-  public double getPrecision(int[] list, int k) {
+  public double getPrecision(int[] list, int K) {
     int size = list.length;
-    if (size == 0 || k == 0) {
+    if (size == 0 || K == 0) {
       return 0;
     }
 
-    if (k > size) {
-      k = size;
+    if (K > size) {
+      K = size;
     }
 
-    int relDocNum = this.getRelevantDocNum(list, k);
-    return (double) relDocNum / (double) k;
+    int rel_doc_num = this.getRelevantDocNum(list, K);
+    double precision = (double) rel_doc_num / (double) K;
+    return precision;
   }
 
   /**
@@ -68,26 +69,26 @@ public double getPrecision(int[] list, int k) {
    *
    * @param list a list of integer with each integer element indicating
    *             the performance at its position
-   * @param k    the number of elements needed to be included in the calculation
+   * @param K    the number of elements needed to be included in the calculation
    * @return the number of relevant element
    */
-  private int getRelevantDocNum(int[] list, int k) {
+  private int getRelevantDocNum(int[] list, int K) {
     int size = list.length;
-    if (size == 0 || k == 0) {
+    if (size == 0 || K == 0) {
       return 0;
     }
 
-    if (k > size) {
-      k = size;
+    if (K > size) {
+      K = size;
     }
 
-    int relNum = 0;
-    for (int i = 0; i < k; i++) {
+    int rel_num = 0;
+    for (int i = 0; i < K; i++) {
       if (list[i] > 3) { // 3 refers to "OK"
-        relNum++;
+        rel_num++;
       }
     }
-    return relNum;
+    return rel_num;
   }
 
   /**
@@ -95,25 +96,25 @@ private int getRelevantDocNum(int[] list, int k) {
    *
    * @param list a list of integer with each integer element indicating
    *             the performance at its position
-   * @param k    the number of elements needed to be included in the calculation
+   * @param K    the number of elements needed to be included in the calculation
    * @return DCG score
    */
-  private double getDCG(int[] list, int k) {
+  private double getDCG(int[] list, int K) {
     int size = list.length;
-    if (size == 0 || k == 0) {
+    if (size == 0 || K == 0) {
       return 0.0;
     }
 
-    if (k > size) {
-      k = size;
+    if (K > size) {
+      K = size;
     }
 
     double dcg = list[0];
-    for (int i = 1; i < k; i++) {
+    for (int i = 1; i < K; i++) {
       int rel = list[i];
       int pos = i + 1;
-      double relLog = Math.log(pos) / Math.log(2);
-      dcg += rel / relLog;
+      double rel_log = Math.log(pos) / Math.log(2);
+      dcg += rel / rel_log;
     }
     return dcg;
   }
@@ -123,10 +124,10 @@ private double getDCG(int[] list, int k) {
    *
    * @param list a list of integer with each integer element indicating
    *             the performance at its position
-   * @param k    the number of elements needed to be included in the calculation
+   * @param K    the number of elements needed to be included in the calculation
    * @return IDCG score
    */
-  private double getIDCG(int[] list, int k) {
+  private double getIDCG(int[] list, int K) {
     Comparator<Integer> comparator = new Comparator<Integer>() {
       @Override
       public int compare(Integer o1, Integer o2) {
@@ -134,9 +135,11 @@ public int compare(Integer o1, Integer o2) {
       }
     };
     List<Integer> sortlist = IntStream.of(list).boxed().collect(Collectors.toList());
+    ;
     Collections.sort(sortlist, comparator);
     int[] sortedArr = sortlist.stream().mapToInt(i -> i).toArray();
-    return this.getDCG(sortedArr, k);
+    double idcg = this.getDCG(sortedArr, K);
+    return idcg;
   }
 
 }
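
The renames above do not change the metric itself: DCG@K = rel_1 + the sum
over i = 2..K of rel_i / log2(i), and NDCG divides by the DCG of the ideal
(descending) ordering. A standalone check of the formula with invented
grades:

    import java.util.Arrays;

    public class NdcgDemo {
      static double dcg(int[] rel, int k) {
        k = Math.min(k, rel.length);
        double dcg = rel[0];
        for (int i = 1; i < k; i++)
          dcg += rel[i] / (Math.log(i + 1) / Math.log(2));
        return dcg;
      }

      public static void main(String[] args) {
        int[] ranked = {7, 3, 5, 1};  // grades in ranked order
        int[] ideal = ranked.clone();
        Arrays.sort(ideal);           // ascending, then reverse to descending
        for (int i = 0; i < ideal.length / 2; i++) {
          int t = ideal[i];
          ideal[i] = ideal[ideal.length - 1 - i];
          ideal[ideal.length - 1 - i] = t;
        }
        double ndcg = dcg(ranked, 4) / dcg(ideal, 4);
        System.out.printf("NDCG@4 = %.4f%n", ndcg);
      }
    }
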
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/SparkFormatter.java b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/SparkFormatter.java
index ad61607..a5e8bc6 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/SparkFormatter.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/SparkFormatter.java
@@ -1,8 +1,24 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License"); you 
+ * may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package org.apache.sdap.mudrod.ssearch.ranking;
 
 import java.io.*;
 import java.text.DecimalFormat;
 
+/*
+ * Formats regular training data into spark SVM formats
+ */
 public class SparkFormatter {
   DecimalFormat NDForm = new DecimalFormat("#.###");
 
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/SparkSVM.java b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/SparkSVM.java
index 0d0eb8d..ac32f6d 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/SparkSVM.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/SparkSVM.java
@@ -21,6 +21,9 @@
 import org.apache.spark.mllib.regression.LabeledPoint;
 import org.apache.spark.mllib.util.MLUtils;
 
+/*
+ * Creates SVM ranking model using Spark
+ */
 public class SparkSVM {
 
   private SparkSVM() {
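
Judging by the MLUtils import, SparkSVM consumes training data in MLlib's
LibSVM text format, one "<label> <index>:<value> ..." line per example,
which is presumably what SparkFormatter above produces. A minimal local-mode
sketch of how such a model is typically trained and saved with MLlib; the
input path, iteration count, and output path are placeholders, not values
from mudrod's config:

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.mllib.classification.SVMModel;
    import org.apache.spark.mllib.classification.SVMWithSGD;
    import org.apache.spark.mllib.regression.LabeledPoint;
    import org.apache.spark.mllib.util.MLUtils;
    import org.apache.spark.rdd.RDD;

    public class SvmTrainDemo {
      public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("svm-demo").setMaster("local[2]");
        JavaSparkContext jsc = new JavaSparkContext(conf);

        // LibSVM text format, e.g. "1 1:0.62 2:0.13" (placeholder path)
        RDD<LabeledPoint> data = MLUtils.loadLibSVMFile(jsc.sc(), "data/training.txt");

        int numIterations = 100;  // illustrative value
        SVMModel model = SVMWithSGD.train(data, numIterations);
        model.save(jsc.sc(), "model/SVMWithSGDModel"); // reload via SVMModel.load

        jsc.stop();
      }
    }
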
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/TrainingImporter.java b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/TrainingImporter.java
index 2f27aa0..ff55c85 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/TrainingImporter.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/TrainingImporter.java
@@ -54,7 +54,7 @@ public void addMapping() {
           .startObject("dataID").field("type", "string").field("index", "not_analyzed").endObject().startObject("label").field("type", "string").field("index", "not_analyzed").endObject().endObject()
           .endObject().endObject();
 
-      es.getClient().admin().indices().preparePutMapping(props.getProperty("indexName")).setType("trainingranking").setSource(Mapping).execute().actionGet();
+      es.getClient().admin().indices().preparePutMapping(props.getProperty(MudrodConstants.ES_INDEX_NAME)).setType("trainingranking").setSource(Mapping).execute().actionGet();
     } catch (IOException e) {
       e.printStackTrace();
     }
@@ -78,7 +78,7 @@ public void importTrainingSet(String dataFolder) throws IOException {
         String[] list = line.split(",");
         String query = file.getName().replace(".csv", "");
         if (list.length > 0) {
-          IndexRequest ir = new IndexRequest(props.getProperty("indexName"), "trainingranking")
+          IndexRequest ir = new IndexRequest(props.getProperty(MudrodConstants.ES_INDEX_NAME), "trainingranking")
               .source(jsonBuilder().startObject().field("query", query).field("dataID", list[0]).field("label", list[list.length - 1]).endObject());
           es.getBulkProcessor().add(ir);
         }
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/package-info.java
index e25207e..ebcc77d 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/ranking/package-info.java
@@ -15,4 +15,4 @@
  * This package includes classes for importing training data, ML models,
  * generating input data for RankSVM, and evaluating ranking results
  */
-package org.apache.sdap.mudrod.ssearch.ranking;
+package org.apache.sdap.mudrod.ssearch.ranking;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ssearch/structure/SResult.java b/core/src/main/java/org/apache/sdap/mudrod/ssearch/structure/SResult.java
index fce4e34..cf94ddb 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/structure/SResult.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/structure/SResult.java
@@ -94,8 +94,8 @@ public SResult(String shortName, String longName, String topic, String descripti
   }
 
   public SResult(SResult sr) {
-    for (String aRlist : rlist) {
-      set(this, aRlist, get(sr, aRlist));
+    for (int i = 0; i < rlist.length; i++) {
+      set(this, rlist[i], get(sr, rlist[i]));
     }
   }
 
@@ -107,8 +107,8 @@ public SResult(SResult sr) {
    */
   public static String getHeader(String delimiter) {
     String str = "";
-    for (String aRlist : rlist) {
-      str += aRlist + delimiter;
+    for (int i = 0; i < rlist.length; i++) {
+      str += rlist[i] + delimiter;
     }
     str = str + "label" + "\n";
     return "ShortName" + delimiter + "below" + delimiter + str;
@@ -122,8 +122,8 @@ public static String getHeader(String delimiter) {
    */
   public String toString(String delimiter) {
     String str = "";
-    for (String aRlist : rlist) {
-      double score = get(this, aRlist);
+    for (int i = 0; i < rlist.length; i++) {
+      double score = get(this, rlist[i]);
       str += score + delimiter;
     }
     str = str + label + "\n";
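
SResult.get()/set() address fields by name (the rlist entries), which is why
the loops above can stay generic over whatever features rlist declares. The
accessor implementation is not part of this diff; a minimal reflection-based
sketch of the idea, using a stand-in class rather than SResult:

    import java.lang.reflect.Field;

    public class FieldAccessDemo {
      public double termScore = 0.42;  // stand-in feature field

      static Object get(Object obj, String name) throws ReflectiveOperationException {
        Field f = obj.getClass().getDeclaredField(name);
        f.setAccessible(true);
        return f.get(obj);
      }

      static void set(Object obj, String name, Object value) throws ReflectiveOperationException {
        Field f = obj.getClass().getDeclaredField(name);
        f.setAccessible(true);
        f.set(obj, value);
      }

      public static void main(String[] args) throws ReflectiveOperationException {
        FieldAccessDemo r = new FieldAccessDemo();
        set(r, "termScore", 0.9);
        System.out.println(get(r, "termScore")); // prints 0.9
      }
    }
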
diff --git a/core/src/main/java/org/apache/sdap/mudrod/ssearch/structure/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/ssearch/structure/package-info.java
index 5e75a40..1069390 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/ssearch/structure/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/ssearch/structure/package-info.java
@@ -14,4 +14,4 @@
 /**
  * This package includes data structure needed for ranking process
  */
-package org.apache.sdap.mudrod.ssearch.structure;
+package org.apache.sdap.mudrod.ssearch.structure;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/utils/ESTransportClient.java b/core/src/main/java/org/apache/sdap/mudrod/utils/ESTransportClient.java
index 3492306..9f53586 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/utils/ESTransportClient.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/utils/ESTransportClient.java
@@ -52,4 +52,4 @@ public void close() {
     super.close();
   }
 
-}
+}
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/utils/LabeledRowMatrix.java b/core/src/main/java/org/apache/sdap/mudrod/utils/LabeledRowMatrix.java
index edf4f59..8f00df6 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/utils/LabeledRowMatrix.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/utils/LabeledRowMatrix.java
@@ -35,4 +35,4 @@ public LabeledRowMatrix() {
     // TODO Auto-generated constructor stub
   }
 
-}
+}
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/utils/LinkageTriple.java b/core/src/main/java/org/apache/sdap/mudrod/utils/LinkageTriple.java
index a574041..3245da6 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/utils/LinkageTriple.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/utils/LinkageTriple.java
@@ -85,19 +85,20 @@ public static void insertTriples(ESDriver es, List<LinkageTriple> triples, Strin
     }
 
     es.createBulkProcessor();
-    for (LinkageTriple triple : triples) {
+    int size = triples.size();
+    for (int i = 0; i < size; i++) {
 
       XContentBuilder jsonBuilder = jsonBuilder().startObject();
       if (bTriple) {
 
-        jsonBuilder.field("concept_A", triple.keyA);
-        jsonBuilder.field("concept_B", triple.keyB);
+        jsonBuilder.field("concept_A", triples.get(i).keyA);
+        jsonBuilder.field("concept_B", triples.get(i).keyB);
 
       } else {
-        jsonBuilder.field("keywords", triple.keyA + "," + triple.keyB);
+        jsonBuilder.field("keywords", triples.get(i).keyA + "," + triples.get(i).keyB);
       }
 
-      jsonBuilder.field("weight", Double.parseDouble(df.format(triple.weight)));
+      jsonBuilder.field("weight", Double.parseDouble(df.format(triples.get(i).weight)));
       jsonBuilder.endObject();
 
       IndexRequest ir = new IndexRequest(index, type).source(jsonBuilder);
@@ -105,10 +106,10 @@ public static void insertTriples(ESDriver es, List<LinkageTriple> triples, Strin
 
       if (bTriple && bSymmetry) {
         XContentBuilder symmetryJsonBuilder = jsonBuilder().startObject();
-        symmetryJsonBuilder.field("concept_A", triple.keyB);
-        symmetryJsonBuilder.field("concept_B", triple.keyA);
+        symmetryJsonBuilder.field("concept_A", triples.get(i).keyB);
+        symmetryJsonBuilder.field("concept_B", triples.get(i).keyA);
 
-        symmetryJsonBuilder.field("weight", Double.parseDouble(df.format(triple.weight)));
+        symmetryJsonBuilder.field("weight", Double.parseDouble(df.format(triples.get(i).weight)));
 
         symmetryJsonBuilder.endObject();
 
@@ -120,14 +121,14 @@ public static void insertTriples(ESDriver es, List<LinkageTriple> triples, Strin
   }
 
   public static void addMapping(ESDriver es, String index, String type) {
-    XContentBuilder mapping;
+    XContentBuilder Mapping;
     try {
-      mapping = jsonBuilder().startObject().startObject(type).startObject("properties").startObject("concept_A").field("type", "string").field("index", "not_analyzed").endObject()
+      Mapping = jsonBuilder().startObject().startObject(type).startObject("properties").startObject("concept_A").field("type", "string").field("index", "not_analyzed").endObject()
           .startObject("concept_B").field("type", "string").field("index", "not_analyzed").endObject()
 
           .endObject().endObject().endObject();
 
-      es.getClient().admin().indices().preparePutMapping(index).setType(type).setSource(mapping).execute().actionGet();
+      es.getClient().admin().indices().preparePutMapping(index).setType(type).setSource(Mapping).execute().actionGet();
     } catch (IOException e) {
       e.printStackTrace();
     }
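
Two details worth noting in insertTriples(): the repeated triples.get(i)
lookups could be hoisted into a local variable, and the weights are rounded
through a DecimalFormat and parsed back before indexing. A standalone sketch
of that round-then-parse step; the "#.##" pattern and Locale.US symbols are
assumptions, since df is defined outside this hunk:

    import java.text.DecimalFormat;
    import java.text.DecimalFormatSymbols;
    import java.util.Locale;

    public class WeightFormatDemo {
      public static void main(String[] args) {
        // Fixing the symbols to Locale.US matters: with a comma decimal
        // separator, Double.parseDouble(df.format(...)) would throw.
        DecimalFormat df = new DecimalFormat("#.##",
            DecimalFormatSymbols.getInstance(Locale.US));
        double weight = 0.123456;
        double stored = Double.parseDouble(df.format(weight));
        System.out.println(stored);  // 0.12, rounded before indexing
      }
    }
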
diff --git a/core/src/main/java/org/apache/sdap/mudrod/utils/MatrixUtil.java b/core/src/main/java/org/apache/sdap/mudrod/utils/MatrixUtil.java
index 7eef272..8259ce7 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/utils/MatrixUtil.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/utils/MatrixUtil.java
@@ -129,8 +129,9 @@ public static LabeledRowMatrix createWordDocMatrix(JavaPairRDD<String, List<Stri
       public Iterator<Tuple2<Tuple2<String, Long>, Double>> call(Tuple2<List<String>, Long> docwords) throws Exception {
         List<Tuple2<Tuple2<String, Long>, Double>> pairs = new ArrayList<>();
         List<String> words = docwords._1;
-        for (String word : words) {
-          Tuple2<String, Long> worddoc = new Tuple2<>(word, docwords._2);
+        int n = words.size();
+        for (int i = 0; i < n; i++) {
+          Tuple2<String, Long> worddoc = new Tuple2<>(words.get(i), docwords._2);
           pairs.add(new Tuple2<Tuple2<String, Long>, Double>(worddoc, 1.0));
         }
         return pairs.iterator();
@@ -233,8 +234,9 @@ public static LabeledRowMatrix createDocWordMatrix(JavaPairRDD<String, List<Stri
       public Iterator<Tuple2<Tuple2<String, String>, Double>> call(Tuple2<String, List<String>> docwords) throws Exception {
         List<Tuple2<Tuple2<String, String>, Double>> pairs = new ArrayList<>();
         List<String> words = docwords._2;
-        for (String word : words) {
-          Tuple2<String, String> worddoc = new Tuple2<>(docwords._1, word);
+        int n = words.size();
+        for (int i = 0; i < n; i++) {
+          Tuple2<String, String> worddoc = new Tuple2<>(docwords._1, words.get(i));
           pairs.add(new Tuple2<Tuple2<String, String>, Double>(worddoc, 1.0));
         }
         return pairs.iterator();
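
createWordDocMatrix() and createDocWordMatrix() above emit one
((word, doc) or (doc, word), 1.0) pair per occurrence; the aggregation step
falls outside this hunk, but summing such pairs with reduceByKey is the
usual next step. A local-mode sketch with invented documents:

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import scala.Tuple2;

    import java.util.Arrays;

    public class WordDocCountDemo {
      public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("worddoc-demo").setMaster("local[2]");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // One ((doc, word), 1.0) pair per occurrence, the same shape the
        // flatMap above produces before aggregation.
        JavaPairRDD<Tuple2<String, String>, Double> ones = sc.parallelizePairs(Arrays.asList(
            new Tuple2<>(new Tuple2<>("doc1", "ocean"), 1.0),
            new Tuple2<>(new Tuple2<>("doc1", "ocean"), 1.0),
            new Tuple2<>(new Tuple2<>("doc2", "salinity"), 1.0)));

        // Summing the 1.0s yields the word-document count matrix entries.
        ones.reduceByKey((a, b) -> a + b)
            .collect()
            .forEach(t -> System.out.println(t._1 + " -> " + t._2));

        sc.stop();
      }
    }
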
diff --git a/core/src/main/java/org/apache/sdap/mudrod/utils/SimilarityUtil.java b/core/src/main/java/org/apache/sdap/mudrod/utils/SimilarityUtil.java
index 2fcdaf6..6fdc66d 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/utils/SimilarityUtil.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/utils/SimilarityUtil.java
@@ -274,4 +274,4 @@ public static double pearsonDistance(Vector vecA, Vector vecB) {
   public static double cosineDistance(Vector vecA, Vector vecB) {
     return 1;
   }
-}
+}
\ No newline at end of file
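
As the context lines show, cosineDistance() currently returns the constant 1
rather than a real measure. For reference, a conventional cosine similarity
over mllib vectors looks like this sketch (not the project's code):

    import org.apache.spark.mllib.linalg.Vector;
    import org.apache.spark.mllib.linalg.Vectors;

    public class CosineDemo {
      static double cosine(Vector a, Vector b) {
        double[] x = a.toArray(), y = b.toArray();
        double dot = 0, nx = 0, ny = 0;
        for (int i = 0; i < x.length; i++) {  // assumes equal-length vectors
          dot += x[i] * y[i];
          nx += x[i] * x[i];
          ny += y[i] * y[i];
        }
        return dot / (Math.sqrt(nx) * Math.sqrt(ny));
      }

      public static void main(String[] args) {
        Vector a = Vectors.dense(1.0, 0.0, 2.0);
        Vector b = Vectors.dense(1.0, 1.0, 2.0);
        System.out.printf("cosine = %.4f%n", cosine(a, b));
      }
    }
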
diff --git a/core/src/main/java/org/apache/sdap/mudrod/utils/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/utils/package-info.java
index a58d902..1adb0b9 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/utils/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/utils/package-info.java
@@ -15,4 +15,4 @@
  * This package includes utilities classes for calculating similarity and
  * parsing HTTP request
  */
-package org.apache.sdap.mudrod.utils;
+package org.apache.sdap.mudrod.utils;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/package-info.java
index f762a6f..9c87a7d 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/package-info.java
@@ -15,4 +15,4 @@
  * This package includes web log pre-processing, processing, and data structure
  * classes.
  */
-package org.apache.sdap.mudrod.weblog;
+package org.apache.sdap.mudrod.weblog;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/partition/KGreedyPartitionSolver.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/partition/KGreedyPartitionSolver.java
index 1e25cec..8f4e263 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/partition/KGreedyPartitionSolver.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/partition/KGreedyPartitionSolver.java
@@ -20,22 +20,22 @@ public KGreedyPartitionSolver(boolean bsorted) {
     List<String> months = null;
 
     if (!this.bsorted) {
-      LinkedHashMap<String, Double> sortedMap = (LinkedHashMap<String, Double>) this.sortMapByValue(labelNums);
-      lista = new ArrayList<>(sortedMap.values());
-      months = new ArrayList<>(sortedMap.keySet());
+      LinkedHashMap sortedMap = this.sortMapByValue(labelNums);
+      lista = new ArrayList(sortedMap.values());
+      months = new ArrayList(sortedMap.keySet());
     } else {
-      lista = new ArrayList<>(labelNums.values());
-      months = new ArrayList<>(labelNums.keySet());
+      lista = new ArrayList(labelNums.values());
+      months = new ArrayList(labelNums.keySet());
     }
 
     List<List<Double>> parts = new ArrayList<>();
     List<List<String>> splitMonths = new ArrayList<>();
 
     for (int i = 0; i < k; i++) {
-      List<Double> part = new ArrayList<>();
+      List<Double> part = new ArrayList();
       parts.add(part);
 
-      List<String> monthList = new ArrayList<>();
+      List<String> monthList = new ArrayList();
       splitMonths.add(monthList);
     }
 
@@ -47,10 +47,10 @@ public KGreedyPartitionSolver(boolean bsorted) {
       for (int i = 0; i < parts.size(); i++) {
         List<Double> part = parts.get(i);
         if (minimalSum == -1) {
-          minimalSum = suma(part);
+          minimalSum = Suma(part);
           position = i;
-        } else if (suma(part) < minimalSum) {
-          minimalSum = suma(part);
+        } else if (Suma(part) < minimalSum) {
+          minimalSum = Suma(part);
           position = i;
         }
       }
@@ -65,29 +65,38 @@ public KGreedyPartitionSolver(boolean bsorted) {
       j++;
     }
 
-    Map<String, Integer> labelGroups = new HashMap<>();
+    /*  for(int i=0; i<splitMonths.size(); i++){
+        System.out.println("group:" + i);
+        printStrList(splitMonths.get(i));
+      }
+      
+      for(int i=0; i<parts.size(); i++){
+        print(parts.get(i));
+      }*/
+
+    Map<String, Integer> LabelGroups = new HashMap<String, Integer>();
     for (int i = 0; i < splitMonths.size(); i++) {
       List<String> list = splitMonths.get(i);
-      for (String aList : list) {
-        labelGroups.put(aList, i);
+      for (int m = 0; m < list.size(); m++) {
+        LabelGroups.put(list.get(m), i);
       }
     }
 
-    return labelGroups;
+    return LabelGroups;
   }
 
-  public Map<String, Double> sortMapByValue(Map<String, Double> passedMap) {
-    List<String> mapKeys = new ArrayList<>(passedMap.keySet());
-    List<Double> mapValues = new ArrayList<>(passedMap.values());
+  public LinkedHashMap<String, Double> sortMapByValue(Map passedMap) {
+    List mapKeys = new ArrayList(passedMap.keySet());
+    List mapValues = new ArrayList(passedMap.values());
     Collections.sort(mapValues, Collections.reverseOrder());
     Collections.sort(mapKeys, Collections.reverseOrder());
 
-    LinkedHashMap<String, Double> sortedMap = new LinkedHashMap<>();
+    LinkedHashMap sortedMap = new LinkedHashMap();
 
-    Iterator<Double> valueIt = mapValues.iterator();
+    Iterator valueIt = mapValues.iterator();
     while (valueIt.hasNext()) {
       Object val = valueIt.next();
-      Iterator<String> keyIt = mapKeys.iterator();
+      Iterator keyIt = mapKeys.iterator();
 
       while (keyIt.hasNext()) {
         Object key = keyIt.next();
@@ -107,12 +116,27 @@ public KGreedyPartitionSolver(boolean bsorted) {
     return sortedMap;
   }
 
-  private Double suma(List<Double> part) {
+  private Double Suma(List<Double> part) {
     Double ret = 0.0;
-    for (Double aPart : part) {
-      ret += aPart;
+    for (int i = 0; i < part.size(); i++) {
+      ret += part.get(i);
     }
     return ret;
   }
 
+  private void print(List<Double> list) {
+    /*for (int i = 0; i < list.size(); i++) {
+        System.out.print(list.get(i)+",");
+    }*/
+    System.out.print("sum is:" + Suma(list));
+    System.out.println();
+  }
+
+  private void printStrList(List<String> list) {
+    for (int i = 0; i < list.size(); i++) {
+      System.out.print(list.get(i) + ",");
+    }
+    System.out.println();
+  }
+
 }
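
KGreedyPartitionSolver implements the classic greedy k-way partition
heuristic: sort the per-label weights in descending order, then always drop
the next item into the group with the smallest running sum (what Suma
computes). A standalone sketch with invented weights:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.List;

    public class GreedyPartitionDemo {
      public static void main(String[] args) {
        Double[] weights = { 8.0, 7.0, 6.0, 5.0, 4.0 };  // e.g. log volume per month
        Arrays.sort(weights, Comparator.reverseOrder());

        int k = 2;
        List<List<Double>> bins = new ArrayList<>();
        double[] sums = new double[k];
        for (int i = 0; i < k; i++) bins.add(new ArrayList<>());

        for (Double w : weights) {
          int min = 0;                       // find the lightest bin so far
          for (int i = 1; i < k; i++)
            if (sums[i] < sums[min]) min = i;
          bins.get(min).add(w);
          sums[min] += w;
        }
        // Expected: [8, 5, 4] = 17 and [7, 6] = 13, near-balanced partitions
        System.out.println(bins + " sums=" + Arrays.toString(sums));
      }
    }
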
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/ClickStreamGenerator.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/ClickStreamGenerator.java
index e678854..886cd4a 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/ClickStreamGenerator.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/ClickStreamGenerator.java
@@ -16,6 +16,7 @@
 import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
+import org.apache.sdap.mudrod.main.MudrodConstants;
 import org.apache.sdap.mudrod.utils.LabeledRowMatrix;
 import org.apache.sdap.mudrod.utils.MatrixUtil;
 import org.apache.sdap.mudrod.weblog.structure.ClickStream;
@@ -48,11 +49,11 @@ public Object execute() {
     LOG.info("Starting ClickStreamGenerator...");
     startTime = System.currentTimeMillis();
 
-    String clickstremMatrixFile = props.getProperty("clickstreamMatrix");
+    String clickstremMatrixFile = props.getProperty(MudrodConstants.CLICKSTREAM_PATH);
     try {
       SessionExtractor extractor = new SessionExtractor();
       JavaRDD<ClickStream> clickstreamRDD = extractor.extractClickStreamFromES(this.props, this.es, this.spark);
-      int weight = Integer.parseInt(props.getProperty("downloadWeight"));
+      int weight = Integer.parseInt(props.getProperty(MudrodConstants.DOWNLOAD_WEIGHT));
       JavaPairRDD<String, List<String>> metaddataQueryRDD = extractor.bulidDataQueryRDD(clickstreamRDD, weight);
       LabeledRowMatrix wordDocMatrix = MatrixUtil.createWordDocMatrix(metaddataQueryRDD);
 
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/CrawlerDetection.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/CrawlerDetection.java
index 79a014e..3e782a7 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/CrawlerDetection.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/CrawlerDetection.java
@@ -56,20 +56,6 @@
   private static final long serialVersionUID = 1L;
   private static final Logger LOG = LoggerFactory.getLogger(CrawlerDetection.class);
 
-  public static final String CRAWLER = "crawler";
-  public static final String GOOGLE_BOT = "googlebot";
-  public static final String BING_BOT = "bingbot";
-  public static final String YAHOO_BOT = "slurp";
-  public static final String YACY_BOT = "yacybot";
-  public static final String ROGER_BOT = "rogerbot";
-  public static final String YANDEX_BOT = "yandexbot";
-
-  public static final String NO_AGENT_BOT = "-";
-  public static final String PERL_BOT = "libwww-perl/";
-  public static final String APACHE_HHTP = "apache-httpclient/";
-  public static final String JAVA_CLIENT = "java/";
-  public static final String CURL = "curl/";
-
   /**
    * Paramterized constructor to instantiate a configured instance of
    * {@link CrawlerDetection}
@@ -84,8 +70,8 @@ public CrawlerDetection(Properties props, ESDriver es, SparkDriver spark) {
     super(props, es, spark);
   }
 
-  public CrawlerDetection() {
-    super(null, null, null);
+  public CrawlerDetection(Properties props) {
+    super(props, null, null);
   }
 
   @Override
@@ -110,13 +96,11 @@ public Object execute() {
    * @return 1 if the log is initiated by crawler, 0 otherwise
    */
   public boolean checkKnownCrawler(String agent) {
-    agent = agent.toLowerCase();
-    if (agent.contains(CRAWLER) || agent.contains(GOOGLE_BOT) || agent.contains(BING_BOT) || agent.contains(APACHE_HHTP) || agent.contains(PERL_BOT) || agent.contains(YAHOO_BOT) || agent
-        .contains(YANDEX_BOT) || agent.contains(NO_AGENT_BOT) || agent.contains(PERL_BOT) || agent.contains(APACHE_HHTP) || agent.contains(JAVA_CLIENT) || agent.contains(CURL)) {
-      return true;
-    } else {
-      return false;
-    }
+    String[] crawlers = props.getProperty(MudrodConstants.BLACK_LIST_AGENT).split(",");
+    for (int i = 0; i < crawlers.length; i++) {
+      if (agent.toLowerCase().contains(crawlers[i].trim())) return true;
+    }  
+    return false;
   }
 
   public void checkByRate() throws InterruptedException, IOException {
@@ -138,7 +122,7 @@ public void checkByRate() throws InterruptedException, IOException {
   public void checkByRateInSequential() throws InterruptedException, IOException {
     es.createBulkProcessor();
 
-    int rate = Integer.parseInt(props.getProperty("sendingrate"));
+    int rate = Integer.parseInt(props.getProperty(MudrodConstants.REQUEST_RATE));
 
     Terms users = this.getUserTerms(this.httpType);
     LOG.info("Original User count: {}", Integer.toString(users.getBuckets().size()));
@@ -178,7 +162,7 @@ void checkByRateInParallel() throws InterruptedException, IOException {
 
   private int checkByRate(ESDriver es, String user) {
 
-    int rate = Integer.parseInt(props.getProperty("sendingrate"));
+    int rate = Integer.parseInt(props.getProperty(MudrodConstants.REQUEST_RATE));
     Pattern pattern = Pattern.compile("get (.*?) http/*");
     Matcher matcher;
 
@@ -202,13 +186,13 @@ private int checkByRate(ESDriver es, String user) {
         for (SearchHit hit : scrollResp.getHits().getHits()) {
           Map<String, Object> result = hit.getSource();
           String logtype = (String) result.get("LogType");
-          if (logtype.equals("PO.DAAC")) {
+          if (logtype.equals(MudrodConstants.HTTP_LOG)) {
             String request = (String) result.get("Request");
             matcher = pattern.matcher(request.trim().toLowerCase());
             boolean find = false;
             while (matcher.find()) {
               request = matcher.group(1);
-              result.put("RequestUrl", "http://podaac.jpl.nasa.gov" + request);
+              result.put("RequestUrl", props.getProperty(MudrodConstants.BASE_URL) + request);
               find = true;
             }
             if (!find) {
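
checkKnownCrawler() now reads its bot list from the comma-separated
MudrodConstants.BLACK_LIST_AGENT property instead of hard-coded constants,
so deployments can extend the list without recompiling. A standalone sketch
of the check; the key name and sample list here are illustrative, not
mudrod's shipped values:

    import java.util.Properties;

    public class AgentBlacklistDemo {
      static boolean isKnownCrawler(Properties props, String agent) {
        String[] crawlers = props.getProperty("mudrod.black.agent.list").split(",");
        for (String c : crawlers) {
          // Case-insensitive substring match, entries trimmed as above.
          if (agent.toLowerCase().contains(c.trim())) return true;
        }
        return false;
      }

      public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("mudrod.black.agent.list", "crawler, googlebot, curl/");
        System.out.println(isKnownCrawler(props, "Mozilla/5.0 (compatible; Googlebot/2.1)")); // true
        System.out.println(isKnownCrawler(props, "Mozilla/5.0 (X11; Linux x86_64)"));         // false
      }
    }
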
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/HistoryGenerator.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/HistoryGenerator.java
index cd41fbe..8e57b50 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/HistoryGenerator.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/HistoryGenerator.java
@@ -60,9 +60,7 @@ public Object execute() {
    */
   public void generateBinaryMatrix() {
     try {
-
-      System.out.println(props.getProperty("userHistoryMatrix"));
-      File file = new File(props.getProperty("userHistoryMatrix"));
+      File file = new File(props.getProperty(MudrodConstants.USER_HISTORY_PATH));
       if (file.exists()) {
         file.delete();
       }
@@ -79,13 +77,21 @@ public void generateBinaryMatrix() {
       String[] logIndices = logIndexList.toArray(new String[0]);
       String[] statictypeArray = new String[] { this.sessionStats };
       int docCount = es.getDocCount(logIndices, statictypeArray);
+      
+      LOG.info(this.sessionStats + ":" + docCount);      
+      if (docCount==0) 
+      { 
+        bw.close(); 
+        file.delete();
+        return;
+      }
 
       SearchResponse sr = es.getClient().prepareSearch(logIndices).setTypes(statictypeArray).setQuery(QueryBuilders.matchAllQuery()).setSize(0)
           .addAggregation(AggregationBuilders.terms("IPs").field("IP").size(docCount)).execute().actionGet();
       Terms ips = sr.getAggregations().get("IPs");
       List<String> ipList = new ArrayList<>();
       for (Terms.Bucket entry : ips.getBuckets()) {
-        if (entry.getDocCount() > Integer.parseInt(props.getProperty(MudrodConstants.MINI_USER_HISTORY))) { // filter
+        if (entry.getDocCount() >= Integer.parseInt(props.getProperty(MudrodConstants.QUERY_MIN))) { // filter
           // out
           // less
           // active users/ips
@@ -107,15 +113,15 @@ public void generateBinaryMatrix() {
         Terms ipAgg = keyword.getAggregations().get("IPAgg");
 
         int distinctUser = ipAgg.getBuckets().size();
-        if (distinctUser > Integer.parseInt(props.getProperty(MudrodConstants.MINI_USER_HISTORY))) {
+        if (distinctUser >= Integer.parseInt(props.getProperty(MudrodConstants.QUERY_MIN))) {
           bw.write(keyword.getKey() + ",");
           for (Terms.Bucket IP : ipAgg.getBuckets()) {
 
             ipMap.put(IP.getKey().toString(), 1);
           }
-          for (String anIpList : ipList) {
-            if (ipMap.containsKey(anIpList)) {
-              bw.write(ipMap.get(anIpList) + ",");
+          for (int i = 0; i < ipList.size(); i++) {
+            if (ipMap.containsKey(ipList.get(i))) {
+              bw.write(ipMap.get(ipList.get(i)) + ",");
             } else {
               bw.write("0,");
             }
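
The docCount guard added above keeps HistoryGenerator from leaving a
header-only CSV behind when the session-stats type is empty. A minimal
sketch of that guard pattern; the file name is a placeholder:

    import java.io.BufferedWriter;
    import java.io.File;
    import java.io.FileWriter;
    import java.io.IOException;

    public class EmptyExportGuardDemo {
      public static void main(String[] args) throws IOException {
        File file = new File("userhistory.csv");  // placeholder path
        BufferedWriter bw = new BufferedWriter(new FileWriter(file));

        int docCount = 0;  // pretend the aggregation found no sessions
        if (docCount == 0) {
          // Nothing to export: close the writer and remove the empty
          // file instead of handing a zero-row matrix downstream.
          bw.close();
          file.delete();
          return;
        }

        bw.write("keyword,ip1,ip2\n");
        bw.close();
      }
    }
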
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/ImportLogFile.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/ImportLogFile.java
index 933b061..a3fa870 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/ImportLogFile.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/ImportLogFile.java
@@ -188,14 +188,14 @@ public void readFileInParallel(String httplogpath, String ftplogpath) {
 
   public void importHttpfile(String httplogpath) {
     // import http logs
-    JavaRDD<String> accessLogs = spark.sc.textFile(httplogpath, this.partition).map(s -> ApacheAccessLog.parseFromLogLine(s)).filter(ApacheAccessLog::checknull);
+    JavaRDD<String> accessLogs = spark.sc.textFile(httplogpath, this.partition).map(s -> ApacheAccessLog.parseFromLogLine(s, props)).filter(ApacheAccessLog::checknull);
 
     JavaEsSpark.saveJsonToEs(accessLogs, logIndex + "/" + this.httpType);
   }
 
   public void importFtpfile(String ftplogpath) {
     // import ftp logs
-    JavaRDD<String> ftpLogs = spark.sc.textFile(ftplogpath, this.partition).map(s -> FtpLog.parseFromLogLine(s)).filter(FtpLog::checknull);
+    JavaRDD<String> ftpLogs = spark.sc.textFile(ftplogpath, this.partition).map(s -> FtpLog.parseFromLogLine(s, props)).filter(FtpLog::checknull);
 
     JavaEsSpark.saveJsonToEs(ftpLogs, logIndex + "/" + this.ftpType);
   }
@@ -263,7 +263,7 @@ public void parseSingleLineFTP(String log, String index, String type) {
       IndexRequest ir;
       try {
         ir = new IndexRequest(index, type)
-            .source(jsonBuilder().startObject().field("LogType", "ftp").field("IP", ip).field("Time", date).field("Request", request).field("Bytes", Long.parseLong(bytes)).endObject());
+            .source(jsonBuilder().startObject().field("LogType", MudrodConstants.FTP_LOG).field("IP", ip).field("Time", date).field("Request", request).field("Bytes", Long.parseLong(bytes)).endObject());
         es.getBulkProcessor().add(ir);
       } catch (NumberFormatException e) {
         LOG.error("Error whilst processing numbers", e);
@@ -306,9 +306,9 @@ public void parseSingleLineHTTP(String log, String index, String type) {
     CrawlerDetection crawlerDe = new CrawlerDetection(this.props, this.es, this.spark);
     if (!crawlerDe.checkKnownCrawler(agent)) {
       boolean tag = false;
-      String[] mimeTypes = { ".js", ".css", ".jpg", ".png", ".ico", "image_captcha", "autocomplete", ".gif", "/alldata/", "/api/", "get / http/1.1", ".jpeg", "/ws/" };
-      for (String mimeType : mimeTypes) {
-        if (request.contains(mimeType)) {
+      String[] mimeTypes = props.getProperty(MudrodConstants.BLACK_LIST_REQUEST).split(",");
+      for (int i = 0; i < mimeTypes.length; i++) {
+        if (request.contains(mimeTypes[i].trim())) {
           tag = true;
           break;
         }
@@ -325,7 +325,7 @@ private void executeBulkRequest(IndexRequest ir, String index, String type, Matc
     IndexRequest newIr = ir;
     try {
       newIr = new IndexRequest(index, type).source(
-          jsonBuilder().startObject().field("LogType", "PO.DAAC").field("IP", matcher.group(1)).field("Time", date).field("Request", matcher.group(5)).field("Response", matcher.group(6))
+          jsonBuilder().startObject().field("LogType", MudrodConstants.HTTP_LOG).field("IP", matcher.group(1)).field("Time", date).field("Request", matcher.group(5)).field("Response", matcher.group(6))
               .field("Bytes", Integer.parseInt(bytes)).field("Referer", matcher.group(8)).field("Browser", matcher.group(9)).endObject());
 
       es.getBulkProcessor().add(newIr);
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/LogAbstract.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/LogAbstract.java
index 3fcc67f..e5fb29c 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/LogAbstract.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/LogAbstract.java
@@ -1,6 +1,5 @@
 package org.apache.sdap.mudrod.weblog.pre;
 
-import org.apache.commons.io.IOUtils;
 import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
@@ -8,6 +7,7 @@
 import org.apache.sdap.mudrod.weblog.partition.KGreedyPartitionSolver;
 import org.apache.sdap.mudrod.weblog.partition.ThePartitionProblemSolver;
 import org.apache.sdap.mudrod.weblog.partition.logPartitioner;
+import org.apache.commons.io.IOUtils;
 import org.apache.spark.Partition;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
@@ -47,17 +47,17 @@
 
   public LogAbstract(Properties props, ESDriver es, SparkDriver spark) {
     super(props, es, spark);
-    if (props != null) {
+    if (props != null && es != null) {
       initLogIndex();
     }
   }
 
   protected void initLogIndex() {
     logIndex = props.getProperty(MudrodConstants.LOG_INDEX) + props.getProperty(MudrodConstants.TIME_SUFFIX);
-    httpType = props.getProperty(MudrodConstants.HTTP_TYPE_PREFIX);
-    ftpType = props.getProperty(MudrodConstants.FTP_TYPE_PREFIX);
-    cleanupType = props.getProperty(MudrodConstants.CLEANUP_TYPE_PREFIX);
-    sessionStats = props.getProperty(MudrodConstants.SESSION_STATS_PREFIX);
+    httpType = MudrodConstants.HTTP_TYPE;
+    ftpType = MudrodConstants.FTP_TYPE;
+    cleanupType = MudrodConstants.CLEANUP_TYPE;
+    sessionStats = MudrodConstants.SESSION_STATS_TYPE;
 
     InputStream settingsStream = getClass().getClassLoader().getResourceAsStream(ES_SETTINGS);
     InputStream mappingsStream = getClass().getClassLoader().getResourceAsStream(ES_MAPPINGS);
@@ -146,12 +146,16 @@ public Terms getUserTerms(String... type) {
     Map<String, Long> userList = new HashMap<>();
     for (Terms.Bucket user : users.getBuckets()) {
       String ip = (String) user.getKey();
+
       System.out.println(ip);
+
       Histogram agg = user.getAggregations().get("by_day");
       List<? extends Histogram.Bucket> dateList = agg.getBuckets();
-      for (Histogram.Bucket aDateList : dateList) {
-        Long count = aDateList.getDocCount();
-        String date = aDateList.getKey().toString();
+      int size = dateList.size();
+      for (int i = 0; i < size; i++) {
+        Long count = dateList.get(i).getDocCount();
+        String date = dateList.get(i).getKey().toString();
+
         System.out.println(date);
         System.out.println(count);
       }
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionGenerator.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionGenerator.java
index d884bf9..d386454 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionGenerator.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionGenerator.java
@@ -75,11 +75,11 @@ public Object execute() {
   public void generateSession() {
     try {
       es.createBulkProcessor();
-      genSessionByReferer(Integer.parseInt(props.getProperty("timegap")));
+      genSessionByReferer(Integer.parseInt(props.getProperty(MudrodConstants.REQUEST_TIME_GAP)));
       es.destroyBulkProcessor();
 
       es.createBulkProcessor();
-      combineShortSessions(Integer.parseInt(props.getProperty("timegap")));
+      combineShortSessions(Integer.parseInt(props.getProperty(MudrodConstants.REQUEST_TIME_GAP)));
       es.destroyBulkProcessor();
     } catch (ElasticsearchException e) {
       LOG.error("Error whilst executing bulk processor.", e);
@@ -241,7 +241,7 @@ public int genSessionByReferer(ESDriver es, String user, int timeThres) throws E
     String logType = "";
     String id = "";
     String ip = user;
-    String indexUrl = "http://podaac.jpl.nasa.gov/";
+    String indexUrl = props.getProperty(MudrodConstants.BASE_URL) + "/";
     DateTime time = null;
     DateTimeFormatter fmt = ISODateTimeFormat.dateTime();
 
@@ -254,7 +254,7 @@ public int genSessionByReferer(ESDriver es, String user, int timeThres) throws E
         time = fmt.parseDateTime((String) result.get("Time"));
         id = hit.getId();
 
-        if ("PO.DAAC".equals(logType)) {
+        if (MudrodConstants.HTTP_LOG.equals(logType)) {
           if ("-".equals(referer) || referer.equals(indexUrl) || !referer.contains(indexUrl)) {
             sessionCountIn++;
             sessionReqs.put(ip + "@" + sessionCountIn, new HashMap<String, DateTime>());
@@ -313,7 +313,7 @@ public int genSessionByReferer(ESDriver es, String user, int timeThres) throws E
               }
             }
           }
-        } else if ("ftp".equals(logType)) {
+        } else if (MudrodConstants.FTP_LOG.equals(logType)) {
 
           // may affect computation efficiency
           Map<String, DateTime> requests = sessionReqs.get(ip + "@" + sessionCountIn);
@@ -435,6 +435,7 @@ public void combineShortSessions(ESDriver es, String user, int timeThres) throws
             }
           }
         }
+        ;
       }
       lastoldID = s.getID();
       lastnewID = s.getNewID();
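
genSessionByReferer() and combineShortSessions() split a user's requests
into sessions wherever the gap between consecutive timestamps exceeds the
configured threshold (the old "timegap" property, now read through
MudrodConstants.REQUEST_TIME_GAP). A minimal sketch of that time-gap rule;
the 60-minute threshold and request times are invented:

    import java.util.ArrayList;
    import java.util.List;

    public class SessionSplitDemo {
      public static void main(String[] args) {
        long[] minutes = { 0, 3, 9, 120, 124 };  // request times for one IP
        int gapThreshold = 60;                   // minutes, illustrative value

        List<List<Long>> sessions = new ArrayList<>();
        List<Long> current = new ArrayList<>();
        current.add(minutes[0]);
        for (int i = 1; i < minutes.length; i++) {
          if (minutes[i] - minutes[i - 1] > gapThreshold) {
            sessions.add(current);               // gap too large: new session
            current = new ArrayList<>();
          }
          current.add(minutes[i]);
        }
        sessions.add(current);
        System.out.println(sessions);  // [[0, 3, 9], [120, 124]]
      }
    }
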
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionStatistic.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionStatistic.java
index f084a90..1e9709a 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionStatistic.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/SessionStatistic.java
@@ -82,25 +82,25 @@ public Object execute() {
 
   public void processSession() throws InterruptedException, IOException, ExecutionException {
     String processingType = props.getProperty(MudrodConstants.PROCESS_TYPE);
-    if ("sequential".equals(processingType)) {
+    if (processingType.equals("sequential")) {
       processSessionInSequential();
-    } else if ("parallel".equals(processingType)) {
+    } else if (processingType.equals("parallel")) {
       processSessionInParallel();
     }
   }
 
   public void processSessionInSequential() throws IOException, InterruptedException, ExecutionException {
     es.createBulkProcessor();
-    Terms sessions = this.getSessionTerms();
-    int sessionCount = 0;
-    for (Terms.Bucket entry : sessions.getBuckets()) {
-      if (entry.getDocCount() >= 3 && !"invalid".equals(entry.getKey())) {
+    Terms Sessions = this.getSessionTerms();
+    int session_count = 0;
+    for (Terms.Bucket entry : Sessions.getBuckets()) {
+      if (entry.getDocCount() >= 3 && !entry.getKey().equals("invalid")) {
         String sessionid = entry.getKey().toString();
         int sessionNum = processSession(es, sessionid);
-        sessionCount += sessionNum;
+        session_count += sessionNum;
       }
     }
-    LOG.info("Final Session count: {}", Integer.toString(sessionCount));
+    LOG.info("Final Session count: {}", Integer.toString(session_count));
     es.destroyBulkProcessor();
   }
 
@@ -111,7 +111,7 @@ public void processSessionInSequential() throws IOException, InterruptedExceptio
    * @return dataset ID
    */
   public String findDataset(String request) {
-    String pattern1 = "/dataset/";
+    String pattern1 = props.getProperty(MudrodConstants.VIEW_MARKER);
     String pattern2;
     if (request.contains("?")) {
       pattern2 = "?";
@@ -138,7 +138,7 @@ public void processSessionInParallel() throws InterruptedException, IOException
       public Iterator<Integer> call(Iterator<String> arg0) throws Exception {
         ESDriver tmpES = new ESDriver(props);
         tmpES.createBulkProcessor();
-        List<Integer> sessionNums = new ArrayList<>();
+        List<Integer> sessionNums = new ArrayList<Integer>();
         sessionNums.add(0);
         while (arg0.hasNext()) {
           String s = arg0.next();
@@ -170,17 +170,17 @@ public int processSession(ESDriver es, String sessionId) throws IOException, Int
     DateTime start = null;
     DateTime end = null;
     int duration = 0;
-    float requestRate = 0;
+    float request_rate = 0;
 
-    int sessionCount = 0;
+    int session_count = 0;
     Pattern pattern = Pattern.compile("get (.*?) http/*");
 
     StatsAggregationBuilder statsAgg = AggregationBuilders.stats("Stats").field("Time");
 
-    BoolQueryBuilder filterSearch = new BoolQueryBuilder();
-    filterSearch.must(QueryBuilders.termQuery("SessionID", sessionId));
+    BoolQueryBuilder filter_search = new BoolQueryBuilder();
+    filter_search.must(QueryBuilders.termQuery("SessionID", sessionId));
 
-    SearchResponse sr = es.getClient().prepareSearch(logIndex).setTypes(inputType).setQuery(filterSearch).addAggregation(statsAgg).execute().actionGet();
+    SearchResponse sr = es.getClient().prepareSearch(logIndex).setTypes(inputType).setQuery(filter_search).addAggregation(statsAgg).execute().actionGet();
 
     Stats agg = sr.getAggregations().get("Stats");
     min = agg.getMinAsString();
@@ -190,24 +190,18 @@ public int processSession(ESDriver es, String sessionId) throws IOException, Int
 
     duration = Seconds.secondsBetween(start, end).getSeconds();
 
-    int searchDataListRequestCount = 0;
-    int searchDataRequestCount = 0;
-    int searchDataListRequestByKeywordsCount = 0;
-    int ftpRequestCount = 0;
-    int keywordsNum = 0;
+    int searchDataListRequest_count = 0;
+    int searchDataRequest_count = 0;
+    int searchDataListRequest_byKeywords_count = 0;
+    int ftpRequest_count = 0;
+    int keywords_num = 0;
 
-    String iP = null;
+    String IP = null;
     String keywords = "";
     String views = "";
     String downloads = "";
 
-    SearchResponse scrollResp = es.getClient()
-            .prepareSearch(logIndex)
-            .setTypes(inputType)
-            .setScroll(new TimeValue(60000))
-            .setQuery(filterSearch)
-            .setSize(100)
-            .execute().actionGet();
+    SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(inputType).setScroll(new TimeValue(60000)).setQuery(filter_search).setSize(100).execute().actionGet();
 
     while (true) {
       for (SearchHit hit : scrollResp.getHits().getHits()) {
@@ -215,30 +209,30 @@ public int processSession(ESDriver es, String sessionId) throws IOException, Int
 
         String request = (String) result.get("Request");
         String logType = (String) result.get("LogType");
-        iP = (String) result.get("IP");
+        IP = (String) result.get("IP");
         Matcher matcher = pattern.matcher(request.trim().toLowerCase());
         while (matcher.find()) {
           request = matcher.group(1);
         }
 
-        String datasetlist = "/datasetlist?";
-        String dataset = "/dataset/";
+        String datasetlist = props.getProperty(MudrodConstants.SEARCH_MARKER);
+        String dataset = props.getProperty(MudrodConstants.VIEW_MARKER);
         if (request.contains(datasetlist)) {
-          searchDataListRequestCount++;
+          searchDataListRequest_count++;
 
           RequestUrl requestURL = new RequestUrl();
           String infoStr = requestURL.getSearchInfo(request) + ",";
-          String info = es.customAnalyzing(props.getProperty("indexName"), infoStr);
+          String info = es.customAnalyzing(props.getProperty(MudrodConstants.ES_INDEX_NAME), infoStr);
 
-          if (!",".equals(info)) {
-            if ("".equals(keywords)) {
+          if (!info.equals(",")) {
+            if (keywords.equals("")) {
               keywords = keywords + info;
             } else {
               String[] items = info.split(",");
               String[] keywordList = keywords.split(",");
-              for (String item : items) {
-                if (!Arrays.asList(keywordList).contains(item)) {
-                  keywords = keywords + item + ",";
+              for (int m = 0; m < items.length; m++) {
+                if (!Arrays.asList(keywordList).contains(items[m])) {
+                  keywords = keywords + items[m] + ",";
                 }
               }
             }
@@ -246,36 +240,30 @@ public int processSession(ESDriver es, String sessionId) throws IOException, Int
 
         }
         if (request.startsWith(dataset)) {
-          searchDataRequestCount++;
-          if (findDataset(request) != null) {
+          searchDataRequest_count++;
+          if (findDataset(request) != null) {
             String view = findDataset(request);
-
-            if ("".equals(views)) {
+            if (views.equals(""))
               views = view;
-            } else {
-              if (views.contains(view)) {
-
-              } else {
-                views = views + "," + view;
-              }
-            }
-          }
+            else if (!views.contains(view))
+              views = views + "," + view;
+          }
         }
-        if ("ftp".equals(logType)) {
-          ftpRequestCount++;
+        if (MudrodConstants.FTP_LOG.equals(logType)) {
+          ftpRequest_count++;
           String download = "";
           String requestLowercase = request.toLowerCase();
-          if (!requestLowercase.endsWith(".jpg") && 
-                  !requestLowercase.endsWith(".pdf") && 
-                  !requestLowercase.endsWith(".txt") && 
-                  !requestLowercase.endsWith(".gif")) {
+          if (requestLowercase.endsWith(".jpg") == false && requestLowercase.endsWith(".pdf") == false && requestLowercase.endsWith(".txt") == false && requestLowercase.endsWith(".gif") == false) {
             download = request;
           }
 
           if ("".equals(downloads)) {
             downloads = download;
           } else {
-            if (!downloads.contains(download)) {
+            if (downloads.contains(download)) {
+
+            } else {
               downloads = downloads + "," + download;
             }
           }
@@ -290,43 +278,29 @@ public int processSession(ESDriver es, String sessionId) throws IOException, Int
       }
     }
 
-    if (!"".equals(keywords)) {
-      keywordsNum = keywords.split(",").length;
+    if (!keywords.equals("")) {
+      keywords_num = keywords.split(",").length;
     }
 
-    if (searchDataListRequestCount != 0
-            && searchDataListRequestCount <= Integer.parseInt(props.getProperty("searchf"))
-            && searchDataRequestCount != 0
-            && searchDataRequestCount <= Integer.parseInt(props.getProperty("viewf"))
-            && ftpRequestCount <= Integer.parseInt(props.getProperty("downloadf"))) {
-      String sessionURL = props.getProperty("SessionPort") + props.getProperty("SessionUrl") + "?sessionid=" + sessionId + "&sessionType=" + outputType + "&requestType=" + inputType;
-      sessionCount = 1;
+    if (searchDataListRequest_count != 0 &&
+        searchDataListRequest_count <= Integer.parseInt(props.getProperty(MudrodConstants.SEARCH_F)) &&
+        searchDataRequest_count != 0 &&
+        searchDataRequest_count <= Integer.parseInt(props.getProperty(MudrodConstants.VIEW_F)) &&
+        ftpRequest_count <= Integer.parseInt(props.getProperty(MudrodConstants.DOWNLOAD_F))) {
+      String sessionURL = props.getProperty(MudrodConstants.SESSION_PORT) + props.getProperty(MudrodConstants.SESSION_URL) + "?sessionid=" + sessionId + "&sessionType=" + outputType + "&requestType=" + inputType;
+      session_count = 1;
 
       IndexRequest ir = new IndexRequest(logIndex, outputType).source(
-              jsonBuilder().startObject()
-              .field("SessionID", sessionId)
-              .field("SessionURL", sessionURL)
-              .field("Duration", duration)
-              .field("Number of Keywords", keywordsNum)
-              .field("Time", min)
-              .field("End_time", max)
-              .field("searchDataListRequest_count", searchDataListRequestCount)
-              .field("searchDataListRequest_byKeywords_count", searchDataListRequestByKeywordsCount)
-              .field("searchDataRequest_count", searchDataRequestCount)
-              .field("keywords", es.customAnalyzing(logIndex, keywords))
-              .field("views", views)
-              .field("downloads", downloads)
-              .field("request_rate", requestRate)
-              .field("Comments", "")
-              .field("Validation", 0)
-              .field("Produceby", 0)
-              .field("Correlation", 0)
-              .field("IP", iP).endObject());
+          jsonBuilder().startObject().field("SessionID", sessionId).field("SessionURL", sessionURL).field("Duration", duration).field("Number of Keywords", keywords_num).field("Time", min)
+              .field("End_time", max).field("searchDataListRequest_count", searchDataListRequest_count).field("searchDataListRequest_byKeywords_count", searchDataListRequest_byKeywords_count)
+              .field("searchDataRequest_count", searchDataRequest_count).field("keywords", es.customAnalyzing(logIndex, keywords)).field("views", views).field("downloads", downloads)
+              .field("request_rate", request_rate).field("Comments", "").field("Validation", 0).field("Produceby", 0).field("Correlation", 0).field("IP", IP).endObject());
 
       es.getBulkProcessor().add(ir);
     }
 
-    return sessionCount;
+    return session_count;
   }
 
   @Override
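
For readers following the hunk above: a session survives only when its search and view counts are non-zero and every count stays at or below the configured frequency ceilings (MudrodConstants.SEARCH_F, VIEW_F, DOWNLOAD_F). A minimal standalone sketch of that guard, assuming those constants resolve to the mudrod.search.freq, mudrod.view.freq and mudrod.download.freq keys introduced in config.properties later in this diff (names here are illustrative only, not the project's code):

    import java.util.Properties;

    public class SessionFilterSketch {
      // Mirrors the guard in processSession() above; defaults match the
      // values shipped in the new config.properties.
      static boolean keepSession(Properties props, int searchCount, int viewCount, int ftpCount) {
        int searchF = Integer.parseInt(props.getProperty("mudrod.search.freq", "100"));
        int viewF = Integer.parseInt(props.getProperty("mudrod.view.freq", "200"));
        int downloadF = Integer.parseInt(props.getProperty("mudrod.download.freq", "100"));
        return searchCount != 0 && searchCount <= searchF
            && viewCount != 0 && viewCount <= viewF
            && ftpCount <= downloadF;
      }

      public static void main(String[] args) {
        System.out.println(keepSession(new Properties(), 5, 3, 0)); // true
      }
    }
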
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/package-info.java
index 79b9c2b..5bf7f27 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/pre/package-info.java
@@ -15,4 +15,4 @@
  * This package includes Preprocessing for all functionality required by the
  * {@link org.apache.sdap.mudrod.discoveryengine.WeblogDiscoveryEngine}
  */
-package org.apache.sdap.mudrod.weblog.pre;
+package org.apache.sdap.mudrod.weblog.pre;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/process/ClickStreamAnalyzer.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/process/ClickStreamAnalyzer.java
index 68fad4d..70c4067 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/process/ClickStreamAnalyzer.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/process/ClickStreamAnalyzer.java
@@ -16,6 +16,7 @@
 import org.apache.sdap.mudrod.discoveryengine.DiscoveryStepAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
+import org.apache.sdap.mudrod.main.MudrodConstants;
 import org.apache.sdap.mudrod.semantics.SVDAnalyzer;
 import org.apache.sdap.mudrod.ssearch.ClickstreamImporter;
 import org.apache.sdap.mudrod.utils.LinkageTriple;
@@ -50,13 +51,15 @@ public Object execute() {
     LOG.info("Starting ClickStreamAnalyzer...");
     startTime = System.currentTimeMillis();
     try {
-      String clickstream_matrixFile = props.getProperty("clickstreamMatrix");
+      String clickstream_matrixFile = props.getProperty(MudrodConstants.CLICKSTREAM_PATH);
       File f = new File(clickstream_matrixFile);
       if (f.exists()) {
         SVDAnalyzer svd = new SVDAnalyzer(props, es, spark);
-        svd.getSVDMatrix(props.getProperty("clickstreamMatrix"), Integer.parseInt(props.getProperty("clickstreamSVDDimension")), props.getProperty("clickstreamSVDMatrix_tmp"));
-        List<LinkageTriple> tripleList = svd.calTermSimfromMatrix(props.getProperty("clickstreamSVDMatrix_tmp"));
-        svd.saveToES(tripleList, props.getProperty("indexName"), props.getProperty("clickStreamLinkageType"));
+        svd.getSVDMatrix(clickstream_matrixFile, 
+            Integer.parseInt(props.getProperty(MudrodConstants.CLICKSTREAM_SVD_DIM)), 
+            props.getProperty(MudrodConstants.CLICKSTREAM_SVD_PATH));
+        List<LinkageTriple> tripleList = svd.calTermSimfromMatrix(props.getProperty(MudrodConstants.CLICKSTREAM_SVD_PATH));
+        svd.saveToES(tripleList, props.getProperty(MudrodConstants.ES_INDEX_NAME), MudrodConstants.CLICK_STREAM_LINKAGE_TYPE);
       
         // Store click stream in ES for the ranking use
         ClickstreamImporter cs = new ClickstreamImporter(props, es, spark);
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/process/UserHistoryAnalyzer.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/process/UserHistoryAnalyzer.java
index d95475c..c4c5da4 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/process/UserHistoryAnalyzer.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/process/UserHistoryAnalyzer.java
@@ -49,7 +49,7 @@ public Object execute() {
     startTime = System.currentTimeMillis();
 
     SemanticAnalyzer sa = new SemanticAnalyzer(props, es, spark);
-    List<LinkageTriple> tripleList = sa.calTermSimfromMatrix(props.getProperty("userHistoryMatrix"));
+    List<LinkageTriple> tripleList = sa.calTermSimfromMatrix(props.getProperty(MudrodConstants.USER_HISTORY_PATH));
     sa.saveToES(tripleList, props.getProperty(MudrodConstants.ES_INDEX_NAME), props.getProperty(MudrodConstants.USE_HISTORY_LINKAGE_TYPE));
 
     endTime = System.currentTimeMillis();
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/process/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/process/package-info.java
index a62b6ef..a6b55f4 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/process/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/process/package-info.java
@@ -14,4 +14,4 @@
 /**
  * This package includes web log processing classes.
  */
-package org.apache.sdap.mudrod.weblog.process;
+package org.apache.sdap.mudrod.weblog.process;
\ No newline at end of file
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/ApacheAccessLog.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/ApacheAccessLog.java
index 985b2d7..d28524e 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/ApacheAccessLog.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/ApacheAccessLog.java
@@ -15,35 +15,28 @@
 
 import com.google.gson.Gson;
 
+import org.apache.sdap.mudrod.main.MudrodConstants;
+import org.apache.sdap.mudrod.weblog.pre.CrawlerDetection;
+
 import java.io.IOException;
 import java.io.Serializable;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.Properties;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.sdap.mudrod.weblog.pre.CrawlerDetection;
-
 /**
  * This class represents an Apache access log line. See
  * http://httpd.apache.org/docs/2.2/logs.html for more details.
  */
 public class ApacheAccessLog extends WebLog implements Serializable {
 
-
-  /**
-   * 
-   */
-  private static final long serialVersionUID = 1L;
-
-  public ApacheAccessLog() {
-    //default constructor
-  }
-
-  String response;
-  String referer;
-  String browser;
+  // double Bytes;
+  String Response;
+  String Referer;
+  String Browser;
 
   @Override
   public double getBytes() {
@@ -51,28 +44,31 @@ public double getBytes() {
   }
 
   public String getBrowser() {
-    return this.browser;
+    return this.Browser;
   }
 
   public String getResponse() {
-    return this.response;
+    return this.Response;
   }
 
   public String getReferer() {
-    return this.referer;
+    return this.Referer;
   }
 
+  public ApacheAccessLog() {
+    super();
+  }
 
-  public static String parseFromLogLine(String log) throws IOException, ParseException {
+  public static String parseFromLogLine(String log, Properties props) throws IOException, ParseException {
 
     String logEntryPattern = "^([\\d.]+) (\\S+) (\\S+) \\[([\\w:/]+\\s[+\\-]\\d{4})\\] \"(.+?)\" (\\d{3}) (\\d+|-) \"((?:[^\"]|\")+)\" \"([^\"]+)\"";
-    final int numFields = 9;
+    final int NUM_FIELDS = 9;
     Pattern p = Pattern.compile(logEntryPattern);
     Matcher matcher;
 
     String lineJson = "{}";
     matcher = p.matcher(log);
-    if (!matcher.matches() || numFields != matcher.groupCount()) {
+    if (!matcher.matches() || NUM_FIELDS != matcher.groupCount()) {
       return lineJson;
     }
 
@@ -83,40 +79,48 @@ public static String parseFromLogLine(String log) throws IOException, ParseExcep
 
     String bytes = matcher.group(7);
 
-    if ("-".equals(bytes)) {
+    if (bytes.equals("-")) {
       bytes = "0";
     }
 
     String request = matcher.group(5).toLowerCase();
     String agent = matcher.group(9);
-    CrawlerDetection crawlerDe = new CrawlerDetection();
+    CrawlerDetection crawlerDe = new CrawlerDetection(props);
     if (crawlerDe.checkKnownCrawler(agent)) {
       return lineJson;
     } else {
 
-      String[] mimeTypes = { ".js", ".css", ".jpg", ".png", ".ico", "image_captcha", "autocomplete", ".gif", "/alldata/", "/api/", "get / http/1.1", ".jpeg", "/ws/" };
-      for (String mimeType : mimeTypes) {
-        if (request.contains(mimeType)) {
+      boolean tag = false;
+      
+      String[] mimeTypes = props.getProperty(MudrodConstants.BLACK_LIST_REQUEST).split(",");
+      for (int i = 0; i < mimeTypes.length; i++) {
+        if (request.contains(mimeTypes[i].trim())) {
+          tag = true;
           return lineJson;
         }
       }
 
-      ApacheAccessLog accesslog = new ApacheAccessLog();
-      accesslog.LogType = "PO.DAAC";
-      accesslog.IP = matcher.group(1);
-      accesslog.Request = matcher.group(5);
-      accesslog.response = matcher.group(6);
-      accesslog.Bytes = Double.parseDouble(bytes);
-      accesslog.referer = matcher.group(8);
-      accesslog.browser = matcher.group(9);
-      SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.sss'Z'");
-      accesslog.Time = df.format(date);
-
-      Gson gson = new Gson();
-      lineJson = gson.toJson(accesslog);
-
-      return lineJson;
+      if (!tag) {
+        ApacheAccessLog accesslog = new ApacheAccessLog();
+        accesslog.LogType = MudrodConstants.HTTP_LOG;
+        accesslog.IP = matcher.group(1);
+        accesslog.Request = matcher.group(5);
+        accesslog.Response = matcher.group(6);
+        accesslog.Bytes = Double.parseDouble(bytes);
+        accesslog.Referer = matcher.group(8);
+        accesslog.Browser = matcher.group(9);
+        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.sss'Z'");
+        accesslog.Time = df.format(date);
+
+        Gson gson = new Gson();
+        lineJson = gson.toJson(accesslog);
+
+        return lineJson;
+      }
     }
+
+    lineJson = "{}";
+    return lineJson;
   }
 
   public static boolean checknull(WebLog s) {
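
The new parseFromLogLine(String, Properties) signature above threads the configuration through to both the crawler check and the request blacklist. A hedged usage sketch, assuming MudrodConstants.BLACK_LIST_REQUEST and the crawler-agent constant resolve to the mudrod.black.request.list and mudrod.black.agent.list keys shipped in the new config.properties (the log line is made up):

    import java.util.Properties;

    public class AccessLogParseSketch {
      public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Illustrative subsets of the shipped blacklists.
        props.setProperty("mudrod.black.request.list", ".js, .css, .jpg, .png");
        props.setProperty("mudrod.black.agent.list", "crawler, googlebot");
        String line = "127.0.0.1 - - [07/Mar/2018:21:50:24 +0000] "
            + "\"GET /datasetlist?search=ocean HTTP/1.1\" 200 1024 "
            + "\"http://podaac.jpl.nasa.gov/\" \"Mozilla/5.0\"";
        // Returns "{}" for crawler or blacklisted requests, otherwise a JSON
        // serialization of the parsed ApacheAccessLog.
        System.out.println(ApacheAccessLog.parseFromLogLine(line, props));
      }
    }
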
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/FtpLog.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/FtpLog.java
index 488fe52..9f39655 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/FtpLog.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/FtpLog.java
@@ -15,6 +15,7 @@
 
 import com.google.gson.Gson;
 
+import org.apache.sdap.mudrod.main.MudrodConstants;
 import org.apache.sdap.mudrod.weblog.pre.ImportLogFile;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -22,6 +23,7 @@
 import java.io.Serializable;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.Properties;
 
 /**
  * This class represents an FTP access log line.
@@ -29,8 +31,12 @@
 public class FtpLog extends WebLog implements Serializable {
 
   private static final Logger LOG = LoggerFactory.getLogger(ImportLogFile.class);
+  
+  public FtpLog() {
+    super();
+  }
 
-  public static String parseFromLogLine(String log) {
+  public static String parseFromLogLine(String log, Properties props) {
 
     try {
       String ip = log.split(" +")[6];
@@ -47,7 +53,7 @@ public static String parseFromLogLine(String log) {
 
       if (!request.contains("/misc/") && !request.contains("readme")) {
         FtpLog ftplog = new FtpLog();
-        ftplog.LogType = "ftp";
+        ftplog.LogType = MudrodConstants.FTP_LOG;
         ftplog.IP = ip;
         ftplog.Request = request;
         ftplog.Bytes = Double.parseDouble(bytes);
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/GeoIp.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/GeoIp.java
index d3e94dc..9079ef5 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/GeoIp.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/GeoIp.java
@@ -13,11 +13,10 @@
  */
 package org.apache.sdap.mudrod.weblog.structure;
 
-import org.apache.sdap.mudrod.utils.HttpRequest;
-
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
 import com.google.gson.JsonParser;
+import org.apache.sdap.mudrod.utils.HttpRequest;
 
 /**
  * ClassName: GeoIp Function: convert IP to geo location
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/RequestUrl.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/RequestUrl.java
index f86438d..0476095 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/RequestUrl.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/RequestUrl.java
@@ -117,7 +117,7 @@ private static String truncateUrlPage(String strURL) {
   }
 
   /**
-   * GetSearchInfo: Get search information from url link
+   * GetSearchInfo: Get search information/keywords from url link
    *
    * @param URL request url
    * @return search params
@@ -187,34 +187,34 @@ public String getSearchInfo(String URL) throws UnsupportedEncodingException {
   }
 
   /**
-   * GetSearchWord: Get search words from url link
+   * GetSearchWord: Initial version of getting search words from a url link
    *
    * @param url request url
    * @return query
    */
-  public static String getSearchWord(String url) {
-    String keyword = "";
-
-    Map<String, String> mapRequest = RequestUrl.uRLRequest(url);
-    if (mapRequest.get("search") != null) {
-      try {
-        keyword = mapRequest.get("search");
-
-        keyword = URLDecoder.decode(keyword.replaceAll("%(?![0-9a-fA-F]{2})", "%25"), "UTF-8");
-        if (keyword.contains("%2b") || keyword.contains("%20") || keyword.contains("%25")) {
-          keyword = keyword.replace("%2b", " ");
-          keyword = keyword.replace("%20", " ");
-          keyword = keyword.replace("%25", " ");
-        }
-        keyword = keyword.replaceAll("[-+^:,*_\"]", " ").replace("\\", " ").replaceAll("\\s+", " ").trim();
-      } catch (UnsupportedEncodingException e) {
-        LOG.error(mapRequest.get("search"));
-        e.printStackTrace();
-      }
-    }
-
-    return keyword;
-  }
+//  public static String getSearchWord(String url) {
+//    String keyword = "";
+//
+//    Map<String, String> mapRequest = RequestUrl.uRLRequest(url);
+//    if (mapRequest.get("search") != null) {
+//      try {
+//        keyword = mapRequest.get("search");
+//
+//        keyword = URLDecoder.decode(keyword.replaceAll("%(?![0-9a-fA-F]{2})", "%25"), "UTF-8");
+//        if (keyword.contains("%2b") || keyword.contains("%20") || keyword.contains("%25")) {
+//          keyword = keyword.replace("%2b", " ");
+//          keyword = keyword.replace("%20", " ");
+//          keyword = keyword.replace("%25", " ");
+//        }
+//        keyword = keyword.replaceAll("[-+^:,*_\"]", " ").replace("\\", " ").replaceAll("\\s+", " ").trim();
+//      } catch (UnsupportedEncodingException e) {
+//        LOG.error(mapRequest.get("search"));
+//        e.printStackTrace();
+//      }
+//    }
+//
+//    return keyword;
+//  }
 
   /**
    * GetFilterInfo: Get filter params from url link
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/Session.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/Session.java
index f11efc6..5b25cde 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/Session.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/Session.java
@@ -16,8 +16,9 @@
 import com.google.gson.Gson;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
-
 import org.apache.sdap.mudrod.driver.ESDriver;
+import org.apache.sdap.mudrod.main.MudrodConstants;
+
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.SearchHit;
@@ -214,7 +215,7 @@ private SessionTree getSessionTree(String indexName, String type, String session
       String logType = (String) result.get("LogType");
       String referer = (String) result.get("Referer");
 
-      SessionNode node = new SessionNode(request, logType, referer, time, seq);
+      SessionNode node = new SessionNode(request, logType, referer, props.getProperty(MudrodConstants.BASE_URL), time, seq);
       tree.insert(node);
       seq++;
     }
@@ -231,7 +232,7 @@ private SessionTree getSessionTree(String indexName, String type, String session
    * @throws UnsupportedEncodingException UnsupportedEncodingException
    */
   private JsonElement getRequests(String cleanuptype, String sessionID) throws UnsupportedEncodingException {
-    SearchResponse response = es.getClient().prepareSearch(props.getProperty("indexName")).setTypes(cleanuptype).setQuery(QueryBuilders.termQuery("SessionID", sessionID)).setSize(100)
+    SearchResponse response = es.getClient().prepareSearch(props.getProperty(MudrodConstants.ES_INDEX_NAME)).setTypes(cleanuptype).setQuery(QueryBuilders.termQuery("SessionID", sessionID)).setSize(100)
         .addSort("Time", SortOrder.ASC).execute().actionGet();
 
     Gson gson = new Gson();
@@ -278,7 +279,7 @@ private JsonElement getRequests(String cleanuptype, String sessionID) throws Uns
 
     List<RankingTrainData> trainData = new ArrayList<>();
     try {
-      trainData = tree.getRankingTrainData(indexName);
+      trainData = tree.getRankingTrainData(indexName, sessionID);
     } catch (UnsupportedEncodingException e) {
       LOG.error("Error whilst retreiving ranking training data: {}", e);
     }
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionExtractor.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionExtractor.java
index 6adaf97..8a38a39 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionExtractor.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionExtractor.java
@@ -16,6 +16,7 @@
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
 import org.apache.sdap.mudrod.main.MudrodConstants;
+
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
@@ -95,13 +96,15 @@ public SessionExtractor() {
     List<String> logIndexList = es.getIndexListWithPrefix(props.getProperty(MudrodConstants.LOG_INDEX));
 
     List<ClickStream> result = new ArrayList<>();
-    for (String logIndex : logIndexList) {
+    for (int n = 0; n < logIndexList.size(); n++) {
+      String logIndex = logIndexList.get(n);
       List<String> sessionIdList;
       try {
         sessionIdList = this.getSessions(props, es, logIndex);
         Session session = new Session(props, es);
-        for (String aSessionIdList : sessionIdList) {
-          String[] sArr = aSessionIdList.split(",");
+        int sessionNum = sessionIdList.size();
+        for (int i = 0; i < sessionNum; i++) {
+          String[] sArr = sessionIdList.get(i).split(",");
           List<ClickStream> datas = session.getClickStreamList(sArr[1], sArr[2], sArr[0]);
           result.addAll(datas);
         }
@@ -120,15 +123,15 @@ public SessionExtractor() {
     LOG.info("Retrieved {}", logIndexList.toString());
 
     List<String> sessionIdList = new ArrayList<>();
-    for (String logIndex : logIndexList) {
+    for (int n = 0; n < logIndexList.size(); n++) {
+      String logIndex = logIndexList.get(n);
       List<String> tmpsessionList = this.getSessions(props, es, logIndex);
       sessionIdList.addAll(tmpsessionList);
     }
 
     JavaRDD<String> sessionRDD = spark.sc.parallelize(sessionIdList, 16);
 
-    JavaRDD<ClickStream> clickStreamRDD = sessionRDD.mapPartitions(
-            new FlatMapFunction<Iterator<String>, ClickStream>() {
+    JavaRDD<ClickStream> clickStreamRDD = sessionRDD.mapPartitions(new FlatMapFunction<Iterator<String>, ClickStream>() {
       /**
        *
        */
@@ -247,17 +250,17 @@ public SessionExtractor() {
    */
   protected List<String> getSessions(Properties props, ESDriver es, String logIndex) {
 
-    String cleanupPrefix = props.getProperty(MudrodConstants.CLEANUP_TYPE_PREFIX);
-    String sessionStatPrefix = props.getProperty(MudrodConstants.SESSION_STATS_PREFIX);
+    String cleanupType = MudrodConstants.CLEANUP_TYPE;
+    String sessionStatType = MudrodConstants.SESSION_STATS_TYPE;
 
     List<String> sessions = new ArrayList<>();
-    SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(sessionStatPrefix).setScroll(new TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery()).setSize(100).execute()
+    SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(sessionStatType).setScroll(new TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery()).setSize(100).execute()
             .actionGet();
     while (true) {
       for (SearchHit hit : scrollResp.getHits().getHits()) {
         Map<String, Object> session = hit.getSource();
         String sessionID = (String) session.get("SessionID");
-        sessions.add(sessionID + "," + logIndex + "," + cleanupPrefix);
+        sessions.add(sessionID + "," + logIndex + "," + cleanupType);
       }
 
       scrollResp = es.getClient().prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet();
@@ -380,8 +383,9 @@ public Boolean call(Tuple2<String, Double> arg0) throws Exception {
 
     List<String> result = new ArrayList<>();
     List<String> logIndexList = es.getIndexListWithPrefix(props.getProperty(MudrodConstants.LOG_INDEX));
-    for (String logIndex : logIndexList) {
-      SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(props.getProperty(MudrodConstants.SESSION_STATS_PREFIX)).setScroll(new TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery())
+    for (int n = 0; n < logIndexList.size(); n++) {
+      String logIndex = logIndexList.get(n);
+      SearchResponse scrollResp = es.getClient().prepareSearch(logIndex).setTypes(MudrodConstants.SESSION_STATS_TYPE).setScroll(new TimeValue(60000)).setQuery(QueryBuilders.matchAllQuery())
               .setSize(100).execute().actionGet();
       while (true) {
         for (SearchHit hit : scrollResp.getHits().getHits()) {
@@ -414,9 +418,11 @@ public Boolean call(Tuple2<String, Double> arg0) throws Exception {
 
         String items = splits[1];
         String[] itemArr = items.split(",");
-        for (String item : itemArr) {
+        int size = itemArr.length;
+        for (int i = 0; i < size; i++) {
+          String item = itemArr[i];
           if (!itemList.contains(item))
-            itemList.add(item);
+            itemList.add(itemArr[i]);
         }
 
         return new Tuple2<>(sessionId, itemList);
@@ -458,13 +464,15 @@ public Boolean call(Tuple2<String, Double> arg0) throws Exception {
     LOG.info(logIndexList.toString());
 
     List<RankingTrainData> result = new ArrayList<>();
-    for (String logIndex : logIndexList) {
+    for (int n = 0; n < logIndexList.size(); n++) {
+      String logIndex = logIndexList.get(n);
       List<String> sessionIdList;
       try {
         sessionIdList = this.getSessions(props, es, logIndex);
         Session session = new Session(props, es);
-        for (String aSessionIdList : sessionIdList) {
-          String[] sArr = aSessionIdList.split(",");
+        int sessionNum = sessionIdList.size();
+        for (int i = 0; i < sessionNum; i++) {
+          String[] sArr = sessionIdList.get(i).split(",");
           List<RankingTrainData> datas = session.getRankingTrainData(sArr[1], sArr[2], sArr[0]);
           result.addAll(datas);
         }
@@ -483,15 +491,15 @@ public Boolean call(Tuple2<String, Double> arg0) throws Exception {
     LOG.info(logIndexList.toString());
 
     List<String> sessionIdList = new ArrayList<>();
-    for (String logIndex : logIndexList) {
+    for (int n = 0; n < logIndexList.size(); n++) {
+      String logIndex = logIndexList.get(n);
       List<String> tmpsessionList = this.getSessions(props, es, logIndex);
       sessionIdList.addAll(tmpsessionList);
     }
 
     JavaRDD<String> sessionRDD = spark.sc.parallelize(sessionIdList, 16);
 
-    JavaRDD<RankingTrainData> clickStreamRDD = sessionRDD.mapPartitions(
-            new FlatMapFunction<Iterator<String>, RankingTrainData>() {
+    JavaRDD<RankingTrainData> clickStreamRDD = sessionRDD.mapPartitions(new FlatMapFunction<Iterator<String>, RankingTrainData>() {
       /**
        *
        */
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionNode.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionNode.java
index 5e43f3e..2838063 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionNode.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionNode.java
@@ -65,12 +65,12 @@ public SessionNode() {
    * @param time:    request time of node
    * @param seq:     sequence of this node
    */
-  public SessionNode(String request, String logType, String referer, String time, int seq) {
+  public SessionNode(String request, String logType, String referer, String basicUrl, String time, int seq) {
     this.logType = logType;
     this.time = time;
     this.seq = seq;
     this.setRequest(request);
-    this.setReferer(referer);
+    this.setReferer(referer, basicUrl);
     this.setKey(request, logType);
   }
 
@@ -79,12 +79,13 @@ public SessionNode(String request, String logType, String referer, String time,
    *
    * @param referer previous request url
    */
-  public void setReferer(String referer) {
+  public void setReferer(String referer, String basicUrl) {
     if (referer == null) {
       this.referer = "";
       return;
     }
-    this.referer = referer.toLowerCase().replace("http://podaac.jpl.nasa.gov", "");
+    //this.referer = referer.toLowerCase().replace("http://podaac.jpl.nasa.gov", "");
+    this.referer = referer.toLowerCase().replace(basicUrl, "");
   }
 
   /**
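
The SessionNode change above replaces the hard-coded PO.DAAC host with the injected mudrod.base.url value. The normalization itself is tiny; a self-contained sketch (note the base URL is matched after lowercasing the referer, so a lowercase value such as the shipped default is assumed):

    public class RefererSketch {
      static String normalizeReferer(String referer, String baseUrl) {
        if (referer == null) {
          return "";
        }
        // Strip the configured site prefix so only the request path remains.
        return referer.toLowerCase().replace(baseUrl, "");
      }

      public static void main(String[] args) {
        System.out.println(normalizeReferer(
            "http://podaac.jpl.nasa.gov/dataset/FOO", "http://podaac.jpl.nasa.gov"));
        // prints /dataset/foo
      }
    }
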
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionTree.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionTree.java
index ac547dc..3eaa529 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionTree.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/SessionTree.java
@@ -16,18 +16,15 @@
 import com.google.gson.Gson;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
-
 import org.apache.sdap.mudrod.discoveryengine.MudrodAbstract;
 import org.apache.sdap.mudrod.driver.ESDriver;
+import org.apache.sdap.mudrod.main.MudrodConstants;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.UnsupportedEncodingException;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
+import java.util.*;
 import java.util.concurrent.ExecutionException;
 
 /**
@@ -66,7 +63,7 @@
    */
   public SessionTree(Properties props, ESDriver es, SessionNode rootData, String sessionID, String cleanupType) {
     super(props, es, null);
-    root = new SessionNode("root", "root", "", "", 0);
+    root = new SessionNode("root", "root", "", props.getProperty(MudrodConstants.BASE_URL), "", 0);
     tmpnode = root;
     this.sessionID = sessionID;
     this.cleanupType = cleanupType;
@@ -82,7 +79,7 @@ public SessionTree(Properties props, ESDriver es, SessionNode rootData, String s
    */
   public SessionTree(Properties props, ESDriver es, String sessionID, String cleanupType) {
     super(props, es, null);
-    root = new SessionNode("root", "root", "", "", 0);
+    root = new SessionNode("root", "root", "", props.getProperty(MudrodConstants.BASE_URL), "", 0);
     root.setParent(root);
     tmpnode = root;
     this.sessionID = sessionID;
@@ -97,16 +94,14 @@ public SessionTree(Properties props, ESDriver es, String sessionID, String clean
    */
   public SessionNode insert(SessionNode node) {
     // begin with datasetlist
-    if ("datasetlist".equals(node.getKey())) {
+    if (node.getKey().equals("datasetlist")) {
       this.binsert = true;
     }
     if (!this.binsert) {
       return null;
     }
     // remove unrelated node
-    if (!"datasetlist".equals(node.getKey()) &&
-            !"dataset".equals(node.getKey()) &&
-            !"ftp".equals(node.getKey())) {
+    if (!node.getKey().equals("datasetlist") && !node.getKey().equals("dataset") && !node.getKey().equals("ftp")) {
       return null;
     }
    // remove duplicated click
@@ -192,7 +187,9 @@ public JsonObject treeToJson(SessionNode node) {
 
     List<ClickStream> clickthroughs = new ArrayList<>();
     List<SessionNode> viewnodes = this.getViewNodes(this.root);
-    for (SessionNode viewnode : viewnodes) {
+    for (int i = 0; i < viewnodes.size(); i++) {
+
+      SessionNode viewnode = viewnodes.get(i);
       SessionNode parent = viewnode.getParent();
       List<SessionNode> children = viewnode.getChildren();
 
@@ -204,14 +201,15 @@ public JsonObject treeToJson(SessionNode node) {
       String viewquery = "";
       try {
         String infoStr = requestURL.getSearchInfo(viewnode.getRequest());
-        viewquery = es.customAnalyzing(props.getProperty("indexName"), infoStr);
+        viewquery = es.customAnalyzing(props.getProperty(MudrodConstants.ES_INDEX_NAME), infoStr);
       } catch (UnsupportedEncodingException | InterruptedException | ExecutionException e) {
         LOG.warn("Exception getting search info. Ignoring...", e);
       }
 
       String dataset = viewnode.getDatasetId();
       boolean download = false;
-      for (SessionNode child : children) {
+      for (int j = 0; j < children.size(); j++) {
+        SessionNode child = children.get(j);
         if ("ftp".equals(child.getKey())) {
           download = true;
           break;
@@ -221,8 +219,8 @@ public JsonObject treeToJson(SessionNode node) {
       if (viewquery != null && !"".equals(viewquery)) {
         String[] queries = viewquery.trim().split(",");
         if (queries.length > 0) {
-          for (String query : queries) {
-            ClickStream data = new ClickStream(query, dataset, download);
+          for (int k = 0; k < queries.length; k++) {
+            ClickStream data = new ClickStream(queries[k], dataset, download);
             data.setSessionId(this.sessionID);
             data.setType(this.cleanupType);
             clickthroughs.add(data);
@@ -329,8 +327,8 @@ private SessionNode iterChild(SessionNode start, String refer) {
    * @return
    */
   private boolean check(List<SessionNode> children, String str) {
-    for (SessionNode aChildren : children) {
-      if (aChildren.key.equals(str)) {
+    for (int i = 0; i < children.size(); i++) {
+      if (children.get(i).key.equals(str)) {
         return true;
       }
     }
@@ -345,8 +343,8 @@ private boolean check(List<SessionNode> children, String str) {
    * @return
    */
   private boolean insertHelperChildren(SessionNode entry, List<SessionNode> children) {
-    for (SessionNode aChildren : children) {
-      boolean result = insertHelper(entry, aChildren);
+    for (int i = 0; i < children.size(); i++) {
+      boolean result = insertHelper(entry, children.get(i));
       if (result) {
         return result;
       }
@@ -450,26 +448,30 @@ private boolean insertHelper(SessionNode entry, SessionNode node) {
    * Obtain the ranking training data.
    *
   * @param indexName   the index from which to obtain the data
+   * @param sessionID   a valid session identifier
    * @return {@link ClickStream}
    * @throws UnsupportedEncodingException if there is an error whilst
    *                                      processing the ranking training data.
    */
-  public List<RankingTrainData> getRankingTrainData(String indexName) throws UnsupportedEncodingException {
+  public List<RankingTrainData> getRankingTrainData(String indexName, String sessionID) throws UnsupportedEncodingException {
 
     List<RankingTrainData> trainDatas = new ArrayList<>();
 
     List<SessionNode> queryNodes = this.getQueryNodes(this.root);
-    for (SessionNode querynode : queryNodes) {
+    for (int i = 0; i < queryNodes.size(); i++) {
+      SessionNode querynode = queryNodes.get(i);
       List<SessionNode> children = querynode.getChildren();
 
       LinkedHashMap<String, Boolean> datasetOpt = new LinkedHashMap<>();
       int ndownload = 0;
-      for (SessionNode node : children) {
+      for (int j = 0; j < children.size(); j++) {
+        SessionNode node = children.get(j);
         if ("dataset".equals(node.getKey())) {
           Boolean bDownload = false;
           List<SessionNode> nodeChildren = node.getChildren();
-          for (SessionNode aNodeChildren : nodeChildren) {
-            if ("ftp".equals(aNodeChildren.getKey())) {
+          int childSize = nodeChildren.size();
+          for (int k = 0; k < childSize; k++) {
+            if ("ftp".equals(nodeChildren.get(k).getKey())) {
               bDownload = true;
               ndownload += 1;
               break;
@@ -487,7 +489,7 @@ private boolean insertHelper(SessionNode entry, SessionNode node) {
         String infoStr = requestURL.getSearchInfo(queryUrl);
         String query = null;
         try {
-          query = es.customAnalyzing(props.getProperty("indexName"), infoStr);
+          query = es.customAnalyzing(props.getProperty(MudrodConstants.ES_INDEX_NAME), infoStr);
         } catch (InterruptedException | ExecutionException e) {
           throw new RuntimeException("Error performing custom analyzing", e);
         }
@@ -501,8 +503,9 @@ private boolean insertHelper(SessionNode entry, SessionNode node) {
               if (!bDownloadB) {
 
                 String[] queries = query.split(",");
-                for (String query1 : queries) {
-                  RankingTrainData trainData = new RankingTrainData(query1, datasetA, datasetB);
+                for (int l = 0; l < queries.length; l++) {
+                  RankingTrainData trainData = new RankingTrainData(queries[l], datasetA, datasetB);
+
                   trainData.setSessionId(this.sessionID);
                   trainData.setIndex(indexName);
                   trainData.setFilter(filter);
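
For orientation, the loop above emits one RankingTrainData per (query, downloaded dataset, non-downloaded dataset) triple: within a session, a dataset whose subtree contains an ftp node is treated as preferred over one that was only viewed. A stripped-down sketch of that pairing, with hypothetical dataset names:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class PairingSketch {
      public static void main(String[] args) {
        // dataset -> was it downloaded in this session?
        LinkedHashMap<String, Boolean> datasetOpt = new LinkedHashMap<>();
        datasetOpt.put("dsA", true);
        datasetOpt.put("dsB", false);
        for (Map.Entry<String, Boolean> a : datasetOpt.entrySet()) {
          if (!a.getValue()) {
            continue; // only downloaded datasets anchor a preference pair
          }
          for (Map.Entry<String, Boolean> b : datasetOpt.entrySet()) {
            if (!b.getValue()) {
              System.out.println("query prefers " + a.getKey() + " over " + b.getKey());
            }
          }
        }
      }
    }
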
diff --git a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/package-info.java b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/package-info.java
index d5fe805..4ab063b 100644
--- a/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/package-info.java
+++ b/core/src/main/java/org/apache/sdap/mudrod/weblog/structure/package-info.java
@@ -14,4 +14,4 @@
 /**
  * This package includes data structure needed for web log analysis
  */
-package org.apache.sdap.mudrod.weblog.structure;
+package org.apache.sdap.mudrod.weblog.structure;
\ No newline at end of file
diff --git a/core/src/main/resources/config.properties b/core/src/main/resources/config.properties
new file mode 100644
index 0000000..0162e1d
--- /dev/null
+++ b/core/src/main/resources/config.properties
@@ -0,0 +1,76 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); 
+# you may not use this file except in compliance with the License. 
+# You may obtain  a copy of the License at 
+#  
+# http://www.apache.org/licenses/LICENSE-2.0 Unless 
+#  
+# required by applicable law or agreed to in writing, software 
+# distributed under the License is distributed on an "AS IS" 
+# BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 
+# express or implied. See the License for the specific language 
+# governing permissions and limitations under the License. 
+# Define some default values that can be overridden by system properties
+# Logging Threshold
+ 
+
+# Database configuration
+mudrod.cluster.name=MudrodES
+mudrod.es.transport.tcp.port = 9300
+mudrod.es.unicast.hosts = 127.0.0.1
+mudrod.es.http.port = 9200
+mudrod.es.index = mudrod
+    
+# Spark related
+# Log processing type. Possible values include 'sequential' or 'parallel'
+mudrod.processing.type = parallel
+mudrod.spark.app.name = MudrodSparkApp
+mudrod.spark.master = local[4]
+mudrod.spark.optimize = repartition
+    
+# Web log processing configuration
+# index name has to be all lowercase
+mudrod.log.index = log
+mudrod.ftp.prefix = FTP.
+mudrod.http.prefix = WWW.
+mudrod.base.url = http://podaac.jpl.nasa.gov
+mudrod.black.request.list = .js, .css, .jpg, .png, .ico, image_captcha, autocomplete, .gif, /alldata/, /api/, get / http/1.1, .jpeg, /ws/
+mudrod.black.agent.list = crawler, googlebot, bingbot, slurp, yacybot, rogerbot, yandexbot, -, apache-httpclient, java, curl
+mudrod.search.freq = 100
+mudrod.view.freq = 200
+mudrod.download.freq = 100
+mudrod.request.rate = 30
+mudrod.session.port = 8080
+mudrod.session.url = /mudrod-service/session.html
+mudrod.request.time.gap = 600   
+mudrod.view.url.marker = /dataset/
+mudrod.search.url.marker = /datasetlist?
+# In order to better parse a URL (getting the search keyword, etc.), please consider customizing
+# org.apache.sdap.mudrod.weblog.structure.RequestUrl - GetSearchInfo, getFilterInfo
+	
+# User search history
+mudrod.query.min = 0
+mudrod.user.history.weight = 2
+	
+# clickstream
+mudrod.download.weight = 3
+mudrod.clickstream.svd.d = 50
+mudrod.clickstream.weight = 2
+			 	
+# metadata
+mudrod.metadata.download = 0
+mudrod.metadata.download.url = https://podaac.jpl.nasa.gov/api/dataset?startIndex=$startIndex&amp;entries=10&amp;sortField=Dataset-AllTimePopularity&amp;sortOrder=asc&amp;id=&amp;value=&amp;search=
+mudrod.metadata.svd.d = 50
+mudrod.metadata.url = null
+mudrod.metadata.weight = 1
+mudrod.metadata.type = RawMetadata
+       	
+# ranking, ${svmSgdModel.value} is resolved at build time. See the property in core/pom.xml for the value
+mudrod.ranking.model = ${svmSgdModel.value}.zip
+       	
+# recommendation
+mudrod.metadata.id = Dataset-ShortName
+mudrod.metadata.semantic.fields = DatasetParameter-Term,DatasetParameter-Variable,Dataset-ExtractTerm
+
+# Ontology service implementation. Possible values include:
+#   EsipPortal - EsipPortalOntology
+#   EsipCOR - EsipCOROntology
+#   Local - org.apache.sdap.mudrod.ontology.process.Local
+mudrod.ontology.implementation = Local
+mudrod.ontology.weight = 2
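
The XML configuration removed in the next hunk is replaced by this flat properties file. Nothing in this diff shows how MudrodEngine actually loads it, so purely as a generic illustration, the file can be read off the classpath with java.util.Properties:

    import java.io.InputStream;
    import java.util.Properties;

    public class ConfigLoadSketch {
      public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // config.properties now ships under core/src/main/resources.
        try (InputStream in = ConfigLoadSketch.class.getClassLoader()
            .getResourceAsStream("config.properties")) {
          props.load(in);
        }
        // Spaces around '=' are tolerated; Properties trims them from keys
        // and from the start of values.
        System.out.println(props.getProperty("mudrod.es.index"));     // mudrod
        System.out.println(props.getProperty("mudrod.spark.master")); // local[4]
      }
    }
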
diff --git a/core/src/main/resources/config.xml b/core/src/main/resources/config.xml
deleted file mode 100644
index 5a5bcee..0000000
--- a/core/src/main/resources/config.xml
+++ /dev/null
@@ -1,129 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-  Licensed under the Apache License, Version 2.0 (the "License"); 
-  you may not use this file except in compliance with the License. 
-  You may obtain  a copy of the License at 
-  
-  http://www.apache.org/licenses/LICENSE-2.0 Unless 
-  
-  required by applicable law or agreed to in writing, software 
-  distributed under the License is distributed on an "AS IS" 
-  BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 
-  express or implied. See the License for the specific language 
-  governing permissions and limitations under the License. 
--->
-<Config>
-    <para name="Cleanup_type_prefix">cleanupLog</para>
-
-    <para name="clickStreamLinkageType">ClickStreamLinkage</para>
-
-    <para name="clickStreamMatrixType">clickstreamMatrix</para>
-
-    <para name="clickstreamSVDDimension">50</para>
-
-    <para name="clickStream_w">2</para>
-
-    <para name="commentType">comment</para>
-
-    <para name="downloadf">100</para>
-
-    <para name="downloadWeight">3</para>
-
-    <para name="clusterName">MudrodES</para>
-
-    <para name="ES_Transport_TCP_Port">9300</para>
-
-    <para name="ES_unicast_hosts">127.0.0.1</para>
-
-    <para name="ES_HTTP_port">9200</para>
-
-    <para name="indexName">mudrod</para>
-
-    <para name="ftpPrefix">FTP.</para>
-    
-    <para name="FTP_type_prefix">rawftp</para>
-
-    <para name="HTTP_type_prefix">rawhttp</para>
-
-    <para name="httpPrefix">WWW.</para>
-
-    <para name="logIndexName">podaaclog</para>
-
-    <para name="metadataLinkageType">MetadataLinkage</para>
-
-    <para name="metadataSVDDimension">50</para>
-
-    <para name="metadataurl">null</para>
-
-    <para name="metadata_w">1</para>
-
-    <para name="mini_userHistory">5</para>
-
-    <!--
-    The ontology service implementation. Possible values include
-     EsipPortal - EsipPortalOntology
-     EsipCOR - EsipCOROntology
-     Local - org.apache.sdap.mudrod.ontology.process.Local
-     -->
-    <para name="mudrod.ontology.implementation">Local</para>
-
-    <para name="ontologyLinkageType">SWEETLinkage</para>
-
-    <para name="ontology_w">2</para>
-
-    <!--
-     Log processing type. Possible values include
-     'sequential' or 'parallel'.
-     -->
-    <para name="processingType">parallel</para>
-
-    <para name="raw_metadataType">RawMetadata</para>
-
-    <para name="searchf">100</para>
-
-    <para name="sendingrate">30</para>
-
-    <para name="SessionPort">8080</para>
-
-    <para name="SessionStats_prefix">sessionstats</para>
-
-    <para name="SessionUrl">/mudrod-service/session.html</para>
-
-    <!-- The name of your application. This will appear in the UI and in log data.-->
-    <para name="spark.app.name">MudrodSparkApp</para>
-
-    <!--
-    The default Spark cluster manager to connect to. See the list of allowed master URL's.
-    For more information, consult http://spark.apache.org/docs/latest/submitting-applications.html#master-urls
-    -->
-    <para name="spark.master">local[4]</para>
-
-    <!-- ${svmSgdModel.value} is resolved at build time. See the property in core/pom.xml for the value -->
-    <para name="svmSgdModel">${svmSgdModel.value}.zip</para>
-    
-    <para name="timegap">600</para>
-    
-    <para name="userHistoryLinkageType">UserHistoryLinkage</para>
-    
-    <para name="userHistory_w">2</para>
-
-    <para name="viewf">200</para>
-
-
-
-    <!-- FOLLOWING NEEDS TO BE ADDED TO MudrodConstants.java -->
-    <para name="recom_metadataType">RecomMetadata</para>
-    <!-- recommendation -->
-    <para name="metadataTermTFIDFSimType">MetadataTermTFIDFSim</para>
-    <para name="metadataWordTFIDFSimType">MetadataWordTFIDFSim</para>
-    <para name="metadataCodeSimType">MetadataCodeSim</para>
-    <para name="metadataSessionBasedSimType">MetadataSBSim</para>
-    <para name="metadataTopicSimType">MetadataTBSim</para>
-    <!--
-    Log processing parallel optimization type. Possible values include
-    default - MudrodConstants.PARALLEL_OPTIMIZATION_DEFAULT
-    repartition - MudrodConstants.PARALLEL_OPTIMIZATION_REPARTITION
-    -->
-    <para name="parallelOptimization">repartition</para>
-
-</Config>
diff --git a/service/.classpath b/service/.classpath
new file mode 100644
index 0000000..5ea154b
--- /dev/null
+++ b/service/.classpath
@@ -0,0 +1,250 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+	<classpathentry kind="src" output="target/classes" path="src/main/java">
+		<attributes>
+			<attribute name="optional" value="true"/>
+			<attribute name="maven.pomderived" value="true"/>
+		</attributes>
+	</classpathentry>
+	<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
+		<attributes>
+			<attribute name="maven.pomderived" value="true"/>
+		</attributes>
+	</classpathentry>
+	<classpathentry excluding="**/*.java" kind="src" path="target/maven-shared-archive-resources"/>
+	<classpathentry kind="var" path="M2_REPO/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar"/>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8">
+		<attributes>
+			<attribute name="maven.pomderived" value="true"/>
+		</attributes>
+	</classpathentry>
+	<classpathentry kind="src" path="/mudrod-core"/>
+	<classpathentry kind="var" path="M2_REPO/com/google/code/gson/gson/2.5/gson-2.5.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/jdom/jdom/2.0.2/jdom-2.0.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/elasticsearch/elasticsearch/5.2.0/elasticsearch-5.2.0.jar" sourcepath="M2_REPO/org/elasticsearch/elasticsearch/5.2.0/elasticsearch-5.2.0-sources.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-core/6.4.0/lucene-core-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-analyzers-common/6.4.0/lucene-analyzers-common-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-backward-codecs/6.4.0/lucene-backward-codecs-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-grouping/6.4.0/lucene-grouping-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-highlighter/6.4.0/lucene-highlighter-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-join/6.4.0/lucene-join-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-memory/6.4.0/lucene-memory-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-misc/6.4.0/lucene-misc-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-queries/6.4.0/lucene-queries-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-queryparser/6.4.0/lucene-queryparser-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-sandbox/6.4.0/lucene-sandbox-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-spatial/6.4.0/lucene-spatial-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-spatial-extras/6.4.0/lucene-spatial-extras-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-spatial3d/6.4.0/lucene-spatial3d-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/lucene/lucene-suggest/6.4.0/lucene-suggest-6.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/elasticsearch/securesm/1.1/securesm-1.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/net/sf/jopt-simple/jopt-simple/5.0.2/jopt-simple-5.0.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/carrotsearch/hppc/0.7.1/hppc-0.7.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/joda-time/joda-time/2.9.4/joda-time-2.9.4.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/yaml/snakeyaml/1.15/snakeyaml-1.15.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/fasterxml/jackson/core/jackson-core/2.8.6/jackson-core-2.8.6.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/fasterxml/jackson/dataformat/jackson-dataformat-smile/2.8.6/jackson-dataformat-smile-2.8.6.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/fasterxml/jackson/dataformat/jackson-dataformat-yaml/2.8.6/jackson-dataformat-yaml-2.8.6.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.8.6/jackson-dataformat-cbor-2.8.6.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/tdunning/t-digest/3.0/t-digest-3.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/hdrhistogram/HdrHistogram/2.1.6/HdrHistogram-2.1.6.jar"/>
+	<classpathentry kind="var" path="M2_REPO/net/java/dev/jna/jna/4.2.2/jna-4.2.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/elasticsearch/client/transport/5.2.0/transport-5.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/elasticsearch/plugin/transport-netty3-client/5.2.0/transport-netty3-client-5.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/io/netty/netty/3.8.0.Final/netty-3.8.0.Final.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/elasticsearch/plugin/reindex-client/5.2.0/reindex-client-5.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/elasticsearch/client/rest/5.2.0/rest-5.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/httpcomponents/httpcore/4.4.5/httpcore-4.4.5.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/httpcomponents/httpasyncclient/4.1.2/httpasyncclient-4.1.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/httpcomponents/httpcore-nio/4.4.5/httpcore-nio-4.4.5.jar"/>
+	<classpathentry kind="var" path="M2_REPO/commons-codec/commons-codec/1.3/commons-codec-1.3.jar"/>
+	<classpathentry kind="var" path="M2_REPO/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/elasticsearch/plugin/lang-mustache-client/5.2.0/lang-mustache-client-5.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/spullara/mustache/java/compiler/0.9.3/compiler-0.9.3.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/elasticsearch/plugin/percolator-client/5.2.0/percolator-client-5.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/elasticsearch/elasticsearch-spark-20_2.11/5.2.0/elasticsearch-spark-20_2.11-5.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/spark/spark-sql_2.11/2.1.0/spark-sql_2.11-2.1.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/univocity/univocity-parsers/2.2.1/univocity-parsers-2.2.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/spark/spark-sketch_2.11/2.1.0/spark-sketch_2.11-2.1.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/spark/spark-tags_2.11/2.1.0/spark-tags_2.11-2.1.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/scalatest/scalatest_2.11/2.2.6/scalatest_2.11-2.2.6.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/scala-lang/scala-reflect/2.11.7/scala-reflect-2.11.7.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-xml_2.11/1.0.2/scala-xml_2.11-1.0.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/spark-project/spark/unused/1.0.0/unused-1.0.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/spark/spark-core_2.11/2.1.0/spark-core_2.11-2.1.0.jar" sourcepath="M2_REPO/org/apache/spark/spark-core_2.11/2.1.0/spark-core_2.11-2.1.0-sources.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/avro/avro-mapred/1.7.7/avro-mapred-1.7.7-hadoop2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/avro/avro-ipc/1.7.7/avro-ipc-1.7.7.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/avro/avro/1.7.7/avro-1.7.7.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/thoughtworks/paranamer/paranamer/2.6/paranamer-2.6.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/xerial/snappy/snappy-java/1.1.2.6/snappy-java-1.1.2.6.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/commons/commons-compress/1.4.1/commons-compress-1.4.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/tukaani/xz/1.0/xz-1.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/slf4j/slf4j-api/1.7.16/slf4j-api-1.7.16.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/avro/avro-ipc/1.7.7/avro-ipc-1.7.7-tests.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/twitter/chill_2.11/0.8.0/chill_2.11-0.8.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/twitter/chill-java/0.8.0/chill-java-0.8.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/esotericsoftware/kryo-shaded/3.0.3/kryo-shaded-3.0.3.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/esotericsoftware/minlog/1.3.0/minlog-1.3.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/objenesis/objenesis/2.1/objenesis-2.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/xbean/xbean-asm5-shaded/4.4/xbean-asm5-shaded-4.4.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/hadoop/hadoop-client/2.2.0/hadoop-client-2.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/hadoop/hadoop-common/2.2.0/hadoop-common-2.2.0.jar" sourcepath="M2_REPO/org/apache/hadoop/hadoop-common/2.2.0/hadoop-common-2.2.0-sources.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/hadoop/hadoop-annotations/2.2.0/hadoop-annotations-2.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/google/guava/guava/14.0.1/guava-14.0.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar"/>
+	<classpathentry kind="var" path="M2_REPO/commons-cli/commons-cli/1.2/commons-cli-1.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/commons/commons-math/2.1/commons-math-2.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/xmlenc/xmlenc/0.52/xmlenc-0.52.jar"/>
+	<classpathentry kind="var" path="M2_REPO/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/commons-io/commons-io/2.1/commons-io-2.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/commons-net/commons-net/2.2/commons-net-2.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/log4j/log4j/1.2.17/log4j-1.2.17.jar"/>
+	<classpathentry kind="var" path="M2_REPO/commons-lang/commons-lang/2.5/commons-lang-2.5.jar"/>
+	<classpathentry kind="var" path="M2_REPO/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar"/>
+	<classpathentry kind="var" path="M2_REPO/commons-collections/commons-collections/3.2.1/commons-collections-3.2.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/commons-digester/commons-digester/1.8/commons-digester-1.8.jar"/>
+	<classpathentry kind="var" path="M2_REPO/commons-beanutils/commons-beanutils/1.7.0/commons-beanutils-1.7.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/commons-beanutils/commons-beanutils-core/1.8.0/commons-beanutils-core-1.8.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/slf4j/slf4j-log4j12/1.7.16/slf4j-log4j12-1.7.16.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/hadoop/hadoop-auth/2.2.0/hadoop-auth-2.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/zookeeper/zookeeper/3.4.5/zookeeper-3.4.5.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/hadoop/hadoop-hdfs/2.2.0/hadoop-hdfs-2.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/hadoop/hadoop-mapreduce-client-app/2.2.0/hadoop-mapreduce-client-app-2.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/hadoop/hadoop-mapreduce-client-common/2.2.0/hadoop-mapreduce-client-common-2.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/hadoop/hadoop-yarn-common/2.2.0/hadoop-yarn-common-2.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/hadoop/hadoop-yarn-api/2.2.0/hadoop-yarn-api-2.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/codehaus/jettison/jettison/1.3.8/jettison-1.3.8.jar"/>
+	<classpathentry kind="var" path="M2_REPO/stax/stax-api/1.0.1/stax-api-1.0.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/hadoop/hadoop-yarn-client/2.2.0/hadoop-yarn-client-2.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/hadoop/hadoop-mapreduce-client-core/2.2.0/hadoop-mapreduce-client-core-2.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/hadoop/hadoop-yarn-server-common/2.2.0/hadoop-yarn-server-common-2.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/hadoop/hadoop-mapreduce-client-shuffle/2.2.0/hadoop-mapreduce-client-shuffle-2.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/hadoop/hadoop-mapreduce-client-jobclient/2.2.0/hadoop-mapreduce-client-jobclient-2.2.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/spark/spark-launcher_2.11/2.1.0/spark-launcher_2.11-2.1.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/spark/spark-network-common_2.11/2.1.0/spark-network-common_2.11-2.1.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/io/netty/netty-all/4.0.42.Final/netty-all-4.0.42.Final.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/commons/commons-lang3/3.5/commons-lang3-3.5.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/fasterxml/jackson/core/jackson-databind/2.6.5/jackson-databind-2.6.5.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/fasterxml/jackson/core/jackson-annotations/2.6.5/jackson-annotations-2.6.5.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/spark/spark-network-shuffle_2.11/2.1.0/spark-network-shuffle_2.11-2.1.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/io/dropwizard/metrics/metrics-core/3.1.2/metrics-core-3.1.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/spark/spark-unsafe_2.11/2.1.0/spark-unsafe_2.11-2.1.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/net/java/dev/jets3t/jets3t/0.7.1/jets3t-0.7.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/curator/curator-recipes/2.4.0/curator-recipes-2.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/curator/curator-framework/2.4.0/curator-framework-2.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/curator/curator-client/2.4.0/curator-client-2.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/commons/commons-math3/3.4.1/commons-math3-3.4.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/slf4j/jul-to-slf4j/1.7.16/jul-to-slf4j-1.7.16.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/slf4j/jcl-over-slf4j/1.7.16/jcl-over-slf4j-1.7.16.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/ning/compress-lzf/1.0.3/compress-lzf-1.0.3.jar"/>
+	<classpathentry kind="var" path="M2_REPO/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/roaringbitmap/RoaringBitmap/0.5.11/RoaringBitmap-0.5.11.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/json4s/json4s-jackson_2.11/3.2.11/json4s-jackson_2.11-3.2.11.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/json4s/json4s-core_2.11/3.2.11/json4s-core_2.11-3.2.11.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/json4s/json4s-ast_2.11/3.2.11/json4s-ast_2.11-3.2.11.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/scala-lang/scalap/2.11.0/scalap-2.11.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/scala-lang/scala-compiler/2.11.0/scala-compiler-2.11.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.1/scala-parser-combinators_2.11-1.0.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/glassfish/jersey/core/jersey-client/2.22.2/jersey-client-2.22.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/glassfish/jersey/core/jersey-common/2.22.2/jersey-common-2.22.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.22.2/jersey-guava-2.22.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/glassfish/hk2/hk2-api/2.4.0-b34/hk2-api-2.4.0-b34.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/glassfish/hk2/hk2-utils/2.4.0-b34/hk2-utils-2.4.0-b34.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/glassfish/hk2/external/aopalliance-repackaged/2.4.0-b34/aopalliance-repackaged-2.4.0-b34.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/glassfish/hk2/external/javax.inject/2.4.0-b34/javax.inject-2.4.0-b34.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/glassfish/hk2/hk2-locator/2.4.0-b34/hk2-locator-2.4.0-b34.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/javassist/javassist/3.18.1-GA/javassist-3.18.1-GA.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/glassfish/jersey/core/jersey-server/2.22.2/jersey-server-2.22.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/glassfish/jersey/media/jersey-media-jaxb/2.22.2/jersey-media-jaxb-2.22.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/glassfish/jersey/containers/jersey-container-servlet/2.22.2/jersey-container-servlet-2.22.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/glassfish/jersey/containers/jersey-container-servlet-core/2.22.2/jersey-container-servlet-core-2.22.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/clearspring/analytics/stream/2.7.0/stream-2.7.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/io/dropwizard/metrics/metrics-jvm/3.1.2/metrics-jvm-3.1.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/io/dropwizard/metrics/metrics-json/3.1.2/metrics-json-3.1.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/io/dropwizard/metrics/metrics-graphite/3.1.2/metrics-graphite-3.1.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/fasterxml/jackson/module/jackson-module-scala_2.11/2.6.5/jackson-module-scala_2.11-2.6.5.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/fasterxml/jackson/module/jackson-module-paranamer/2.6.5/jackson-module-paranamer-2.6.5.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/oro/oro/2.0.8/oro-2.0.8.jar"/>
+	<classpathentry kind="var" path="M2_REPO/net/razorvine/pyrolite/4.13/pyrolite-4.13.jar"/>
+	<classpathentry kind="var" path="M2_REPO/net/sf/py4j/py4j/0.10.4/py4j-0.10.4.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/spark/spark-catalyst_2.11/2.1.0/spark-catalyst_2.11-2.1.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/codehaus/janino/janino/3.0.0/janino-3.0.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/codehaus/janino/commons-compiler/3.0.0/commons-compiler-3.0.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/antlr/antlr4-runtime/4.5.3/antlr4-runtime-4.5.3.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/parquet/parquet-column/1.8.1/parquet-column-1.8.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/parquet/parquet-common/1.8.1/parquet-common-1.8.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/parquet/parquet-encoding/1.8.1/parquet-encoding-1.8.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/parquet/parquet-hadoop/1.8.1/parquet-hadoop-1.8.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/parquet/parquet-format/2.3.0-incubating/parquet-format-2.3.0-incubating.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/parquet/parquet-jackson/1.8.1/parquet-jackson-1.8.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/spark/spark-streaming_2.11/2.1.0/spark-streaming_2.11-2.1.0.jar" sourcepath="M2_REPO/org/apache/spark/spark-streaming_2.11/2.1.0/spark-streaming_2.11-2.1.0-sources.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/spark/spark-mllib_2.11/2.1.0/spark-mllib_2.11-2.1.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/spark/spark-graphx_2.11/2.1.0/spark-graphx_2.11-2.1.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/spark/spark-mllib-local_2.11/2.1.0/spark-mllib-local_2.11-2.1.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/scalanlp/breeze_2.11/0.12/breeze_2.11-0.12.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/scalanlp/breeze-macros_2.11/0.12/breeze-macros_2.11-0.12.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/core/1.1.2/core-1.1.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/net/sourceforge/f2j/arpack_combined_all/0.1/arpack_combined_all-0.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/rwl/jtransforms/2.4.0/jtransforms-2.4.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/spire-math/spire_2.11/0.7.4/spire_2.11-0.7.4.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/spire-math/spire-macros_2.11/0.7.4/spire-macros_2.11-0.7.4.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/chuusai/shapeless_2.11/2.0.0/shapeless_2.11-2.0.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/jpmml/pmml-model/1.2.15/pmml-model-1.2.15.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/jpmml/pmml-schema/1.2.15/pmml-schema-1.2.15.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/netlib-native_ref-osx-x86_64/1.1/netlib-native_ref-osx-x86_64-1.1-natives.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/native_ref-java/1.1/native_ref-java-1.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/jniloader/1.1/jniloader-1.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/netlib-native_ref-linux-x86_64/1.1/netlib-native_ref-linux-x86_64-1.1-natives.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/netlib-native_ref-linux-i686/1.1/netlib-native_ref-linux-i686-1.1-natives.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/netlib-native_ref-win-x86_64/1.1/netlib-native_ref-win-x86_64-1.1-natives.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/netlib-native_ref-win-i686/1.1/netlib-native_ref-win-i686-1.1-natives.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/netlib-native_ref-linux-armhf/1.1/netlib-native_ref-linux-armhf-1.1-natives.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/netlib-native_system-osx-x86_64/1.1/netlib-native_system-osx-x86_64-1.1-natives.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/native_system-java/1.1/native_system-java-1.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/netlib-native_system-linux-x86_64/1.1/netlib-native_system-linux-x86_64-1.1-natives.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/netlib-native_system-linux-i686/1.1/netlib-native_system-linux-i686-1.1-natives.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/netlib-native_system-linux-armhf/1.1/netlib-native_system-linux-armhf-1.1-natives.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/netlib-native_system-win-x86_64/1.1/netlib-native_system-win-x86_64-1.1-natives.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/fommil/netlib/netlib-native_system-win-i686/1.1/netlib-native_system-win-i686-1.1-natives.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/logging/log4j/log4j-api/2.6.2/log4j-api-2.6.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/logging/log4j/log4j-core/2.6.2/log4j-core-2.6.2.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/jena/jena-core/3.3.0/jena-core-3.3.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/jena/jena-iri/3.3.0/jena-iri-3.3.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/xerces/xercesImpl/2.11.0/xercesImpl-2.11.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/xml-apis/xml-apis/1.4.01/xml-apis-1.4.01.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/jena/jena-base/3.3.0/jena-base-3.3.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/jena/jena-shaded-guava/3.3.0/jena-shaded-guava-3.3.0.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/commons/commons-csv/1.3/commons-csv-1.3.jar"/>
+	<classpathentry kind="var" path="M2_REPO/com/github/andrewoma/dexx/collection/0.6/collection-0.6.jar"/>
+	<classpathentry kind="src" path="/mudrod-web"/>
+	<classpathentry kind="var" path="M2_REPO/junit/junit/4.12/junit-4.12.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/hamcrest/hamcrest-core/1.3/hamcrest-core-1.3.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/cxf/cxf-rt-frontend-jaxrs/3.1.7/cxf-rt-frontend-jaxrs-3.1.7.jar" sourcepath="M2_REPO/org/apache/cxf/cxf-rt-frontend-jaxrs/3.1.7/cxf-rt-frontend-jaxrs-3.1.7-sources.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/cxf/cxf-core/3.1.7/cxf-core-3.1.7.jar" sourcepath="M2_REPO/org/apache/cxf/cxf-core/3.1.7/cxf-core-3.1.7-sources.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/codehaus/woodstox/woodstox-core-asl/4.4.1/woodstox-core-asl-4.4.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/codehaus/woodstox/stax2-api/3.1.4/stax2-api-3.1.4.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/ws/xmlschema/xmlschema-core/2.2.1/xmlschema-core-2.2.1.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/cxf/cxf-rt-transports-http/3.1.7/cxf-rt-transports-http-3.1.7.jar"/>
+	<classpathentry kind="var" path="M2_REPO/org/apache/cxf/cxf-rt-rs-client/3.1.7/cxf-rt-rs-client-3.1.7.jar"/>
+	<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
+		<attributes>
+			<attribute name="maven.pomderived" value="true"/>
+			<attribute name="org.eclipse.jst.component.dependency" value="/WEB-INF/lib"/>
+		</attributes>
+	</classpathentry>
+	<classpathentry kind="output" path="target/classes"/>
+</classpath>
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/RecomDatasetsResource.java b/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/RecomDatasetsResource.java
deleted file mode 100644
index 072ba5e..0000000
--- a/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/RecomDatasetsResource.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License"); you 
- * may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sdap.mudrod.services.recommendation;
-
-import com.google.gson.JsonObject;
-
-import javax.servlet.ServletContext;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.GET;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-
-import org.apache.sdap.mudrod.main.MudrodEngine;
-import org.apache.sdap.mudrod.recommendation.structure.RecomData;
-
-/**
- * A Dataset recommendation resource.
- */
-@Path("/recommendation")
-public class RecomDatasetsResource {
-
-  private MudrodEngine mEngine;
-
-  public RecomDatasetsResource(@Context ServletContext sc) {
-    this.mEngine = (MudrodEngine) sc.getAttribute("MudrodInstance");
-  }
-
-  @GET
-  @Path("/status")
-  @Produces("text/html")
-  public Response status() {
-    return Response.ok("<h1>This is MUDROD Recommendation Datasets Resource: running correctly...</h1>").build();
-  }
-
-  @PUT
-  @Path("{shortname}")
-  @Produces(MediaType.APPLICATION_JSON)
-  @Consumes("text/plain")
-  public Response hybridRecommendation(@PathParam("shortname") String shortName) {
-    JsonObject json = new JsonObject();
-    if (shortName != null) {
-      RecomData recom = new RecomData(mEngine.getConfig(), mEngine.getESDriver(), null);
-      json = new JsonObject();
-      json.add("RecommendationData", recom.getRecomDataInJson(shortName, 10));
-    }
-    return Response.ok(json.toString(), MediaType.APPLICATION_JSON).build();
-  }
-}
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/DefaultExceptionMapper.java b/service/src/main/java/org/apache/sdap/mudrod/services/DefaultExceptionMapper.java
similarity index 100%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/DefaultExceptionMapper.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/DefaultExceptionMapper.java
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/MudrodContextListener.java b/service/src/main/java/org/apache/sdap/mudrod/services/MudrodContextListener.java
similarity index 95%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/MudrodContextListener.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/MudrodContextListener.java
index e08bf78..d96fa27 100644
--- a/service/src/main/java/gov/nasa/jpl/mudrod/services/MudrodContextListener.java
+++ b/service/src/main/java/org/apache/sdap/mudrod/services/MudrodContextListener.java
@@ -13,11 +13,6 @@
  */
 package org.apache.sdap.mudrod.services;
 
-import javax.servlet.ServletContext;
-import javax.servlet.ServletContextEvent;
-import javax.servlet.ServletContextListener;
-import javax.servlet.annotation.WebListener;
-
 import org.apache.sdap.mudrod.driver.ESDriver;
 import org.apache.sdap.mudrod.driver.SparkDriver;
 import org.apache.sdap.mudrod.main.MudrodEngine;
@@ -26,6 +21,10 @@
 import org.apache.sdap.mudrod.ssearch.Ranker;
 import org.apache.sdap.mudrod.ssearch.Searcher;
 
+import javax.servlet.ServletContext;
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+import javax.servlet.annotation.WebListener;
 import java.util.Properties;
 
 /**
@@ -64,11 +63,11 @@ public void contextInitialized(ServletContextEvent arg0) {
     ServletContext ctx = arg0.getServletContext();
     Searcher searcher = new Searcher(props, me.getESDriver(), null);
     Ranker ranker = new Ranker(props, me.getESDriver(), me.getSparkDriver(), "SparkSVM");
-    Ontology ontImpl = new OntologyFactory(props).getOntology();
+    // Ontology ontImpl = new OntologyFactory(props).getOntology();
     ctx.setAttribute("MudrodInstance", me);
     ctx.setAttribute("MudrodSearcher", searcher);
     ctx.setAttribute("MudrodRanker", ranker);
-    ctx.setAttribute("Ontology", ontImpl);
+    // ctx.setAttribute("Ontology", ontImpl);
   }
 
 }
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/autocomplete/AutoCompleteData.java b/service/src/main/java/org/apache/sdap/mudrod/services/autocomplete/AutoCompleteData.java
similarity index 100%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/autocomplete/AutoCompleteData.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/autocomplete/AutoCompleteData.java
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/autocomplete/AutoCompleteResource.java b/service/src/main/java/org/apache/sdap/mudrod/services/autocomplete/AutoCompleteResource.java
similarity index 99%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/autocomplete/AutoCompleteResource.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/autocomplete/AutoCompleteResource.java
index fb9a036..6a25edd 100644
--- a/service/src/main/java/gov/nasa/jpl/mudrod/services/autocomplete/AutoCompleteResource.java
+++ b/service/src/main/java/org/apache/sdap/mudrod/services/autocomplete/AutoCompleteResource.java
@@ -14,7 +14,6 @@
 package org.apache.sdap.mudrod.services.autocomplete;
 
 import com.google.gson.Gson;
-
 import org.apache.sdap.mudrod.main.MudrodConstants;
 import org.apache.sdap.mudrod.main.MudrodEngine;
 import org.slf4j.Logger;
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/autocomplete/package-info.java b/service/src/main/java/org/apache/sdap/mudrod/services/autocomplete/package-info.java
similarity index 91%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/autocomplete/package-info.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/autocomplete/package-info.java
index baa7417..9ddac75 100644
--- a/service/src/main/java/gov/nasa/jpl/mudrod/services/autocomplete/package-info.java
+++ b/service/src/main/java/org/apache/sdap/mudrod/services/autocomplete/package-info.java
@@ -14,4 +14,4 @@
 /**
  * Mudrod Autocompletion resources.
  */
-package org.apache.sdap.mudrod.services.autocomplete;
+package org.apache.sdap.mudrod.services.autocomplete;
\ No newline at end of file
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/ontology/OntologyResource.java b/service/src/main/java/org/apache/sdap/mudrod/services/ontology/OntologyResource.java
similarity index 99%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/ontology/OntologyResource.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/ontology/OntologyResource.java
index ea03174..0d67133 100644
--- a/service/src/main/java/gov/nasa/jpl/mudrod/services/ontology/OntologyResource.java
+++ b/service/src/main/java/org/apache/sdap/mudrod/services/ontology/OntologyResource.java
@@ -14,7 +14,6 @@
 package org.apache.sdap.mudrod.services.ontology;
 
 import com.google.gson.Gson;
-
 import org.apache.sdap.mudrod.ontology.Ontology;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/ontology/package-info.java b/service/src/main/java/org/apache/sdap/mudrod/services/ontology/package-info.java
similarity index 92%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/ontology/package-info.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/ontology/package-info.java
index dbc3321..a0740f8 100644
--- a/service/src/main/java/gov/nasa/jpl/mudrod/services/ontology/package-info.java
+++ b/service/src/main/java/org/apache/sdap/mudrod/services/ontology/package-info.java
@@ -14,4 +14,4 @@
 /**
  * Mudrod ontology resources.
  */
-package org.apache.sdap.mudrod.services.ontology;
+package org.apache.sdap.mudrod.services.ontology;
\ No newline at end of file
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/package-info.java b/service/src/main/java/org/apache/sdap/mudrod/services/package-info.java
similarity index 93%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/package-info.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/package-info.java
index d7c7a8f..47412da 100644
--- a/service/src/main/java/gov/nasa/jpl/mudrod/services/package-info.java
+++ b/service/src/main/java/org/apache/sdap/mudrod/services/package-info.java
@@ -14,4 +14,4 @@
 /**
  * This package includes base listeners, etc. for Mudrod services.
  */
-package org.apache.sdap.mudrod.services;
+package org.apache.sdap.mudrod.services;
\ No newline at end of file
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/HybridRecomDatasetsResource.java b/service/src/main/java/org/apache/sdap/mudrod/services/recommendation/HybridRecomDatasetsResource.java
similarity index 93%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/HybridRecomDatasetsResource.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/recommendation/HybridRecomDatasetsResource.java
index 935c7ac..da4cb9d 100644
--- a/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/HybridRecomDatasetsResource.java
+++ b/service/src/main/java/org/apache/sdap/mudrod/services/recommendation/HybridRecomDatasetsResource.java
@@ -14,20 +14,15 @@
 package org.apache.sdap.mudrod.services.recommendation;
 
 import com.google.gson.JsonObject;
+import org.apache.sdap.mudrod.main.MudrodEngine;
+import org.apache.sdap.mudrod.recommendation.structure.HybridRecommendation;
 
 import javax.servlet.ServletContext;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
+import javax.ws.rs.*;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 
-import org.apache.sdap.mudrod.main.MudrodEngine;
-import org.apache.sdap.mudrod.recommendation.structure.HybridRecommendation;
-
 /**
  * A hybrid recommendation resource for datasets.
  */
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/package-info.java b/service/src/main/java/org/apache/sdap/mudrod/services/recommendation/package-info.java
similarity index 91%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/package-info.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/recommendation/package-info.java
index aa88508..9882146 100644
--- a/service/src/main/java/gov/nasa/jpl/mudrod/services/recommendation/package-info.java
+++ b/service/src/main/java/org/apache/sdap/mudrod/services/recommendation/package-info.java
@@ -14,4 +14,4 @@
 /**
  * Mudrod recommendation resources.
  */
-package org.apache.sdap.mudrod.services.recommendation;
+package org.apache.sdap.mudrod.services.recommendation;
\ No newline at end of file
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/search/SearchDatasetDetailResource.java b/service/src/main/java/org/apache/sdap/mudrod/services/search/SearchDatasetDetailResource.java
similarity index 100%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/search/SearchDatasetDetailResource.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/search/SearchDatasetDetailResource.java
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/search/SearchMetadataResource.java b/service/src/main/java/org/apache/sdap/mudrod/services/search/SearchMetadataResource.java
similarity index 99%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/search/SearchMetadataResource.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/search/SearchMetadataResource.java
index 9bdf455..a659cfa 100644
--- a/service/src/main/java/gov/nasa/jpl/mudrod/services/search/SearchMetadataResource.java
+++ b/service/src/main/java/org/apache/sdap/mudrod/services/search/SearchMetadataResource.java
@@ -16,7 +16,6 @@
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
 import com.google.gson.JsonObject;
-
 import org.apache.sdap.mudrod.main.MudrodConstants;
 import org.apache.sdap.mudrod.main.MudrodEngine;
 import org.apache.sdap.mudrod.ssearch.Ranker;
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/search/SearchVocabResource.java b/service/src/main/java/org/apache/sdap/mudrod/services/search/SearchVocabResource.java
similarity index 99%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/search/SearchVocabResource.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/search/SearchVocabResource.java
index 6e58761..6a7eed2 100644
--- a/service/src/main/java/gov/nasa/jpl/mudrod/services/search/SearchVocabResource.java
+++ b/service/src/main/java/org/apache/sdap/mudrod/services/search/SearchVocabResource.java
@@ -15,7 +15,6 @@
 
 import com.google.gson.Gson;
 import com.google.gson.JsonObject;
-
 import org.apache.sdap.mudrod.integration.LinkageIntegration;
 import org.apache.sdap.mudrod.main.MudrodEngine;
 import org.slf4j.Logger;
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/search/SessionDetailResource.java b/service/src/main/java/org/apache/sdap/mudrod/services/search/SessionDetailResource.java
similarity index 99%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/search/SessionDetailResource.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/search/SessionDetailResource.java
index dc31993..8ea3846 100644
--- a/service/src/main/java/gov/nasa/jpl/mudrod/services/search/SessionDetailResource.java
+++ b/service/src/main/java/org/apache/sdap/mudrod/services/search/SessionDetailResource.java
@@ -15,7 +15,6 @@
 
 import com.google.gson.Gson;
 import com.google.gson.JsonObject;
-
 import org.apache.sdap.mudrod.main.MudrodConstants;
 import org.apache.sdap.mudrod.main.MudrodEngine;
 import org.apache.sdap.mudrod.weblog.structure.Session;
diff --git a/service/src/main/java/gov/nasa/jpl/mudrod/services/search/package-info.java b/service/src/main/java/org/apache/sdap/mudrod/services/search/package-info.java
similarity index 92%
rename from service/src/main/java/gov/nasa/jpl/mudrod/services/search/package-info.java
rename to service/src/main/java/org/apache/sdap/mudrod/services/search/package-info.java
index 626cd8c..009db84 100644
--- a/service/src/main/java/gov/nasa/jpl/mudrod/services/search/package-info.java
+++ b/service/src/main/java/org/apache/sdap/mudrod/services/search/package-info.java
@@ -14,4 +14,4 @@
 /**
  * Mudrod search-related resources.
  */
-package org.apache.sdap.mudrod.services.search;
+package org.apache.sdap.mudrod.services.search;
\ No newline at end of file
diff --git a/service/src/main/webapp/WEB-INF/web.xml b/service/src/main/webapp/WEB-INF/web.xml
index 195ef01..e62e668 100644
--- a/service/src/main/webapp/WEB-INF/web.xml
+++ b/service/src/main/webapp/WEB-INF/web.xml
@@ -28,7 +28,6 @@
                 org.apache.sdap.mudrod.services.autocomplete.AutoCompleteResource,
                 org.apache.sdap.mudrod.services.search.SearchDatasetDetailResource,
                 org.apache.sdap.mudrod.services.recommendation.HybridRecomDatasetsResource,
-                org.apache.sdap.mudrod.services.recommendation.RecomDatasetsResource,
                 org.apache.sdap.mudrod.services.search.SearchMetadataResource,
                 org.apache.sdap.mudrod.services.search.SearchVocabResource,
                 org.apache.sdap.mudrod.services.search.SessionDetailResource,

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Service