Posted to commits@ambari.apache.org by ol...@apache.org on 2016/04/25 17:14:36 UTC

[1/9] ambari git commit: AMBARI-16091. Add 'logfeeder log level include' feature to logsearch stack definition (oleewere)

Repository: ambari
Updated Branches:
  refs/heads/trunk 25ed583ee -> cda7f2a3e


AMBARI-16091. Add 'logfeeder log level include' feature to logsearch stack definition (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cda7f2a3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cda7f2a3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cda7f2a3

Branch: refs/heads/trunk
Commit: cda7f2a3eb82a494a09ffca1a1c75ba3d658e307
Parents: 888faf2
Author: oleewere <ol...@gmail.com>
Authored: Mon Apr 25 17:09:11 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Mon Apr 25 17:10:34 2016 +0200

----------------------------------------------------------------------
 .../0.5.0/configuration/logsearch-site.xml      |  7 ++++++
 .../LOGSEARCH/0.5.0/package/scripts/params.py   |  1 +
 .../templates/input.config-logsearch.json.j2    |  6 ++---
 .../package/templates/logsearch.properties.j2   |  4 ++++
 .../0.5.0/properties/logfeeder-log4j.xml.j2     | 24 ++++++++++++++++----
 .../0.5.0/properties/logsearch-log4j.xml.j2     | 22 ++++++++++--------
 ambari-web/app/data/HDP2/site_properties.js     |  7 ++++++
 7 files changed, 53 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/cda7f2a3/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-site.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-site.xml
index 45aa69a..fb775bf 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-site.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-site.xml
@@ -78,4 +78,11 @@
     </description>
   </property>
 
+  <property>
+    <name>logsearch.logfeeder.include.default.level</name>
+    <value>fatal,error,warn</value>
+    <description>Include default Logfeeder log levels for Logsearch</description>
+    <display-name>Logfeeder log levels</display-name>
+  </property>
+
 </configuration>
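
Note: the new logsearch.logfeeder.include.default.level property carries a comma-separated list of log levels, defaulting to fatal,error,warn. Downstream, the portal splits that list and upper-cases each entry (see the UserConfigMgr change further down). A minimal sketch of that parsing on its own; the class and method names here are illustrative, not part of the commit:

    import java.util.ArrayList;
    import java.util.List;

    // Illustrative helper: turns "fatal,error,warn" into [FATAL, ERROR, WARN].
    public class LevelListParser {
      public static List<String> parse(String csv) {
        List<String> levels = new ArrayList<String>();
        if (csv == null || csv.trim().isEmpty()) {
          return levels;
        }
        for (String level : csv.split(",")) {
          String trimmed = level.trim();
          if (!trimmed.isEmpty()) {
            levels.add(trimmed.toUpperCase());
          }
        }
        return levels;
      }

      public static void main(String[] args) {
        System.out.println(parse("fatal,error,warn"));
      }
    }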

http://git-wip-us.apache.org/repos/asf/ambari/blob/cda7f2a3/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
index b461b1f..4084243 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
@@ -146,6 +146,7 @@ logsearch_repfactor = str(config['configurations']['logsearch-site']['logsearch.
 
 logsearch_solr_collection_service_logs = default('/configurations/logsearch-site/logsearch.solr.collection.service.logs', 'hadoop_logs')
 logsearch_solr_collection_audit_logs = default('/configurations/logsearch-site/logsearch.solr.collection.audit.logs','audit_logs')
+logsearch_logfeeder_log_level_include = default('/configurations/logsearch-site/logsearch.logfeeder.include.default.level', 'fatal,error,warn')
 
 solr_audit_logs_use_ranger = default('/configurations/logsearch-env/logsearch_solr_audit_logs_use_ranger', False)
 solr_audit_logs_url = ''

http://git-wip-us.apache.org/repos/asf/ambari/blob/cda7f2a3/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-logsearch.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-logsearch.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-logsearch.json.j2
index d9ef66d..06aaba6 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-logsearch.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-logsearch.json.j2
@@ -20,17 +20,17 @@
     {
       "type":"logsearch_app",
       "rowtype":"service",
-      "path":"{{logsearch_log_dir}}/logsearch.log"
+      "path":"{{logsearch_log_dir}}/logsearch.json"
     },
     {
       "type":"logsearch_feeder",
       "rowtype":"service",
-      "path":"{{logfeeder_log_dir}}/logfeeder.log"
+      "path":"{{logfeeder_log_dir}}/logfeeder.json"
     },
     {
       "type":"logsearch_perf",
       "rowtype":"service",
-      "path":"{{logsearch_log_dir}}/logsearch-performance.log"
+      "path":"{{logsearch_log_dir}}/logsearch-performance.json"
     }
 
   ],
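
Note: the Logfeeder input definitions now tail the .json files produced by the new JSON appenders instead of the plain .log files, so each line is a single JSON object. A minimal sketch of reading one such line with Gson; the field names (level, logger_name, log_message) are assumptions for illustration, not taken from this commit:

    import java.util.Map;

    import com.google.gson.Gson;
    import com.google.gson.reflect.TypeToken;

    // Illustrative only: parse one JSON-formatted log line into a key/value map.
    public class JsonLogLineReader {
      public static void main(String[] args) {
        String line = "{\"level\":\"INFO\",\"logger_name\":\"org.apache.ambari.logfeeder\",\"log_message\":\"started\"}";
        Gson gson = new Gson();
        Map<String, Object> fields = gson.fromJson(line,
            new TypeToken<Map<String, Object>>() {}.getType());
        System.out.println(fields.get("level") + " - " + fields.get("log_message"));
      }
    }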

http://git-wip-us.apache.org/repos/asf/ambari/blob/cda7f2a3/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2
index 1e183bd..190d4e2 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2
@@ -38,3 +38,7 @@ solr.audit_logs.replication_factor={{logsearch_repfactor}}
 solr.core.history=history
 solr.history.config_name=history
 solr.history.replication_factor={{logsearch_repfactor}}
+
+#Logfeeder Settings
+
+logfeeder.include.default.level={{logsearch_logfeeder_log_level_include}}
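
Note: the chain is: logsearch-site.xml defines logsearch.logfeeder.include.default.level, params.py exposes it as logsearch_logfeeder_log_level_include, and this template writes it into logsearch.properties as logfeeder.include.default.level, which the portal later reads with a fallback (PropertiesUtil.getProperty("logfeeder.include.default.level", DEFAULT_LEVELS) in UserConfigMgr below). A minimal sketch of that read-with-default step using plain java.util.Properties; the classpath lookup is an assumption for illustration:

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    // Sketch of a property lookup with a default, similar in spirit to
    // PropertiesUtil.getProperty(key, defaultValue) in the portal.
    public class DefaultLevelLookup {
      public static void main(String[] args) throws IOException {
        Properties props = new Properties();
        try (InputStream in = DefaultLevelLookup.class.getClassLoader()
            .getResourceAsStream("logsearch.properties")) { // assumed classpath location
          if (in != null) {
            props.load(in);
          }
        }
        String levels = props.getProperty("logfeeder.include.default.level",
            "FATAL,ERROR,WARN,INFO,DEBUG,TRACE");
        System.out.println("Default Logfeeder levels: " + levels);
      }
    }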

http://git-wip-us.apache.org/repos/asf/ambari/blob/cda7f2a3/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logfeeder-log4j.xml.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logfeeder-log4j.xml.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logfeeder-log4j.xml.j2
index cab680f..77318ee 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logfeeder-log4j.xml.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logfeeder-log4j.xml.j2
@@ -34,24 +34,38 @@ limitations under the License.
     </layout>
   </appender>
 
+  <appender name="rolling_file_json"
+    class="org.apache.ambari.logsearch.appender.LogsearchRollingFileAppender">
+    <param name="file" value="logs/logsearch-logfeeder.json" />
+    <param name="append" value="true" />
+    <param name="maxFileSize" value="10MB" />
+    <param name="maxBackupIndex" value="10" />
+    <layout class="org.apache.ambari.logsearch.appender.LogsearchConversion" />
+  </appender>
+
+
   <!-- Logs to suppress BEGIN -->
   <category name="org.apache.solr.common.cloud.ZkStateReader" additivity="false">
     <priority value="error" />
-    <appender-ref ref="rolling_file" />
+    <appender-ref ref="daily_rolling_file" />
   </category>
 
   <category name="apache.solr.client.solrj.impl.CloudSolrClient" additivity="false">
     <priority value="fatal" />
-    <appender-ref ref="rolling_file" />
+    <appender-ref ref="daily_rolling_file" />
   </category>
+  <!-- Logs to suppress END -->
 
   <category name="org.apache.ambari.logfeeder" additivity="false">
-    <priority value="info"/>
-    <appender-ref ref="rolling_file"/>
+    <priority value="INFO" />
+    <appender-ref ref="console" />
+    <!-- <appender-ref ref="daily_rolling_file" /> -->
+    <appender-ref ref="rolling_file_json"/>
   </category>
 
   <root>
     <priority value="warn"/>
-    <appender-ref ref="rolling_file"/>
+    <!-- <appender-ref ref="rolling_file"/> -->
+    <!-- <appender-ref ref="daily_rolling_file" /> -->
   </root>
 </log4j:configuration>
\ No newline at end of file
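
Note: the added rolling_file_json appender pairs LogsearchRollingFileAppender with the LogsearchConversion layout so each event is written as one JSON object per line. The sketch below is not that layout; it is a minimal log4j 1.2 layout written under the assumption that JSON-per-line output mainly requires overriding format():

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.log4j.Layout;
    import org.apache.log4j.spi.LoggingEvent;

    import com.google.gson.Gson;

    // Minimal illustrative log4j 1.2 layout that renders each event as one JSON
    // line. This is a sketch, not the LogsearchConversion class from the commit.
    public class JsonLineLayout extends Layout {
      private final Gson gson = new Gson();

      @Override
      public String format(LoggingEvent event) {
        Map<String, Object> fields = new HashMap<String, Object>();
        fields.put("logtime", event.getTimeStamp());
        fields.put("level", event.getLevel().toString());
        fields.put("logger_name", event.getLoggerName());
        fields.put("log_message", event.getRenderedMessage());
        return gson.toJson(fields) + LINE_SEP;
      }

      @Override
      public boolean ignoresThrowable() {
        return true; // throwable rendering is left out of this sketch
      }

      @Override
      public void activateOptions() {
        // no options to activate in this sketch
      }
    }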

http://git-wip-us.apache.org/repos/asf/ambari/blob/cda7f2a3/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-log4j.xml.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-log4j.xml.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-log4j.xml.j2
index a1f5539..7dc09ba2 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-log4j.xml.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-log4j.xml.j2
@@ -24,8 +24,8 @@ limitations under the License.
     </layout>
   </appender>
 
-  <appender name="rolling_file" class="org.apache.log4j.RollingFileAppender">
-    <param name="file" value="{{logsearch_log_dir}}/logsearch.log"/>
+  <appender name="rolling_file_json" class="org.apache.log4j.RollingFileAppender">
+    <param name="file" value="{{logsearch_log_dir}}/logsearch.json"/>
     <param name="append" value="true"/>
     <param name="maxFileSize" value="10MB"/>
     <param name="maxBackupIndex" value="10"/>
@@ -34,8 +34,8 @@ limitations under the License.
     </layout>
   </appender>
 
-  <appender name="audit_rolling_file" class="org.apache.log4j.RollingFileAppender">
-    <param name="file" value="{{logsearch_log_dir}}/logsearch-audit.log"/>
+  <appender name="audit_rolling_file_json" class="org.apache.log4j.RollingFileAppender">
+    <param name="file" value="{{logsearch_log_dir}}/logsearch-audit.json"/>
     <param name="append" value="true"/>
     <param name="maxFileSize" value="10MB"/>
     <param name="maxBackupIndex" value="10"/>
@@ -44,8 +44,8 @@ limitations under the License.
     </layout>
   </appender>
 
-  <appender name="performance_analyzer" class="org.apache.log4j.RollingFileAppender">
-    <param name="file" value="{{logsearch_log_dir}}/logsearch-performance.log"/>
+  <appender name="performance_analyzer_json" class="org.apache.log4j.RollingFileAppender">
+    <param name="file" value="{{logsearch_log_dir}}/logsearch-performance.json"/>
     <param name="Threshold" value="info"/>
     <param name="append" value="true"/>
     <param name="maxFileSize" value="10MB"/>
@@ -56,20 +56,22 @@ limitations under the License.
   </appender>
 
   <logger name="org.apache.ambari.logsearch.audit" additivity="true">
-    <appender-ref ref="audit_rolling_file"/>
+    <appender-ref ref="audit_rolling_file_json"/>
   </logger>
 
   <logger name="org.apache.ambari.logsearch.performance" additivity="false">
-    <appender-ref ref="performance_analyzer"/>
+    <appender-ref ref="performance_analyzer_json"/>
   </logger>
 
   <category name="org.apache.ambari.logsearch" additivity="false">
     <priority value="info"/>
-    <appender-ref ref="rolling_file"/>
+    <appender-ref ref="rolling_file_json"/>
   </category>
 
   <root>
     <priority value="warn"/>
-    <appender-ref ref="rolling_file"/>
+    <!-- <appender-ref ref="console" /> -->
+    <!--<appender-ref ref="rolling_file" /> -->
+    <appender-ref ref="rolling_file_json"/>
   </root>
 </log4j:configuration>
\ No newline at end of file
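
Note: the renamed *_json appenders keep the earlier RollingFileAppender settings (10MB files, 10 backups, append) but target .json file names. For reference, a programmatic sketch of one such appender; the relative path stands in for {{logsearch_log_dir}}/logsearch.json and PatternLayout stands in for the JSON layout used by the template:

    import org.apache.log4j.Logger;
    import org.apache.log4j.PatternLayout;
    import org.apache.log4j.RollingFileAppender;

    // Illustrative programmatic equivalent of the rolling_file_json appender settings.
    public class RollingJsonAppenderSetup {
      public static void main(String[] args) {
        RollingFileAppender appender = new RollingFileAppender();
        appender.setName("rolling_file_json");
        appender.setFile("logs/logsearch.json"); // stand-in for {{logsearch_log_dir}}/logsearch.json
        appender.setAppend(true);
        appender.setMaxFileSize("10MB");
        appender.setMaxBackupIndex(10);
        appender.setLayout(new PatternLayout("%m%n")); // stand-in for the JSON layout
        appender.activateOptions();

        Logger logger = Logger.getLogger("org.apache.ambari.logsearch");
        logger.addAppender(appender);
        logger.info("appender configured");
      }
    }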

http://git-wip-us.apache.org/repos/asf/ambari/blob/cda7f2a3/ambari-web/app/data/HDP2/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/site_properties.js b/ambari-web/app/data/HDP2/site_properties.js
index 19e0931..a687893 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -2171,6 +2171,13 @@ var hdp2properties = [
     "category": "Advanced logsearch-site",
     "index": 8
   },
+  {
+    "name": "logsearch.logfeeder.include.default.level",
+    "serviceName": "LOGSEARCH",
+    "filename": "logsearch-site.xml",
+    "category": "Advanced logsearch-site",
+    "index": 9
+  }
   /*logsearch-solr-env*/
   {
     "name": "logsearch_solr_datadir",


[4/9] ambari git commit: AMBARI-16034. Incremental changes to LogSearch to bring it up to date in the trunk (Dharmesh Makwana via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
index cf50a87..1d069d3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
@@ -21,12 +21,14 @@ package org.apache.ambari.logsearch.manager;
 
 import java.io.File;
 import java.io.IOException;
+import java.text.ParseException;
 import java.util.Date;
 import java.util.Scanner;
 
 import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
 import org.apache.ambari.logsearch.query.QueryGeneration;
+import org.apache.ambari.logsearch.util.DateUtil;
 import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.ambari.logsearch.util.SolrUtil;
@@ -60,7 +62,7 @@ public class MgrBase {
   JSONUtil jsonUtil;
 
   @Autowired
-  QueryGeneration queryGenrator;
+  QueryGeneration queryGenerator;
 
   @Autowired
   StringUtil stringUtil;
@@ -68,16 +70,35 @@ public class MgrBase {
   @Autowired
   RESTErrorUtil restErrorUtil;
 
+  @Autowired
+  DateUtil dateUtil;
+
   JsonSerializer<Date> jsonDateSerialiazer = null;
   JsonDeserializer<Date> jsonDateDeserialiazer = null;
 
+  public enum LOG_TYPE {
+    SERVICE {
+      @Override
+      public String getLabel() {
+        return "Service";
+      }
+    },
+    AUDIT {
+      @Override
+      public String getLabel() {
+        return "Audit";
+      }
+    };
+    public abstract String getLabel();
+  }
+
   public MgrBase() {
     jsonDateSerialiazer = new JsonSerializer<Date>() {
 
       @Override
       public JsonElement serialize(Date paramT,
-                                   java.lang.reflect.Type paramType,
-                                   JsonSerializationContext paramJsonSerializationContext) {
+          java.lang.reflect.Type paramType,
+          JsonSerializationContext paramJsonSerializationContext) {
 
         return paramT == null ? null : new JsonPrimitive(paramT.getTime());
       }
@@ -86,38 +107,33 @@ public class MgrBase {
     jsonDateDeserialiazer = new JsonDeserializer<Date>() {
 
       @Override
-      public Date deserialize(JsonElement json,
-                              java.lang.reflect.Type typeOfT,
-                              JsonDeserializationContext context) throws JsonParseException {
+      public Date deserialize(JsonElement json, java.lang.reflect.Type typeOfT,
+          JsonDeserializationContext context) throws JsonParseException {
         return json == null ? null : new Date(json.getAsLong());
       }
 
     };
   }
 
-  public String convertObjToString(Object obj) throws IOException {
+  public String convertObjToString(Object obj) {
     if (obj == null) {
       return "";
     }
-    /*ObjectMapper mapper = new ObjectMapper();
-    ObjectWriter w = mapper.writerWithDefaultPrettyPrinter();
-    return mapper.writeValueAsString(obj);*/
 
     Gson gson = new GsonBuilder()
-      .registerTypeAdapter(Date.class, jsonDateSerialiazer)
-      .registerTypeAdapter(Date.class, jsonDateDeserialiazer).create();
+        .registerTypeAdapter(Date.class, jsonDateSerialiazer)
+        .registerTypeAdapter(Date.class, jsonDateDeserialiazer).create();
 
     return gson.toJson(obj);
   }
 
-
   public String getHadoopServiceConfigJSON() {
     StringBuilder result = new StringBuilder("");
 
     // Get file from resources folder
     ClassLoader classLoader = getClass().getClassLoader();
-    File file = new File(classLoader
-      .getResource("HadoopServiceConfig.json").getFile());
+    File file = new File(classLoader.getResource("HadoopServiceConfig.json")
+        .getFile());
 
     try (Scanner scanner = new Scanner(file)) {
 
@@ -131,37 +147,67 @@ public class MgrBase {
     } catch (IOException e) {
       logger.error("Unable to read HadoopServiceConfig.json", e);
       throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+          MessageEnums.ERROR_SYSTEM);
     }
 
     String hadoopServiceConfig = result.toString();
-    if (jsonUtil.isJSONValid(hadoopServiceConfig))
+    if (jsonUtil.isJSONValid(hadoopServiceConfig)) {
       return hadoopServiceConfig;
+    }
     throw restErrorUtil.createRESTException("Improper JSON",
-      MessageEnums.ERROR_SYSTEM);
+        MessageEnums.ERROR_SYSTEM);
 
   }
 
-  public VSolrLogList getLogAsPaginationProvided(SolrQuery solrQuery, SolrDaoBase solrDaoBase) {
+  public VSolrLogList getLogAsPaginationProvided(SolrQuery solrQuery,
+      SolrDaoBase solrDaoBase) {
     try {
       QueryResponse response = solrDaoBase.process(solrQuery);
+      VSolrLogList collection = new VSolrLogList();
       SolrDocumentList docList = response.getResults();
-      VSolrLogList collection = new VSolrLogList(docList);
-      collection.setStartIndex((int) docList.getStart());
-      collection.setTotalCount(docList.getNumFound());
-      Integer rowNumber = solrQuery.getRows();
-      if (rowNumber == null) {
-        logger.error("No RowNumber was set in solrQuery");
-        return new VSolrLogList();
+      if (docList != null && !docList.isEmpty()) {
+        collection.setSolrDocuments(docList);
+        collection.setStartIndex((int) docList.getStart());
+        collection.setTotalCount(docList.getNumFound());
+        Integer rowNumber = solrQuery.getRows();
+        if (rowNumber == null) {
+          logger.error("No RowNumber was set in solrQuery");
+          return new VSolrLogList();
+        }
+        collection.setPageSize(rowNumber);
       }
-      collection.setPageSize(rowNumber);
       return collection;
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+
+  }
+
+  protected String getUnit(String unit) {
+    if (stringUtil.isEmpty(unit)) {
+      unit = "+1HOUR";
     }
+    return unit;
+  }
 
+  protected String getFrom(String from) {
+    if (stringUtil.isEmpty(from)) {
+      Date date =  dateUtil.getTodayFromDate();
+      try {
+        from = dateUtil.convertGivenDateFormatToSolrDateFormat(date);
+      } catch (ParseException e) {
+        from = "NOW";
+      }
+    }
+    return from;
   }
 
+  protected String getTo(String to) {
+    if (stringUtil.isEmpty(to)) {
+      to = "NOW";
+    }
+    return to;
+  }
 }
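
Note: MgrBase gains a LOG_TYPE enum with Service/Audit labels plus getUnit/getFrom/getTo helpers that fall back to "+1HOUR" and "NOW" when range parameters are missing. A short sketch of how such defaults can feed a Solr logtime range filter; the "NOW/DAY" fallback and the filter assembly are illustrative assumptions (the commit formats today's date via DateUtil instead):

    import org.apache.solr.client.solrj.SolrQuery;

    // Sketch: default an open-ended time range the way MgrBase.getFrom()/getTo()
    // fall back when parameters are missing.
    public class TimeRangeDefaults {
      static String getFrom(String from) {
        return (from == null || from.trim().isEmpty()) ? "NOW/DAY" : from; // assumed fallback
      }

      static String getTo(String to) {
        return (to == null || to.trim().isEmpty()) ? "NOW" : to;
      }

      public static void main(String[] args) {
        SolrQuery query = new SolrQuery("*:*");
        // logtime is the Solr field the service-log queries filter on.
        query.addFilterQuery("logtime:[" + getFrom(null) + " TO " + getTo(null) + "]");
        System.out.println(query); // prints the assembled, URL-encoded query parameters
      }
    }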

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicMgr.java
index 0a4328b..0dccb74 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicMgr.java
@@ -19,22 +19,17 @@
 
 package org.apache.ambari.logsearch.manager;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.view.VNameValue;
 import org.apache.ambari.logsearch.view.VNameValueList;
 import org.apache.ambari.logsearch.web.security.LogsearchSimpleAuthenticationProvider;
-import org.apache.log4j.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
 @Component
 public class PublicMgr extends MgrBase {
-  private static Logger logger = Logger.getLogger(PublicMgr.class);
-
   @Autowired
   LogsearchSimpleAuthenticationProvider simpleAuthenticationProvider;
 
@@ -46,10 +41,6 @@ public class PublicMgr extends MgrBase {
     nameValue.setValue("" + simpleAuthenticationProvider.isEnable());
     nameValues.add(nameValue);
     nameValueList.setVNameValues(nameValues);
-    try {
-      return convertObjToString(nameValueList);
-    } catch (IOException e) {
-      throw restErrorUtil.createRESTException(e.getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
+    return convertObjToString(nameValueList);
   }
 }
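
Note: convertObjToString() no longer declares IOException because it serializes through Gson with Date adapters registered in the MgrBase constructor, which lets callers such as PublicMgr.getGeneralConfig() drop their try/catch. A standalone sketch of that Gson-with-Date-adapter pattern; the Event type is a simplified stand-in:

    import java.lang.reflect.Type;
    import java.util.Date;

    import com.google.gson.Gson;
    import com.google.gson.GsonBuilder;
    import com.google.gson.JsonElement;
    import com.google.gson.JsonPrimitive;
    import com.google.gson.JsonSerializationContext;
    import com.google.gson.JsonSerializer;

    // Sketch of the Gson setup used by MgrBase.convertObjToString(): Dates are
    // serialized as epoch milliseconds, and no checked exception is thrown.
    public class GsonDateExample {
      static class Event {
        String name = "login"; // illustrative payload
        Date when = new Date();
      }

      public static void main(String[] args) {
        JsonSerializer<Date> dateSerializer = new JsonSerializer<Date>() {
          @Override
          public JsonElement serialize(Date date, Type type,
              JsonSerializationContext context) {
            return date == null ? null : new JsonPrimitive(date.getTime());
          }
        };
        Gson gson = new GsonBuilder()
            .registerTypeAdapter(Date.class, dateSerializer)
            .create();
        System.out.println(gson.toJson(new Event()));
      }
    }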

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
index d76a3e7..a60402e 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
@@ -32,7 +32,7 @@ import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.dao.UserConfigSolrDao;
 import org.apache.ambari.logsearch.query.QueryGeneration;
-import org.apache.ambari.logsearch.util.ConfigUtil;
+import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.ambari.logsearch.util.SolrUtil;
 import org.apache.ambari.logsearch.util.StringUtil;
@@ -58,6 +58,8 @@ import com.google.gson.JsonParseException;
 
 @Component
 public class UserConfigMgr extends MgrBase {
+  private static final String DEFAULT_LEVELS = "FATAL,ERROR,WARN,INFO,DEBUG,TRACE";
+
   static Logger logger = Logger.getLogger(UserConfigMgr.class);
 
   @Autowired
@@ -80,34 +82,35 @@ public class UserConfigMgr extends MgrBase {
     SolrInputDocument solrInputDoc = new SolrInputDocument();
     if (!isValid(vHistory)) {
       throw restErrorUtil.createRESTException("No FilterName Specified",
-        MessageEnums.INVALID_INPUT_DATA);
+          MessageEnums.INVALID_INPUT_DATA);
     }
 
     if (isNotUnique(vHistory) && !vHistory.isOverwrite()) {
       throw restErrorUtil.createRESTException(
-        "Name '" + vHistory.getFilterName() + "' already exists",
-        MessageEnums.INVALID_INPUT_DATA);
+          "Name '" + vHistory.getFilterName() + "' already exists",
+          MessageEnums.INVALID_INPUT_DATA);
     }
 
     solrInputDoc.addField(LogSearchConstants.ID, vHistory.getId());
-    solrInputDoc.addField(LogSearchConstants.USER_NAME,
-      vHistory.getUserName());
+    solrInputDoc.addField(LogSearchConstants.USER_NAME, vHistory.getUserName());
     solrInputDoc.addField(LogSearchConstants.VALUES, vHistory.getValues());
     solrInputDoc.addField(LogSearchConstants.FILTER_NAME,
-      vHistory.getFilterName());
-    solrInputDoc.addField(LogSearchConstants.ROW_TYPE,
-      vHistory.getRowType());
+        vHistory.getFilterName());
+    solrInputDoc.addField(LogSearchConstants.ROW_TYPE, vHistory.getRowType());
     List<String> shareNameList = vHistory.getShareNameList();
-    if (shareNameList != null && !shareNameList.isEmpty())
+    if (shareNameList != null && !shareNameList.isEmpty()){
       solrInputDoc.addField(LogSearchConstants.SHARE_NAME_LIST, shareNameList);
+    }
+    solrInputDoc.addField(LogSearchConstants.COMPOSITE_KEY,
+        vHistory.getFilterName() + "-" + vHistory.getUserName());
 
     try {
       userConfigSolrDao.addDocs(solrInputDoc);
       return convertObjToString(solrInputDoc);
     } catch (SolrException | SolrServerException | IOException e) {
-      logger.error(e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      logger.error("Error saving user config. solrDoc=" + solrInputDoc, e);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -118,16 +121,17 @@ public class UserConfigMgr extends MgrBase {
     if (filterName != null && rowType != null) {
       SolrQuery solrQuery = new SolrQuery();
       filterName = solrUtil.makeSearcableString(filterName);
-      solrQuery.setQuery(LogSearchConstants.COMPOSITE_KEY + ":"
-        + filterName + "-" + rowType);
+      solrQuery.setQuery(LogSearchConstants.COMPOSITE_KEY + ":" + filterName
+          + "-" + rowType);
       queryGenerator.setRowCount(solrQuery, 0);
       try {
-        Long numFound = userConfigSolrDao.process(solrQuery)
-          .getResults().getNumFound();
-        if (numFound > 0)
+        Long numFound = userConfigSolrDao.process(solrQuery).getResults()
+            .getNumFound();
+        if (numFound > 0) {
           return true;
+        }
       } catch (SolrException | SolrServerException | IOException e) {
-        logger.error(e);
+        logger.error("Error while checking if userConfig is unique.", e);
       }
     }
     return false;
@@ -136,18 +140,18 @@ public class UserConfigMgr extends MgrBase {
   private boolean isValid(VUserConfig vHistory) {
 
     return !stringUtil.isEmpty(vHistory.getFilterName())
-      && !stringUtil.isEmpty(vHistory.getRowType())
-      && !stringUtil.isEmpty(vHistory.getUserName())
-      && !stringUtil.isEmpty(vHistory.getValues());
+        && !stringUtil.isEmpty(vHistory.getRowType())
+        && !stringUtil.isEmpty(vHistory.getUserName())
+        && !stringUtil.isEmpty(vHistory.getValues());
   }
 
   public void deleteUserConfig(String id) {
     try {
       userConfigSolrDao.removeDoc("id:" + id);
     } catch (SolrException | SolrServerException | IOException e) {
-      logger.error(e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      logger.error("Deleting userCounfig. id=" + id, e);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -158,24 +162,21 @@ public class UserConfigMgr extends MgrBase {
     VUserConfigList userConfigList = new VUserConfigList();
 
     String rowType = (String) searchCriteria
-      .getParamValue(LogSearchConstants.ROW_TYPE);
+        .getParamValue(LogSearchConstants.ROW_TYPE);
     if (stringUtil.isEmpty(rowType)) {
-      throw restErrorUtil.createRESTException(
-        "row type was not specified",
-        MessageEnums.INVALID_INPUT_DATA);
+      throw restErrorUtil.createRESTException("row type was not specified",
+          MessageEnums.INVALID_INPUT_DATA);
     }
 
     String userName = (String) searchCriteria
-      .getParamValue(LogSearchConstants.USER_NAME);
+        .getParamValue(LogSearchConstants.USER_NAME);
     if (stringUtil.isEmpty(userName)) {
-      throw restErrorUtil.createRESTException(
-        "user name was not specified",
-        MessageEnums.INVALID_INPUT_DATA);
+      throw restErrorUtil.createRESTException("user name was not specified",
+          MessageEnums.INVALID_INPUT_DATA);
     }
     String filterName = (String) searchCriteria
-      .getParamValue(LogSearchConstants.FILTER_NAME);
-    filterName = stringUtil.isEmpty(filterName) ? "*" : "*" + filterName
-      + "*";
+        .getParamValue(LogSearchConstants.FILTER_NAME);
+    filterName = stringUtil.isEmpty(filterName) ? "*" : "*" + filterName + "*";
 
     try {
 
@@ -183,19 +184,20 @@ public class UserConfigMgr extends MgrBase {
       queryGenerator.setMainQuery(userConfigQuery, null);
       queryGenerator.setPagination(userConfigQuery, searchCriteria);
       queryGenerator.setSingleIncludeFilter(userConfigQuery,
-        LogSearchConstants.ROW_TYPE, rowType);
+          LogSearchConstants.ROW_TYPE, rowType);
       queryGenerator.setSingleORFilter(userConfigQuery,
-        LogSearchConstants.USER_NAME, userName,
-        LogSearchConstants.SHARE_NAME_LIST, userName);
+          LogSearchConstants.USER_NAME, userName,
+          LogSearchConstants.SHARE_NAME_LIST, userName);
       queryGenerator.setSingleIncludeFilter(userConfigQuery,
-        LogSearchConstants.FILTER_NAME, filterName);
+          LogSearchConstants.FILTER_NAME, filterName);
 
       if (stringUtil.isEmpty(searchCriteria.getSortBy())
-        || searchCriteria.getSortBy().equals("historyName"))
-        searchCriteria
-          .setSortBy(LogSearchConstants.FILTER_NAME);
-      if (stringUtil.isEmpty(searchCriteria.getSortType()))
+          || searchCriteria.getSortBy().equals("historyName")) {
+        searchCriteria.setSortBy(LogSearchConstants.FILTER_NAME);
+      }
+      if (stringUtil.isEmpty(searchCriteria.getSortType())) {
         searchCriteria.setSortType("" + SolrQuery.ORDER.asc);
+      }
 
       queryGenerator.setSingleSortOrder(userConfigQuery, searchCriteria);
       solrList = userConfigSolrDao.process(userConfigQuery).getResults();
@@ -205,21 +207,19 @@ public class UserConfigMgr extends MgrBase {
       for (SolrDocument solrDoc : solrList) {
         VUserConfig userConfig = new VUserConfig();
         userConfig.setFilterName(""
-          + solrDoc.get(LogSearchConstants.FILTER_NAME));
+            + solrDoc.get(LogSearchConstants.FILTER_NAME));
         userConfig.setId("" + solrDoc.get(LogSearchConstants.ID));
         userConfig.setValues("" + solrDoc.get(LogSearchConstants.VALUES));
-        userConfig.setRowType(""
-          + solrDoc.get(LogSearchConstants.ROW_TYPE));
+        userConfig.setRowType("" + solrDoc.get(LogSearchConstants.ROW_TYPE));
         try {
           List<String> shareNameList = (List<String>) solrDoc
-            .get(LogSearchConstants.SHARE_NAME_LIST);
+              .get(LogSearchConstants.SHARE_NAME_LIST);
           userConfig.setShareNameList(shareNameList);
         } catch (Exception e) {
           // do nothing
         }
 
-        userConfig.setUserName(""
-          + solrDoc.get(LogSearchConstants.USER_NAME));
+        userConfig.setUserName("" + solrDoc.get(LogSearchConstants.USER_NAME));
 
         configList.add(userConfig);
       }
@@ -231,17 +231,17 @@ public class UserConfigMgr extends MgrBase {
       userConfigList.setPageSize((int) searchCriteria.getMaxRows());
 
       userConfigList.setTotalCount((long) solrList.getNumFound());
-      userConfigList
-        .setResultSize((int) (configList.size() - searchCriteria
+      userConfigList.setResultSize((int) (configList.size() - searchCriteria
           .getStartIndex()));
     } catch (SolrException | SolrServerException | IOException e) {
       // do nothing
+      logger.error(e);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
-    try {
-      return convertObjToString(userConfigList);
-    } catch (IOException e) {
-      return "";
-    }
+
+    return convertObjToString(userConfigList);
+
   }
 
   public String updateUserConfig(VUserConfig vuserConfig) {
@@ -270,35 +270,49 @@ public class UserConfigMgr extends MgrBase {
       if (documentList != null && documentList.size() > 0) {
         SolrDocument configDoc = documentList.get(0);
         String configJson = jsonUtil.objToJson(configDoc);
-        HashMap<String, Object> configMap = (HashMap<String, Object>) jsonUtil.jsonToMapObject(configJson);
+        HashMap<String, Object> configMap = (HashMap<String, Object>) jsonUtil
+            .jsonToMapObject(configJson);
         String json = (String) configMap.get(LogSearchConstants.VALUES);
-        logfeederFilterWrapper = (VLogfeederFilterWrapper) jsonUtil.jsonToObj(json, VLogfeederFilterWrapper.class);
+        logfeederFilterWrapper = (VLogfeederFilterWrapper) jsonUtil.jsonToObj(
+            json, VLogfeederFilterWrapper.class);
         logfeederFilterWrapper.setId("" + configDoc.get(LogSearchConstants.ID));
 
       } else {
+        String logfeederDefaultLevels = PropertiesUtil
+            .getProperty("logfeeder.include.default.level", DEFAULT_LEVELS);
+        JSONArray levelJsonArray = new JSONArray();
+        try {
+          String levelArray[] = logfeederDefaultLevels.split(",");
+          for (String level : levelArray) {
+            levelJsonArray.put(level.toUpperCase());
+          }
+        } catch (Exception e) {
+          logger.error("Error spliting logfeederDefaultLevels="
+              + logfeederDefaultLevels, e);
+          throw restErrorUtil.createRESTException(e.getMessage(),
+              MessageEnums.ERROR_SYSTEM);
+        }
         String hadoopServiceString = getHadoopServiceConfigJSON();
+        String key = null;
+        JSONArray componentArray = null;
         try {
-
           JSONObject componentList = new JSONObject();
           JSONObject jsonValue = new JSONObject();
 
-          JSONObject hadoopServiceJsonObject = new JSONObject(hadoopServiceString)
-            .getJSONObject("service");
-          Iterator<String> hadoopSerivceKeys = hadoopServiceJsonObject
-            .keys();
+          JSONObject hadoopServiceJsonObject = new JSONObject(
+              hadoopServiceString).getJSONObject("service");
+          Iterator<String> hadoopSerivceKeys = hadoopServiceJsonObject.keys();
           while (hadoopSerivceKeys.hasNext()) {
-            String key = hadoopSerivceKeys.next();
-            JSONArray componentArray = hadoopServiceJsonObject
-              .getJSONObject(key).getJSONArray("components");
+            key = hadoopSerivceKeys.next();
+            componentArray = hadoopServiceJsonObject.getJSONObject(key)
+                .getJSONArray("components");
             for (int i = 0; i < componentArray.length(); i++) {
-              JSONObject compJsonObject = (JSONObject) componentArray
-                .get(i);
-              String componentName = compJsonObject
-                .getString("name");
+              JSONObject compJsonObject = (JSONObject) componentArray.get(i);
+              String componentName = compJsonObject.getString("name");
               JSONObject innerContent = new JSONObject();
               innerContent.put("label", componentName);
               innerContent.put("hosts", new JSONArray());
-              innerContent.put("defaultLevels", new JSONArray());
+              innerContent.put("defaultLevels", levelJsonArray);
               componentList.put(componentName, innerContent);
             }
           }
@@ -306,14 +320,16 @@ public class UserConfigMgr extends MgrBase {
           return saveUserFiter(jsonValue.toString());
 
         } catch (JsonParseException | JSONException je) {
-          logger.error(je);
+          logger.error("Error parsing JSON. key=" + key + ", componentArray="
+              + componentArray, je);
           logfeederFilterWrapper = new VLogfeederFilterWrapper();
         }
       }
       return convertObjToString(logfeederFilterWrapper);
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error(e);
-      throw restErrorUtil.createRESTException(e.getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -324,11 +340,12 @@ public class UserConfigMgr extends MgrBase {
    * @return
    */
   public String saveUserFiter(String json) {
-    VLogfeederFilterWrapper logfeederFilterWrapper = (VLogfeederFilterWrapper) jsonUtil.jsonToObj(json,
-      VLogfeederFilterWrapper.class);
+    VLogfeederFilterWrapper logfeederFilterWrapper = (VLogfeederFilterWrapper) jsonUtil
+        .jsonToObj(json, VLogfeederFilterWrapper.class);
     if (logfeederFilterWrapper == null) {
       logger.error("filter json is not a valid :" + json);
-      throw restErrorUtil.createRESTException("Invalid filter json", MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException("Invalid filter json",
+          MessageEnums.ERROR_SYSTEM);
     }
     String id = logfeederFilterWrapper.getId();
     if (!stringUtil.isEmpty(id)) {
@@ -336,17 +353,20 @@ public class UserConfigMgr extends MgrBase {
     }
     String filterName = LogSearchConstants.LOGFEEDER_FILTER_NAME;
     json = jsonUtil.objToJson(logfeederFilterWrapper);
-    SolrInputDocument conifgDocument = new SolrInputDocument();
-    conifgDocument.addField(LogSearchConstants.ID, new Date().getTime());
-    conifgDocument.addField(LogSearchConstants.ROW_TYPE, filterName);
-    conifgDocument.addField(LogSearchConstants.VALUES, json);
-    conifgDocument.addField(LogSearchConstants.USER_NAME, filterName);
-    conifgDocument.addField(LogSearchConstants.FILTER_NAME, filterName);
+    SolrInputDocument configDocument = new SolrInputDocument();
+    configDocument.addField(LogSearchConstants.ID, new Date().getTime());
+    configDocument.addField(LogSearchConstants.ROW_TYPE, filterName);
+    configDocument.addField(LogSearchConstants.VALUES, json);
+    configDocument.addField(LogSearchConstants.USER_NAME, filterName);
+    configDocument.addField(LogSearchConstants.FILTER_NAME, filterName);
+    configDocument.addField(LogSearchConstants.COMPOSITE_KEY, filterName + "-"
+        + filterName);
     try {
-      userConfigSolrDao.addDocs(conifgDocument);
+      userConfigSolrDao.addDocs(configDocument);
     } catch (SolrException | SolrServerException | IOException e) {
-      logger.error(e);
-      throw restErrorUtil.createRESTException(e.getMessage(), MessageEnums.ERROR_SYSTEM);
+      logger.error("Saving UserConfig. config=" + configDocument, e);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
     return getUserFilter();
   }
@@ -356,27 +376,25 @@ public class UserConfigMgr extends MgrBase {
     try {
       SolrQuery userListQuery = new SolrQuery();
       queryGenerator.setMainQuery(userListQuery, null);
-      queryGenerator.setFacetField(userListQuery,
-        LogSearchConstants.USER_NAME);
-      QueryResponse queryResponse = userConfigSolrDao
-        .process(userListQuery);
-      if (queryResponse == null)
+      queryGenerator.setFacetField(userListQuery, LogSearchConstants.USER_NAME);
+      QueryResponse queryResponse = userConfigSolrDao.process(userListQuery);
+      if (queryResponse == null) {
         return convertObjToString(userList);
+      }
       List<Count> counList = queryResponse.getFacetField(
-        LogSearchConstants.USER_NAME).getValues();
+          LogSearchConstants.USER_NAME).getValues();
       for (Count cnt : counList) {
         String userName = cnt.getName();
         userList.add(userName);
       }
     } catch (SolrException | SolrServerException | IOException e) {
+      logger.warn("Error getting all users.", e);
       // do nothing
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
+    return convertObjToString(userList);
 
-    try {
-      return convertObjToString(userList);
-    } catch (IOException e) {
-      return "";
-    }
   }
 
 }
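
Note: when no saved Logfeeder filter exists, getUserFilter() now seeds each component's defaultLevels from logfeeder.include.default.level (falling back to DEFAULT_LEVELS) instead of an empty array, and user-config documents also get a COMPOSITE_KEY of filterName + "-" + userName. A compact sketch of the defaultLevels construction using org.json, with the property value hard-coded in place of the real lookup:

    import org.json.JSONArray;
    import org.json.JSONObject;

    // Sketch: build the per-component "defaultLevels" array the way getUserFilter()
    // does when no saved filter exists. The property value is hard-coded here.
    public class DefaultLevelsSketch {
      public static void main(String[] args) throws Exception {
        String logfeederDefaultLevels = "fatal,error,warn"; // stands in for the property lookup
        JSONArray levelJsonArray = new JSONArray();
        for (String level : logfeederDefaultLevels.split(",")) {
          levelJsonArray.put(level.toUpperCase());
        }

        JSONObject innerContent = new JSONObject();
        innerContent.put("label", "logsearch_feeder"); // illustrative component name
        innerContent.put("hosts", new JSONArray());
        innerContent.put("defaultLevels", levelJsonArray);
        System.out.println(innerContent.toString(2));
      }
    }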

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
index 38a31fb..2f47ec5 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
@@ -27,94 +27,67 @@ import java.util.regex.Pattern;
 
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.SearchCriteria;
-import org.apache.ambari.logsearch.util.BizUtil;
+import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
 import org.apache.ambari.logsearch.util.ConfigUtil;
-import org.apache.ambari.logsearch.util.DateUtil;
-import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
-import org.apache.ambari.logsearch.util.SolrUtil;
-import org.apache.ambari.logsearch.util.StringUtil;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
 @Component
 public class QueryGeneration extends QueryGenerationBase {
 
-  static Logger logger = Logger.getLogger(QueryGeneration.class);
-
-  @Autowired
-  SolrUtil solrUtil;
-
-  @Autowired
-  RESTErrorUtil restErrorUtil;
-
-  @Autowired
-  BizUtil bizUtil;
-
-  @Autowired
-  DateUtil dateUtil;
-
-  @Autowired
-  StringUtil stringUtil;
-
-  @Autowired
-  JSONUtil jsonUtil;
-
-  public SolrQuery commonFilterQuery(SearchCriteria searchCriteria) {
+  private static Logger logger = Logger.getLogger(QueryGeneration.class);
 
+  public SolrQuery commonServiceFilterQuery(SearchCriteria searchCriteria) {
+    LOG_TYPE logType = LOG_TYPE.SERVICE;
     SolrQuery solrQuery = new SolrQuery();
-
-    String jsonHCNames = (String) searchCriteria
-      .getParamValue("treeParams");
+    String treeParams = (String) searchCriteria.getParamValue("treeParams");
     String givenQuery = (String) searchCriteria.getParamValue("q");
     String level = (String) searchCriteria.getParamValue("level");
-
     String startTime = (String) searchCriteria.getParamValue("from");
     String endTime = (String) searchCriteria.getParamValue("to");
     String iMessage = (String) searchCriteria.getParamValue("iMessage");
     String eMessage = (String) searchCriteria.getParamValue("eMessage");
     String gEmessage = (String) searchCriteria.getParamValue("gEMessage");
-    String selectedComp = (String) searchCriteria
-      .getParamValue("selectComp");
+    String selectedComp = (String) searchCriteria.getParamValue("selectComp");
     String bundleId = (String) searchCriteria
-      .getParamValue(LogSearchConstants.BUNDLE_ID);
+        .getParamValue(LogSearchConstants.BUNDLE_ID);
     String globalExcludeComp = (String) searchCriteria
-      .getParamValue("gMustNot");
+        .getParamValue("gMustNot");
     String unselectedComp = (String) searchCriteria
-      .getParamValue("unselectComp");
+        .getParamValue("unselectComp");
     String urlHostName = (String) searchCriteria.getParamValue("host_name");
-    String urlComponents = (String) searchCriteria.getParamValue("components_name");
-
+    String urlComponentName = (String) searchCriteria
+        .getParamValue("component_name");
+    String file_name = (String) searchCriteria.getParamValue("file_name");
     String advQuery = (String) searchCriteria.getParamValue("advanceSearch");
+    // build advance query
     if (!stringUtil.isEmpty(advQuery)) {
       String advQueryParameters[] = advQuery.split(Pattern.quote("}{"));
       SolrQuery advSolrQuery = new SolrQuery();
-
       for (String queryParam : advQueryParameters) {
         String params[] = queryParam.split(Pattern.quote("="));
-        advSolrQuery.setParam(params[0], params[1]);
+        if (params != null && params.length > 1)
+          advSolrQuery.setParam(params[0], params[1]);
       }
-
       // Building and adding levels to filters
       setFilterClauseWithFieldName(advSolrQuery, level,
-        LogSearchConstants.SOLR_LEVEL, "", "OR");
+          LogSearchConstants.SOLR_LEVEL, "", CONDITION.OR);
 
       // Adding Logtime to filters
-      setSingleRangeFilter(advSolrQuery, LogSearchConstants.LOGTIME,
-        startTime, endTime);
+      setSingleRangeFilter(advSolrQuery, LogSearchConstants.LOGTIME, startTime,
+          endTime);
 
       // Building and adding exlcude components to filters
       setFilterClauseWithFieldName(advSolrQuery, unselectedComp,
-        LogSearchConstants.SOLR_COMPONENT,
-        LogSearchConstants.MINUS_OPERATOR, "AND");
+          LogSearchConstants.SOLR_COMPONENT, LogSearchConstants.MINUS_OPERATOR,
+          CONDITION.AND);
 
       // Building and adding exlcude components to filters
       setFilterClauseWithFieldName(advSolrQuery, selectedComp,
-        LogSearchConstants.SOLR_COMPONENT,
-        LogSearchConstants.NO_OPERATOR, "OR");
+          LogSearchConstants.SOLR_COMPONENT, LogSearchConstants.NO_OPERATOR,
+          CONDITION.OR);
 
       // Set Pagination
       setPagination(advSolrQuery, searchCriteria);
@@ -126,52 +99,67 @@ public class QueryGeneration extends QueryGenerationBase {
 
     // Adding Logtime to filters
     setSingleRangeFilter(solrQuery, LogSearchConstants.LOGTIME, startTime,
-      endTime);
+        endTime);
+
+    // String mainFilterQuery = buildQueryFromJSONCompHost(jsonHCNames,
+    // selectedComp);
 
-    String mainFilterQuery = buildQueryFromJSONCompHost(jsonHCNames, selectedComp);
+    // if (mainFilterQuery != null && !mainFilterQuery.equals(""))
+    // solrQuery.addFilterQuery(mainFilterQuery);
 
-    if (mainFilterQuery != null && !mainFilterQuery.equals(""))
-      solrQuery.addFilterQuery(mainFilterQuery);
+    // add component filter
+    addFilter(solrQuery, selectedComp, LogSearchConstants.SOLR_COMPONENT,
+        CONDITION.OR);
+
+    // add treeParams filter
+    // hosts comma separated list
+    addFilterQueryFromArray(solrQuery, treeParams,
+        LogSearchConstants.SOLR_HOST, CONDITION.OR);
 
     // Building and adding levels to filters
-    setFilterClauseWithFieldName(solrQuery, level, LogSearchConstants.SOLR_LEVEL, "", "OR");
+    setFilterClauseWithFieldName(solrQuery, level,
+        LogSearchConstants.SOLR_LEVEL, LogSearchConstants.NO_OPERATOR,
+        CONDITION.OR);
 
     // Building and adding include string to filters
-    setFilterClauseForSolrSearchableString(solrQuery, iMessage, "OR", "",
-      LogSearchConstants.SOLR_LOG_MESSAGE);
+    setFilterClauseForSolrSearchableString(solrQuery, iMessage, CONDITION.OR,
+        LogSearchConstants.NO_OPERATOR, LogSearchConstants.SOLR_KEY_LOG_MESSAGE);
 
     // Building and adding global exclude string to filters
-    setFilterClauseForSolrSearchableString(solrQuery, gEmessage, "AND",
-      LogSearchConstants.MINUS_OPERATOR,
-      LogSearchConstants.SOLR_LOG_MESSAGE);
+    setFilterClauseForSolrSearchableString(solrQuery, gEmessage, CONDITION.AND,
+        LogSearchConstants.MINUS_OPERATOR, LogSearchConstants.SOLR_KEY_LOG_MESSAGE);
 
     // Building and adding exclude string to filter
-    setFilterClauseForSolrSearchableString(solrQuery, eMessage, "AND",
-      LogSearchConstants.MINUS_OPERATOR,
-      LogSearchConstants.SOLR_LOG_MESSAGE);
+    setFilterClauseForSolrSearchableString(solrQuery, eMessage, CONDITION.AND,
+        LogSearchConstants.MINUS_OPERATOR, LogSearchConstants.SOLR_KEY_LOG_MESSAGE);
 
     // Building and adding logfile to filters
     applyLogFileFilter(solrQuery, searchCriteria);
 
     // Building and adding exclude components to filters
     setFilterClauseWithFieldName(solrQuery, globalExcludeComp,
-      LogSearchConstants.SOLR_COMPONENT,
-      LogSearchConstants.MINUS_OPERATOR, "AND");
+        LogSearchConstants.SOLR_COMPONENT, LogSearchConstants.MINUS_OPERATOR,
+        CONDITION.AND);
 
     // Building and adding exlcude components to filters
     setFilterClauseWithFieldName(solrQuery, unselectedComp,
-      LogSearchConstants.SOLR_COMPONENT,
-      LogSearchConstants.MINUS_OPERATOR, "AND");
-
-    //Building and addding host names given url
-    setFilterClauseWithFieldName(solrQuery, urlHostName,
-      LogSearchConstants.SOLR_HOST,
-      "", "OR");
-
-    //Building and addding component names given url
-    setFilterClauseWithFieldName(solrQuery, urlComponents,
-      LogSearchConstants.SOLR_COMPONENT,
-      "", "OR");
+        LogSearchConstants.SOLR_COMPONENT, LogSearchConstants.MINUS_OPERATOR,
+        CONDITION.AND);
+
+    // Building and adding host names given url
+    // setFilterClauseWithFieldName(solrQuery, urlHostName,
+    // LogSearchConstants.SOLR_HOST,
+    // "", "OR");
+    urlHostName = solrUtil.escapeQueryChars(urlHostName);
+    setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_HOST, urlHostName);
+    //
+    // //Building and addding component names given url
+    // setFilterClauseWithFieldName(solrQuery, urlComponents,
+    // LogSearchConstants.SOLR_COMPONENT,
+    // "", "OR");
+    urlComponentName = solrUtil.escapeQueryChars(urlComponentName);
+    setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_COMPONENT,
+        urlComponentName);
 
     // Set Pagination
     setPagination(solrQuery, searchCriteria);
@@ -182,156 +170,161 @@ public class QueryGeneration extends QueryGenerationBase {
     // Set Bundle Id
     setSingleIncludeFilter(solrQuery, LogSearchConstants.BUNDLE_ID, bundleId);
 
+    // Set filename
+    file_name = solrUtil.escapeQueryChars(file_name);
+    setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_PATH, file_name);
+    // include query
     this.setUserSpecificFilter(searchCriteria, solrQuery,
-      LogSearchConstants.INCLUDE_QUERY,
-      LogSearchConstants.INCLUDE_QUERY);
-
+        LogSearchConstants.INCLUDE_QUERY, LogSearchConstants.INCLUDE_QUERY,
+        logType);
+    // exclude query
     this.setUserSpecificFilter(searchCriteria, solrQuery,
-      LogSearchConstants.EXCLUDE_QUERY,
-      LogSearchConstants.EXCLUDE_QUERY);
+        LogSearchConstants.EXCLUDE_QUERY, LogSearchConstants.EXCLUDE_QUERY,
+        logType);
     return solrQuery;
   }
 
   public void applyLogFileFilter(SolrQuery solrQuery,
-                                 SearchCriteria searchCriteria) {
-    String hostLogFile = (String) searchCriteria
-      .getParamValue("hostLogFile");
-    String compLogFile = (String) searchCriteria
-      .getParamValue("compLogFile");
+      SearchCriteria searchCriteria) {
+    String hostLogFile = (String) searchCriteria.getParamValue("hostLogFile");
+    String compLogFile = (String) searchCriteria.getParamValue("compLogFile");
     String givenQuery = (String) searchCriteria.getParamValue("q");
     String logfileQuery = "";
-    if (hostLogFile != null && !hostLogFile.equals("")
-      && compLogFile != null && !compLogFile.equals("")) {
-      logfileQuery = "host:" + hostLogFile + " AND type:" + compLogFile;
-      if (givenQuery != null && !givenQuery.equals(""))
-        logfileQuery = "(" + givenQuery + ") AND (" + logfileQuery
-          + ")";
-      solrQuery.addFilterQuery(logfileQuery);
+    if (!stringUtil.isEmpty(hostLogFile) && !stringUtil.isEmpty(compLogFile)) {
+      logfileQuery = LogSearchConstants.SOLR_HOST + ":" + hostLogFile + " "
+          + CONDITION.AND + " " + LogSearchConstants.SOLR_COMPONENT + ":"
+          + compLogFile;
+      if (!stringUtil.isEmpty(givenQuery)) {
+        logfileQuery = "(" + givenQuery + ") " + CONDITION.AND + " ("
+            + logfileQuery + ")";
+      }
+      if (!stringUtil.isEmpty(logfileQuery)) {
+        solrQuery.addFilterQuery(logfileQuery);
+      }
     }
   }
 
   public void setUserSpecificFilter(SearchCriteria searchCriteria,
-                                    SolrQuery solrQuery, String paramName, String operation) {
-
+      SolrQuery solrQuery, String paramName, String operation, LOG_TYPE logType) {
     String queryString = (String) searchCriteria.getParamValue(paramName);
     String columnQuery = (String) searchCriteria
-      .getParamValue(LogSearchConstants.COLUMN_QUERY);
-    if (!stringUtil.isEmpty(queryString) && "[]".equals(queryString))
+        .getParamValue(LogSearchConstants.COLUMN_QUERY);
+    if (stringUtil.isEmpty(queryString)) {
       queryString = null;
+    }
+    // if (!stringUtil.isEmpty(queryString) && "[]".equals(queryString)) {
+    // queryString = null;
+    // }
     if (!stringUtil.isEmpty(columnQuery) && stringUtil.isEmpty(queryString)
-      && !paramName.equals(LogSearchConstants.EXCLUDE_QUERY))
+        && !paramName.equals(LogSearchConstants.EXCLUDE_QUERY)) {
       queryString = columnQuery;
+    }
     List<String> conditionQuries = new ArrayList<String>();
     List<String> referalConditionQuries = new ArrayList<String>();
     List<String> elments = new ArrayList<String>();
-    if (!stringUtil.isEmpty(queryString)) {
-      List<HashMap<String, Object>> queryList = jsonUtil
+    // convert json to list of hashmap
+    List<HashMap<String, Object>> queryList = jsonUtil
         .jsonToMapObjectList(queryString);
-      if (!stringUtil.isEmpty(columnQuery)
-        && !columnQuery.equals(queryString) && !paramName.equals(LogSearchConstants.EXCLUDE_QUERY)) {
+    // null and size check
+    if (queryList != null && queryList.size() > 0) {
+      if (!stringUtil.isEmpty(columnQuery) && !columnQuery.equals(queryString)
+          && !paramName.equals(LogSearchConstants.EXCLUDE_QUERY)) {
         List<HashMap<String, Object>> columnQueryList = jsonUtil
-          .jsonToMapObjectList(columnQuery);
-        queryList.addAll(columnQueryList);
+            .jsonToMapObjectList(columnQuery);
+        if (columnQueryList != null && columnQueryList.size() > 0) {
+          queryList.addAll(columnQueryList);
+        }
       }
-
       for (HashMap<String, Object> columnListMap : queryList) {
         String orQuery = "";
-        String field = "";
-        for (String key : columnListMap.keySet()) {
-          String originalKey = getOriginalKey(key);
-          String value = getOriginalValue(originalKey, ""
-            + columnListMap.get(key));
-          orQuery = originalKey + ":"
-            + putWildCardByType(value, originalKey);
-
-          boolean isSame = false;
-          for (String temp : elments) {
-            if (key.equals(temp))
-              isSame = true;
-          }
-          if (isSame
-            && !operation
-            .equals(LogSearchConstants.EXCLUDE_QUERY)) {
-            for (String tempCondition : conditionQuries) {
-              if (tempCondition.contains(originalKey)) {
-                String newCondtion = tempCondition + " OR "
-                  + orQuery;
-                referalConditionQuries.remove(tempCondition);
-                referalConditionQuries.add(newCondtion);
+        StringBuilder field = new StringBuilder();
+        if (columnListMap != null) {
+          for (String key : columnListMap.keySet()) {
+            if (!stringUtil.isEmpty(key)) {
+              String originalKey = getOriginalKey(key, logType);
+              String value = getOriginalValue(originalKey,
+                  "" + columnListMap.get(key));
+              orQuery = putWildCardByType(value, originalKey, logType);
+              boolean isSame = false;
+              if (elments.contains(key)) {
+                isSame = true;
+              }
+              if (isSame && !operation.equals(LogSearchConstants.EXCLUDE_QUERY)) {
+                for (String tempCondition : conditionQuries) {
+                  if (tempCondition.contains(originalKey)) {
+                    String newCondtion = tempCondition + " "
+                        + CONDITION.OR.name() + " " + orQuery;
+                    referalConditionQuries.remove(tempCondition);
+                    referalConditionQuries.add(newCondtion);
+                  }
+                }
+                conditionQuries.removeAll(conditionQuries);
+                conditionQuries.addAll(referalConditionQuries);
+              } else {
+                conditionQuries.add(orQuery.toString());
+                referalConditionQuries.add(orQuery.toString());
               }
+              field.append(key);
+              elments.add(field.toString());
             }
-            conditionQuries.removeAll(conditionQuries);
-            conditionQuries.addAll(referalConditionQuries);
-          } else {
-            conditionQuries.add(orQuery);
-            referalConditionQuries.add(orQuery);
           }
-
-          field = key;
-          elments.add(field);
         }
-
       }
     }
-    if (!referalConditionQuries.isEmpty()) {
+    if (!referalConditionQuries.isEmpty() && !stringUtil.isEmpty(operation)) {
       if (operation.equals(LogSearchConstants.INCLUDE_QUERY)
-        || operation.equals(LogSearchConstants.COLUMN_QUERY)) {
-        for (String filter : referalConditionQuries)
-          solrQuery.addFilterQuery(filter);
+          || operation.equals(LogSearchConstants.COLUMN_QUERY)) {
+        for (String filter : referalConditionQuries) {
+          if (!stringUtil.isEmpty(filter)) {
+            solrQuery.addFilterQuery(filter);
+          }
+        }
       } else if (operation.equals(LogSearchConstants.EXCLUDE_QUERY)) {
-
         for (String filter : referalConditionQuries) {
-          filter = "-" + filter;
-          solrQuery.addFilterQuery(filter);
+          if (!stringUtil.isEmpty(filter)) {
+            filter = LogSearchConstants.MINUS_OPERATOR + filter;
+            solrQuery.addFilterQuery(filter);
+          }
         }
       }
     }
   }
 
   public SolrQuery commonAuditFilterQuery(SearchCriteria searchCriteria) {
-
+    LOG_TYPE logType = LOG_TYPE.AUDIT;
     SolrQuery solrQuery = new SolrQuery();
     solrQuery.setQuery("*:*");
-
     String startTime = (String) searchCriteria.getParamValue("startTime");
     String endTime = (String) searchCriteria.getParamValue("endTime");
     String selectedComp = (String) searchCriteria
-      .getParamValue("includeString");
-
+        .getParamValue("includeString");
     this.setFilterClauseWithFieldName(solrQuery, selectedComp,
-      LogSearchConstants.AUDIT_COMPONENT,
-      LogSearchConstants.NO_OPERATOR, "OR");
-
+        LogSearchConstants.AUDIT_COMPONENT, LogSearchConstants.NO_OPERATOR,
+        CONDITION.OR);
     String globalExcludeComp = (String) searchCriteria
-      .getParamValue("gMustNot");
-
+        .getParamValue("gMustNot");
     this.setUserSpecificFilter(searchCriteria, solrQuery,
-      LogSearchConstants.INCLUDE_QUERY,
-      LogSearchConstants.INCLUDE_QUERY);
-
+        LogSearchConstants.INCLUDE_QUERY, LogSearchConstants.INCLUDE_QUERY,
+        logType);
     this.setUserSpecificFilter(searchCriteria, solrQuery,
-      LogSearchConstants.EXCLUDE_QUERY,
-      LogSearchConstants.EXCLUDE_QUERY);
-
+        LogSearchConstants.EXCLUDE_QUERY, LogSearchConstants.EXCLUDE_QUERY,
+        logType);
     String unselectedComp = (String) searchCriteria
-      .getParamValue("unselectComp");
-
+        .getParamValue("unselectComp");
     this.setFilterClauseWithFieldName(solrQuery, globalExcludeComp,
-      LogSearchConstants.AUDIT_COMPONENT,
-      LogSearchConstants.MINUS_OPERATOR, "AND");
-
+        LogSearchConstants.AUDIT_COMPONENT, LogSearchConstants.MINUS_OPERATOR,
+        CONDITION.AND);
     // Building and adding exlcude components to filters
     this.setFilterClauseWithFieldName(solrQuery, unselectedComp,
-      LogSearchConstants.AUDIT_COMPONENT,
-      LogSearchConstants.MINUS_OPERATOR, "AND");
-
+        LogSearchConstants.AUDIT_COMPONENT, LogSearchConstants.MINUS_OPERATOR,
+        CONDITION.AND);
     // Adding Logtime to filters
     this.setSingleRangeFilter(solrQuery, LogSearchConstants.AUDIT_EVTTIME,
-      startTime, endTime);
-
+        startTime, endTime);
     this.setPagination(solrQuery, searchCriteria);
     try {
-      if (searchCriteria.getSortBy().isEmpty()) {
+      if (searchCriteria.getSortBy() == null
+          || searchCriteria.getSortBy().isEmpty()) {
         searchCriteria.setSortBy(LogSearchConstants.AUDIT_EVTTIME);
         searchCriteria.setSortType(SolrQuery.ORDER.desc.toString());
       }
@@ -339,52 +332,40 @@ public class QueryGeneration extends QueryGenerationBase {
       searchCriteria.setSortBy(LogSearchConstants.AUDIT_EVTTIME);
       searchCriteria.setSortType(SolrQuery.ORDER.desc.toString());
     }
-
     this.setSortOrderDefaultServiceLog(solrQuery, searchCriteria);
     return solrQuery;
   }
 
-  private String putWildCardByType(String str, String key) {
-
-    String auditSuffix = PropertiesUtil
-      .getProperty("auditlog.solr.core.logs");
-    String serviceLogs = PropertiesUtil.getProperty("solr.core.logs");
-
-    String type = ConfigUtil.schemaFieldsName.get(key + auditSuffix);
-    if (type == null)
+  private String putWildCardByType(String str, String key, LOG_TYPE logType) {
+    String type;
+    switch (logType) {
+    case AUDIT:
+      String auditSuffix = PropertiesUtil
+          .getProperty("auditlog.solr.core.logs");
+      type = ConfigUtil.schemaFieldsName.get(key + auditSuffix);
+      break;
+    case SERVICE:
+      String serviceLogs = PropertiesUtil.getProperty("solr.core.logs");
       type = ConfigUtil.schemaFieldsName.get(key + serviceLogs);
-    if (type == null)
-      return "*" + str + "*";
-    if ("text_std_token_lower_case".equalsIgnoreCase(type))
-      return giveSplittedStringQuery(str);
-    if ("key_lower_case".equalsIgnoreCase(type)
-      || "string".equalsIgnoreCase(type))
-      //return solrUtil.makeSolrSearchString(str);
-      return solrUtil.makeSolrSearchStringWithoutAsterisk(str);
-    if ("ip_address".equalsIgnoreCase(type))
-      return str;
-    return putEscapeCharacterForNumber(str);
-  }
-
-  private String giveSplittedStringQuery(String str) {
-    try {
-      String splittedString[] = str
-        .split("/|-|@|&|^|%|$|#|!|~|:|;|\\*|\\+");
-      String newStr = "(";
-      int cnt = 0;
-      for (String normalString : splittedString) {
-        cnt++;
-        if (!normalString.isEmpty()) {
-          newStr += "*" + normalString + "*";
-        }
-        if (!normalString.isEmpty() && cnt < splittedString.length)
-          newStr += " AND ";
-      }
-      newStr += ")";
-      return newStr;
-    } catch (Exception e) {
-      return "*" + str + "*";
+      break;
+    default:
+      // set as null
+      type = null;
+    }
+    if (key.equalsIgnoreCase(LogSearchConstants.SOLR_LOG_MESSAGE)) {
+      return solrUtil.escapeForLogMessage(key, str);
     }
+    if (type == null) {
+      return key + ":" + "*" + str + "*";
+    } else if ("text_std_token_lower_case".equalsIgnoreCase(type)) {
+      return key + ":" + solrUtil.escapeForStandardTokenizer(str);
+    } else if ("key_lower_case".equalsIgnoreCase(type)
+        || "string".equalsIgnoreCase(type)) {
+      return key + ":" + solrUtil.makeSolrSearchStringWithoutAsterisk(str);
+    } else if ("ip_address".equalsIgnoreCase(type)) {
+      return key + ":" + str;
+    }
+    return key + ":" + putEscapeCharacterForNumber(str);
   }
 
   private String putEscapeCharacterForNumber(String str) {
@@ -403,40 +384,56 @@ public class QueryGeneration extends QueryGenerationBase {
 
   private String getOriginalValue(String name, String value) {
     String solrValue = PropertiesUtil.getProperty(name);
-
+    if (stringUtil.isEmpty(solrValue)) {
+      return value;
+    }
     try {
-      String propertyFieldMappings[] = solrValue.split(",");
-      HashMap<String, String> propertyFieldValue = new HashMap<String, String>();
-      for (String temp : propertyFieldMappings) {
-        String arrayValue[] = temp.split(":");
-        propertyFieldValue.put(arrayValue[0].toLowerCase(Locale.ENGLISH),
-          arrayValue[1].toLowerCase(Locale.ENGLISH));
+      String propertyFieldMappings[] = solrValue
+          .split(LogSearchConstants.LIST_SEPARATOR);
+      if (propertyFieldMappings != null && propertyFieldMappings.length > 0) {
+        HashMap<String, String> propertyFieldValue = new HashMap<String, String>();
+        for (String temp : propertyFieldMappings) {
+          if (!stringUtil.isEmpty(temp)) {
+            String arrayValue[] = temp.split(":");
+            if (arrayValue.length > 1) {
+              propertyFieldValue.put(arrayValue[0].toLowerCase(Locale.ENGLISH),
+                  arrayValue[1].toLowerCase(Locale.ENGLISH));
+            } else {
+              logger.warn("array length is less than required length 1");
+            }
+          }
+        }
+        String originalValue = propertyFieldValue.get(value
+            .toLowerCase(Locale.ENGLISH));
+        if (!stringUtil.isEmpty(originalValue)) {
+          return originalValue;
+        }
       }
-      String originalValue = propertyFieldValue.get(value.toLowerCase(Locale.ENGLISH));
-      if (originalValue != null && !originalValue.isEmpty())
-        return originalValue;
-
     } catch (Exception e) {
       // do nothing
     }
     return value;
-
   }
 
-  private String getOriginalKey(String key) {
-    String originalServiceKey = ConfigUtil.serviceLogsColumnMapping.get(key
-      + LogSearchConstants.UI_SUFFIX);
-    String originalAuditKey = ConfigUtil.auditLogsColumnMapping.get(key
-      + LogSearchConstants.UI_SUFFIX);
-    if (originalAuditKey != null && originalServiceKey == null) {
-      return originalAuditKey;
-    }
-    if (originalServiceKey != null && originalAuditKey == null) {
-      return originalServiceKey;
+  private String getOriginalKey(String key, LOG_TYPE logType) {
+    String originalKey;
+    switch (logType) {
+    case AUDIT:
+      originalKey = ConfigUtil.auditLogsColumnMapping.get(key
+          + LogSearchConstants.UI_SUFFIX);
+      break;
+    case SERVICE:
+      originalKey = ConfigUtil.serviceLogsColumnMapping.get(key
+          + LogSearchConstants.UI_SUFFIX);
+      break;
+    default:
+      originalKey = null;
+      // set as null
     }
-    if (originalAuditKey != null && originalServiceKey != null) {
-      return originalServiceKey;
+    if (stringUtil.isEmpty(originalKey)) {
+      // return default values
+      return key;
     }
-    return key;
+    return originalKey;
   }
 }
\ No newline at end of file

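The putWildCardByType() rewrite above boils down to: look up the field's Solr schema type for the selected log type, then pick an escaping/wildcard strategy before emitting a field:value clause. A simplified, standalone sketch of that idea follows; the type table and escaping rules below are stand-ins for ConfigUtil.schemaFieldsName and SolrUtil, not the project's actual implementations.

import java.util.HashMap;
import java.util.Map;

public class WildcardByTypeSketch {

  // stand-in for ConfigUtil.schemaFieldsName: Solr field name -> schema type
  private static final Map<String, String> FIELD_TYPES = new HashMap<String, String>();
  static {
    FIELD_TYPES.put("level", "key_lower_case");
    FIELD_TYPES.put("host", "string");
    FIELD_TYPES.put("line_number", "tint");
  }

  static String clause(String field, String value) {
    String type = FIELD_TYPES.get(field);
    if (type == null) {
      // unknown type: fall back to a plain wildcard match
      return field + ":*" + value + "*";
    }
    if ("key_lower_case".equalsIgnoreCase(type) || "string".equalsIgnoreCase(type)) {
      // keyword-like fields: quote the value for an exact match
      return field + ":\"" + value + "\"";
    }
    // numeric-ish fields: escape characters the Solr query parser would interpret
    return field + ":" + value.replace("-", "\\-");
  }

  public static void main(String[] args) {
    System.out.println(clause("level", "ERROR"));          // level:"ERROR"
    System.out.println(clause("log_message", "Timeout"));  // log_message:*Timeout*
    System.out.println(clause("line_number", "-1"));       // line_number:\-1
  }
}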
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
index e357d02..cc61127 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
@@ -31,11 +31,10 @@ import org.apache.ambari.logsearch.util.StringUtil;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrQuery.ORDER;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
 import org.springframework.beans.factory.annotation.Autowired;
 
+import com.google.gson.Gson;
+
 public abstract class QueryGenerationBase extends QueryBase {
 
   static Logger logger = Logger.getLogger(QueryGenerationBase.class);
@@ -49,76 +48,92 @@ public abstract class QueryGenerationBase extends QueryBase {
   @Autowired
   JSONUtil jsonUtil;
 
+  public static enum CONDITION {
+    OR, AND
+  }
+
   // SetMethods to apply to the query
   public void setFilterClauseForSolrSearchableString(SolrQuery solrQuery,
-                                                     String commaSepratedString, String booleanOperator, String opr,
-                                                     String messageField) {
-    String operator = opr;
+      String commaSepratedString, CONDITION condition, String operator,
+      String messageField) {
     String filterQuery = "";
-    if (commaSepratedString != null && !commaSepratedString.isEmpty()) {
-
-      String queryMsg = "";
-      operator = operator == null ? "" : operator;
+    if (!stringUtil.isEmpty(commaSepratedString)) {
+      StringBuilder queryMsg = new StringBuilder();
+      operator = (operator == null ? LogSearchConstants.NO_OPERATOR : operator);
       String[] msgList = commaSepratedString
-        .split(LogSearchConstants.I_E_SEPRATOR);
+          .split(LogSearchConstants.I_E_SEPRATOR);
       int count = 0;
       for (String temp : msgList) {
         count += 1;
-
-        queryMsg = queryMsg + " " + operator + messageField + ":"
-          + solrUtil.makeSolrSearchString(temp);
-        if (msgList.length > count)
-          queryMsg = queryMsg + " " + booleanOperator + " ";
+        if (LogSearchConstants.SOLR_LOG_MESSAGE.equalsIgnoreCase(messageField)) {
+          queryMsg.append(" " + operator
+              + solrUtil.escapeForLogMessage(messageField, temp));
+        } else {
+          temp = solrUtil.escapeForStandardTokenizer(temp);
+          if(temp.startsWith("\"") && temp.endsWith("\"")){
+            temp = temp.substring(1);
+            temp = temp.substring(0, temp.length()-1);
+          }
+          temp = "*" + temp + "*";
+          queryMsg.append(" " + operator + messageField + ":"
+              + temp);
+        }
+        if (msgList.length > count){
+          queryMsg.append(" " + condition.name() + " ");
+        }
       }
-      filterQuery = queryMsg;
+      filterQuery = queryMsg.toString();
       solrQuery.addFilterQuery(filterQuery);
       logger.debug("Filter added :- " + filterQuery);
     }
   }
 
   public void setFilterClauseWithFieldName(SolrQuery solrQuery,
-                                           String commaSepratedString, String field, String operator,
-                                           String condition) {
-    if (commaSepratedString != null && !commaSepratedString.isEmpty()) {
-      String[] arrayOfSepratedString = commaSepratedString.split(",");
-      String filterQuery;
-      if ("OR".equals(condition))
-        filterQuery = solrUtil.orList(operator + field,
-          arrayOfSepratedString, "");
-      else
-        filterQuery = solrUtil.andList(operator + field,
-          arrayOfSepratedString, "");
-      solrQuery.addFilterQuery(filterQuery);
-      logger.debug("Filter added :- " + filterQuery);
+      String commaSepratedString, String field, String operator,
+      CONDITION condition) {
+    if (!stringUtil.isEmpty(commaSepratedString)) {
+      String[] arrayOfSepratedString = commaSepratedString.split(LogSearchConstants.LIST_SEPARATOR);
+      String filterQuery = null;
+      if (CONDITION.OR.equals(condition)) {
+        filterQuery = solrUtil.orList(operator + field, arrayOfSepratedString,"");
+      } else if (CONDITION.AND.equals(condition)) {
+        filterQuery = solrUtil.andList(operator + field, arrayOfSepratedString,"");
+      }else{
+        logger.warn("Not a valid condition :" + condition.name());
+      }
+      //add
+      if(!stringUtil.isEmpty(filterQuery)){
+        solrQuery.addFilterQuery(filterQuery);
+        logger.debug("Filter added :- " + filterQuery);
+      }
+
     }
   }
 
   public void setSortOrderDefaultServiceLog(SolrQuery solrQuery,
-                                            SearchCriteria searchCriteria) {
+      SearchCriteria searchCriteria) {
     List<SolrQuery.SortClause> defaultSort = new ArrayList<SolrQuery.SortClause>();
     if (searchCriteria.getSortBy() != null
-      && (!searchCriteria.getSortBy().isEmpty())) {
+        && (!searchCriteria.getSortBy().isEmpty())) {
       ORDER order = SolrQuery.ORDER.asc;
       if (searchCriteria.getSortType() != null
-        && (!searchCriteria.getSortType().isEmpty())
-        && !searchCriteria.getSortType().equalsIgnoreCase(
-        order.toString())) {
+          && (!searchCriteria.getSortType().isEmpty())
+          && !searchCriteria.getSortType().equalsIgnoreCase(order.toString())) {
         order = SolrQuery.ORDER.desc;
       }
-      SolrQuery.SortClause logtimeSortClause = SolrQuery.SortClause
-        .create(searchCriteria.getSortBy(), order);
+      SolrQuery.SortClause logtimeSortClause = SolrQuery.SortClause.create(
+          searchCriteria.getSortBy(), order);
       defaultSort.add(logtimeSortClause);
     } else {
       // by default sorting by logtime and sequence number in
       // Descending order
-      SolrQuery.SortClause logtimeSortClause = SolrQuery.SortClause
-        .create(LogSearchConstants.LOGTIME, SolrQuery.ORDER.desc);
-
+      SolrQuery.SortClause logtimeSortClause = SolrQuery.SortClause.create(
+          LogSearchConstants.LOGTIME, SolrQuery.ORDER.desc);
       defaultSort.add(logtimeSortClause);
 
     }
     SolrQuery.SortClause sequenceNumberSortClause = SolrQuery.SortClause
-      .create(LogSearchConstants.SEQUNCE_ID, SolrQuery.ORDER.desc);
+        .create(LogSearchConstants.SEQUNCE_ID, SolrQuery.ORDER.desc);
     defaultSort.add(sequenceNumberSortClause);
     solrQuery.setSorts(defaultSort);
     logger.debug("Sort Order :-" + defaultSort);
@@ -157,20 +172,28 @@ public abstract class QueryGenerationBase extends QueryBase {
   // Example of list can be [logtime desc,seq_num desc]
   @SuppressWarnings("unchecked")
   public void setMultipleSortOrder(SolrQuery solrQuery,
-                                   SearchCriteria searchCriteria) {
+      SearchCriteria searchCriteria) {
     List<SolrQuery.SortClause> sort = new ArrayList<SolrQuery.SortClause>();
-    List<String> sortList = (List<String>) searchCriteria
-      .getParamValue("sort");
-    for (String sortOrder : sortList) {
-      String sortByAndOrder[] = sortOrder.split(" ");
-      ORDER order = sortByAndOrder[1].contains("asc") ? SolrQuery.ORDER.asc
-        : SolrQuery.ORDER.desc;
-      SolrQuery.SortClause sortOrder2 = SolrQuery.SortClause.create(
-        sortByAndOrder[0], order);
-      sort.add(sortOrder2);
-      logger.debug("Sort Order :-" + sort);
+    List<String> sortList = (List<String>) searchCriteria.getParamValue("sort");
+    if (sortList != null) {
+      for (String sortOrder : sortList) {
+        if (!stringUtil.isEmpty(sortOrder)) {
+          String sortByAndOrder[] = sortOrder.split(" ");
+          if (sortByAndOrder.length > 1) {
+            ORDER order = sortByAndOrder[1].contains("asc") ? SolrQuery.ORDER.asc
+                : SolrQuery.ORDER.desc;
+            SolrQuery.SortClause solrSortClause = SolrQuery.SortClause.create(
+                sortByAndOrder[0], order);
+            sort.add(solrSortClause);
+            logger.debug("Sort Order :-" + sort);
+          } else {
+            // log warn
+            logger.warn("Not a valid sort Clause " + sortOrder);
+          }
+        }
+      }
+      solrQuery.setSorts(sort);
     }
-    solrQuery.setSorts(sort);
   }
 
   public void setSingleIncludeFilter(SolrQuery solrQuery, String filterType,
@@ -184,25 +207,26 @@ public abstract class QueryGenerationBase extends QueryBase {
   }
 
   public void setSingleExcludeFilter(SolrQuery solrQuery, String filterType,
-                                     String filterValue) {
-    if (filterType != null && !filterType.isEmpty() && filterValue != null
-      && !filterValue.isEmpty()) {
-      String filterQuery = "-"
-        + buildFilterQuery(filterType, filterValue);
+      String filterValue) {
+    if (!stringUtil.isEmpty(filterValue) && !stringUtil.isEmpty(filterType)) {
+      String filterQuery = LogSearchConstants.MINUS_OPERATOR
+          + buildFilterQuery(filterType, filterValue);
       solrQuery.addFilterQuery(filterQuery);
       logger.debug("Filter added :- " + filterQuery);
     }
   }
 
   public void setSingleRangeFilter(SolrQuery solrQuery, String filterType,
-                                   String filterFromValue, String filterToValue) {
-    if (filterType != null && !filterType.isEmpty()
-      && filterFromValue != null && !filterFromValue.isEmpty()
-      && filterToValue != null && !filterToValue.isEmpty()) {
+      String filterFromValue, String filterToValue) {
+    if (!stringUtil.isEmpty(filterToValue)
+        && !stringUtil.isEmpty(filterType)
+        && !stringUtil.isEmpty(filterFromValue)) {
       String filterQuery = buildInclusiveRangeFilterQuery(filterType,
-        filterFromValue, filterToValue);
-      solrQuery.addFilterQuery(filterQuery);
-      logger.debug("Filter added :- " + filterQuery);
+          filterFromValue, filterToValue);
+      if (!stringUtil.isEmpty(filterQuery)) {
+        solrQuery.addFilterQuery(filterQuery);
+        logger.debug("Filter added :- " + filterQuery);
+      }
     }
   }
 
@@ -227,8 +251,10 @@ public abstract class QueryGenerationBase extends QueryBase {
         + " to " + maxRows.intValue());
   }
 
-  public void setSingleORFilter(SolrQuery solrQuery, String filterName1, String value1, String filterName2, String value2) {
-    String filterQuery = filterName1 + ":" + value1 + " OR " + filterName2 + ":" + value2;
+  public void setSingleORFilter(SolrQuery solrQuery, String filterName1,
+      String value1, String filterName2, String value2) {
+    String filterQuery = filterName1 + ":" + value1 + " " + CONDITION.OR.name()
+        + " " + filterName2 + ":" + value2;
     solrQuery.setFilterQueries(filterQuery);
   }
 
@@ -255,68 +281,66 @@ public abstract class QueryGenerationBase extends QueryBase {
     return filterQuery;
   }
 
-  public String buildQueryFromJSONCompHost(String jsonHCNames,
-                                           String selectedComponent) {
-    String queryHostComponent = "";
-    // Building and adding exclude string to filters
-    String selectedCompQuery = "";
-
-    if (selectedComponent != null && !selectedComponent.equals("")) {
-      String[] selectedComponents = selectedComponent.split(",");
-      selectedCompQuery = solrUtil.orList(LogSearchConstants.SOLR_COMPONENT, selectedComponents);
-
-    }
-
-    // Building Query of Host and Components from given json
-    if (jsonHCNames != null && !jsonHCNames.equals("")
-      && !jsonHCNames.equals("[]")) {
-
-      try {
-        JSONArray jarray = new JSONArray(jsonHCNames);
-        int flagHost = 0;
-        int flagComp;
-        int count;
-        for (int i = 0; i < jarray.length(); i++) {
-          if (flagHost == 1)
-            queryHostComponent = queryHostComponent + " OR ";
-          JSONObject jsonObject = jarray.getJSONObject(i);
-          String host = jsonObject.getString("h");
-          queryHostComponent = queryHostComponent + "( host:" + host;
-          List<String> components = JSONUtil.JSONToList(jsonObject
-            .getJSONArray("c"));
-          if (components.isEmpty())
-            queryHostComponent = queryHostComponent + " AND ";
-
-          flagComp = 0;
-          count = 0;
-          for (String comp : components) {
-            if (flagComp == 0)
-              queryHostComponent = queryHostComponent + " ( ";
-            count += 1;
-            queryHostComponent = queryHostComponent + " "
-              + " type:" + comp;
-            if (components.size() <= count)
-              queryHostComponent = queryHostComponent + " ) ";
-            else
-              queryHostComponent = queryHostComponent + " OR ";
-            flagComp = 1;
-          }
-          queryHostComponent = queryHostComponent + " ) ";
-          flagHost = 1;
-        }
-      } catch (JSONException e) {
-        logger.error(e);
-      }
-    }
-    if (selectedCompQuery != null && !selectedCompQuery.equals("")) {
-      if (queryHostComponent == null || queryHostComponent.equals(""))
-        queryHostComponent = selectedCompQuery;
-      else
-        queryHostComponent = queryHostComponent + " OR "
-          + selectedCompQuery;
-    }
-    return queryHostComponent;
-  }
+//  public String buildQueryFromJSONCompHost(String jsonHCNames,
+//      String selectedComponent) {
+//    String queryHostComponent = "";
+//    // Building and adding exclude string to filters
+//    String selectedCompQuery = "";
+//    if (!stringUtil.isEmpty(selectedComponent)) {
+//      String[] selectedComponents = selectedComponent
+//          .split(LogSearchConstants.LIST_SEPARATOR);
+//      selectedCompQuery = solrUtil.orList(LogSearchConstants.SOLR_COMPONENT,
+//          selectedComponents);
+//    }
+//
+//    // Building Query of Host and Components from given json
+//    if (jsonHCNames != null && !jsonHCNames.equals("")
+//        && !jsonHCNames.equals("[]")) {
+//
+//      try {
+//        JSONArray jarray = new JSONArray(jsonHCNames);
+//        int flagHost = 0;
+//        int flagComp;
+//        int count;
+//        for (int i = 0; i < jarray.length(); i++) {
+//          if (flagHost == 1)
+//            queryHostComponent = queryHostComponent + " OR ";
+//          JSONObject jsonObject = jarray.getJSONObject(i);
+//          String host = jsonObject.getString("h");
+//          queryHostComponent = queryHostComponent + "( host:" + host;
+//          List<String> components = JSONUtil.JSONToList(jsonObject
+//              .getJSONArray("c"));
+//          if (!components.isEmpty())
+//            queryHostComponent = queryHostComponent + " AND ";
+//
+//          flagComp = 0;
+//          count = 0;
+//          for (String comp : components) {
+//            if (flagComp == 0)
+//              queryHostComponent = queryHostComponent + " ( ";
+//            count += 1;
+//            queryHostComponent = queryHostComponent + " " + " type:" + comp;
+//            if (components.size() <= count)
+//              queryHostComponent = queryHostComponent + " ) ";
+//            else
+//              queryHostComponent = queryHostComponent + " OR ";
+//            flagComp = 1;
+//          }
+//          queryHostComponent = queryHostComponent + " ) ";
+//          flagHost = 1;
+//        }
+//      } catch (JSONException e) {
+//        logger.error(e);
+//      }
+//    }
+//    if (selectedCompQuery != null && !selectedCompQuery.equals("")) {
+//      if (queryHostComponent == null || queryHostComponent.equals(""))
+//        queryHostComponent = selectedCompQuery;
+//      else
+//        queryHostComponent = queryHostComponent + " OR " + selectedCompQuery;
+//    }
+//    return queryHostComponent;
+//  }
 
   // JSON BuildMethods
 
@@ -386,4 +410,60 @@ public abstract class QueryGenerationBase extends QueryBase {
     logger.info("Build JSONQuery is :- " + jsonQuery);
     return jsonQuery;
   }
+
+  public String buildListQuery(String paramValue, String solrFieldName,
+      CONDITION condition) {
+    if (!stringUtil.isEmpty(paramValue)) {
+      String[] values = paramValue.split(LogSearchConstants.LIST_SEPARATOR);
+      switch (condition) {
+      case OR:
+        return solrUtil.orList(solrFieldName, values,"*");
+      case AND:
+        return solrUtil.andList(solrFieldName, values, "");
+      default:
+        logger.error("Invalid condition " + condition.name());
+      }
+    }
+    return "";
+  }
+
+
+  public void addFilterQueryFromArray(SolrQuery solrQuery, String jsonArrStr,
+      String solrFieldName, CONDITION condition) {
+    if (!stringUtil.isEmpty(jsonArrStr) && condition != null && solrQuery!=null) {
+      Gson gson = new Gson();
+      String[] arr = null;
+      try {
+        arr = gson.fromJson(jsonArrStr, String[].class);
+      } catch (Exception exception) {
+        logger.error("Invaild json array:" + jsonArrStr);
+        return;
+      }
+      String query;
+      switch (condition) {
+      case OR:
+        query = solrUtil.orList(solrFieldName, arr,"");
+        break;
+      case AND:
+        query = solrUtil.andList(solrFieldName, arr, "");
+        break;
+      default:
+        query=null;
+        logger.error("Invalid condition :" + condition.name());
+      }
+      if (!stringUtil.isEmpty(query)) {
+        solrQuery.addFilterQuery(query);
+      }
+    }
+  }
+
+  public void addFilter(SolrQuery solrQuery, String paramValue,
+      String solrFieldName, CONDITION condition) {
+    String filterQuery = buildListQuery(paramValue, solrFieldName, condition);
+    if (!stringUtil.isEmpty(filterQuery)) {
+      if (solrQuery != null) {
+        solrQuery.addFilterQuery(filterQuery);
+      }
+    }
+  }
 }

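The new addFilterQueryFromArray() / CONDITION combination amounts to: deserialize a JSON string array with Gson, join the values with OR or AND, and attach the result as a Solr filter query. A standalone sketch under those assumptions (the JSON input is invented for the example, and the quoting applied by SolrUtil.orList is only approximated here):

import org.apache.solr.client.solrj.SolrQuery;

import com.google.gson.Gson;

public class ArrayFilterSketch {
  public static void main(String[] args) {
    // hypothetical component list as it might arrive from the UI
    String jsonArrStr = "[\"namenode\",\"datanode\"]";
    String[] values = new Gson().fromJson(jsonArrStr, String[].class);

    // join the values with OR, roughly what SolrUtil.orList produces
    StringBuilder fq = new StringBuilder("type:(");
    for (int i = 0; i < values.length; i++) {
      if (i > 0) {
        fq.append(" OR ");
      }
      fq.append("\"").append(values[i]).append("\"");
    }
    fq.append(")");

    SolrQuery solrQuery = new SolrQuery("*:*");
    solrQuery.addFilterQuery(fq.toString());
    System.out.println(solrQuery.getFilterQueries()[0]);
    // prints: type:("namenode" OR "datanode")
  }
}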
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditREST.java
index 6e47d34..92bfb01 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditREST.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditREST.java
@@ -28,8 +28,6 @@ import javax.ws.rs.core.Response;
 
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.manager.AuditMgr;
-import org.apache.commons.lang.StringEscapeUtils;
-import org.apache.log4j.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Scope;
 import org.springframework.stereotype.Component;

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/DashboardREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/DashboardREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/DashboardREST.java
index 5f56ccb..1e107ed 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/DashboardREST.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/DashboardREST.java
@@ -114,7 +114,7 @@ public class DashboardREST {
     searchCriteria
       .addParam("startDate", request.getParameter("start_time"));
     searchCriteria.addParam("endDate", request.getParameter("end_time"));
-    return logMgr.getComponenetsCount(searchCriteria);
+    return logMgr.getComponentsCount(searchCriteria);
   }
 
   @GET
@@ -188,7 +188,6 @@ public class DashboardREST {
     searchCriteria.addParam("hostLogFile", request.getParameter("host"));
     searchCriteria.addParam("compLogFile",
       request.getParameter("component"));
-    searchCriteria.addParam("unit", request.getParameter("unit"));
     searchCriteria.addParam("format", request.getParameter("format"));
     searchCriteria.addParam("utcOffset", request.getParameter("utcOffset"));
     return logMgr.exportToTextFile(searchCriteria);
@@ -250,14 +249,6 @@ public class DashboardREST {
   }
 
   @GET
-  @Path("/getCurrentPageOfKeywordSearch")
-  @Produces({"application/json"})
-  public String getCurrentPageOfKeywordSearch(@Context HttpServletRequest request) {
-    String requestDate = (String) request.getParameter("requestDate");
-    return logMgr.getCurrentPageOfKeywordSearch(requestDate);
-  }
-
-  @GET
   @Path("/getAnyGraphData")
   @Produces({"application/json"})
   public String getAnyGraphData(@Context HttpServletRequest request) {
@@ -290,17 +281,6 @@ public class DashboardREST {
   }
 
   @GET
-  @Path("/getSuggestoins")
-  @Produces({"application/json"})
-  public String getSuggestions(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria();
-    searchCriteria.addParam("fieldName", request.getParameter("fieldName"));
-    searchCriteria.addParam("valueToSuggest",
-      request.getParameter("valueToSuggest"));
-    return logMgr.getSuggestions(searchCriteria);
-  }
-
-  @GET
   @Path("/getHadoopServiceConfigJSON")
   @Produces({"application/json"})
   public String getHadoopServiceConfigJSON() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
index c459ab7..40b215c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
@@ -33,8 +33,6 @@ import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.manager.UserConfigMgr;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
-import org.apache.ambari.logsearch.view.VLogfeederFilter;
-import org.apache.ambari.logsearch.view.VLogfeederFilterWrapper;
 import org.apache.ambari.logsearch.view.VUserConfig;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Scope;


[9/9] ambari git commit: AMBARI-16034. Incremental changes to LogSearch to bring it up to date in the trunk (Dharmesh Makwana via oleewere)

Posted by ol...@apache.org.
AMBARI-16034. Incremental changes to LogSearch to bring it up to date in the trunk (Dharmesh Makwana via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/888faf26
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/888faf26
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/888faf26

Branch: refs/heads/trunk
Commit: 888faf260d2d9fc454072ed69d1149c733ca98f0
Parents: 25ed583
Author: oleewere <ol...@gmail.com>
Authored: Mon Apr 25 16:48:31 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Mon Apr 25 17:10:34 2016 +0200

----------------------------------------------------------------------
 .../logsearch/appender/LogsearchAppender.java   |   50 -
 .../logsearch/appender/LogsearchConversion.java |    6 +
 .../appender/LogsearchRollingFileAppender.java  |   43 +
 .../src/test/resources/log4j.properties         |   11 +-
 .../ambari-logsearch-logfeeder/build.xml        |    3 -
 .../ambari-logsearch-logfeeder/pom.xml          |    8 +-
 .../apache/ambari/logfeeder/LogFeederUtil.java  |   11 +-
 .../logconfig/FetchConfigFromSolr.java          |   52 +-
 .../logfeeder/logconfig/LogfeederScheduler.java |    3 +-
 .../logconfig/filter/ApplyLogFilter.java        |    7 +-
 .../logconfig/filter/DefaultDataFilter.java     |    5 +-
 .../logconfig/filter/FilterLogData.java         |    2 +-
 .../apache/ambari/logfeeder/output/Output.java  |   14 +
 .../ambari/logfeeder/output/OutputKafka.java    |    2 +-
 .../ambari/logfeeder/output/OutputSolr.java     |    3 +-
 .../apache/ambari/logfeeder/util/SolrUtil.java  |   28 +-
 .../src/main/resources/config.json.j2           |   21 +-
 .../src/main/resources/log4j.xml                |   17 +-
 .../src/main/scripts/run.sh                     |    6 +
 .../logfeeder/filter/JSONFilterCodeTest.java    |   10 +-
 .../ambari-logsearch-portal/build.xml           |    3 -
 .../ambari-logsearch-portal/pom.xml             |    7 +-
 ambari-logsearch/ambari-logsearch-portal/run.sh |    8 +-
 .../configsets/audit_logs/conf/managed-schema   |    9 +-
 .../configsets/hadoop_logs/conf/managed-schema  |   37 +-
 .../main/configsets/history/conf/managed-schema |   98 +
 .../src/main/configsets/history/conf/schema.xml |   49 -
 .../main/configsets/history/conf/solrconfig.xml | 1885 +++++++++++++++++-
 .../logsearch/common/LogSearchConstants.java    |   25 +
 .../logsearch/common/ManageStartEndTime.java    |    6 +-
 .../ambari/logsearch/common/MessageEnums.java   |   33 +-
 .../ambari/logsearch/common/SearchCriteria.java |    9 +-
 .../logsearch/common/UserSessionInfo.java       |    2 +-
 .../ambari/logsearch/dao/AuditSolrDao.java      |    5 +
 .../logsearch/dao/ServiceLogsSolrDao.java       |    6 +
 .../ambari/logsearch/dao/SolrDaoBase.java       |  113 +-
 .../ambari/logsearch/dao/UserConfigSolrDao.java |    5 +
 .../logsearch/graph/GraphDataGenerator.java     |  414 ++++
 .../logsearch/graph/GraphDataGeneratorBase.java |  316 ++-
 .../logsearch/graph/GraphDataGnerator.java      |  397 ----
 .../ambari/logsearch/manager/AuditMgr.java      |  302 ++-
 .../ambari/logsearch/manager/LogFileMgr.java    |   50 +-
 .../ambari/logsearch/manager/LogsMgr.java       | 1460 +++++++-------
 .../ambari/logsearch/manager/MgrBase.java       |  104 +-
 .../ambari/logsearch/manager/PublicMgr.java     |   11 +-
 .../ambari/logsearch/manager/UserConfigMgr.java |  212 +-
 .../ambari/logsearch/query/QueryGeneration.java |  465 +++--
 .../logsearch/query/QueryGenerationBase.java    |  338 ++--
 .../apache/ambari/logsearch/rest/AuditREST.java |    2 -
 .../ambari/logsearch/rest/DashboardREST.java    |   22 +-
 .../ambari/logsearch/rest/UserConfigREST.java   |    2 -
 .../apache/ambari/logsearch/util/BizUtil.java   |  292 ++-
 .../ambari/logsearch/util/ConfigUtil.java       |   29 +-
 .../apache/ambari/logsearch/util/DateUtil.java  |   54 +-
 .../apache/ambari/logsearch/util/FileUtil.java  |   42 +-
 .../apache/ambari/logsearch/util/JSONUtil.java  |    5 +-
 .../apache/ambari/logsearch/util/QueryBase.java |   61 +-
 .../ambari/logsearch/util/RESTErrorUtil.java    |    4 +-
 .../apache/ambari/logsearch/util/SolrUtil.java  |  177 +-
 .../ambari/logsearch/util/StringUtil.java       |    6 +-
 .../logsearch/util/XMLPropertiesUtil.java       |   57 +-
 .../ambari/logsearch/view/VBarDataList.java     |    6 +-
 .../ambari/logsearch/view/VCountList.java       |    3 +-
 .../ambari/logsearch/view/VGroupList.java       |    3 +-
 .../org/apache/ambari/logsearch/view/VHost.java |    4 +-
 .../ambari/logsearch/view/VNameValue.java       |    5 +-
 .../ambari/logsearch/view/VNameValueList.java   |    2 +-
 .../apache/ambari/logsearch/view/VNodeList.java |    2 +-
 .../ambari/logsearch/view/VSolrLogList.java     |    6 +-
 .../apache/ambari/logsearch/view/VSummary.java  |   16 +-
 .../ambari/logsearch/view/VSummaryCount.java    |    8 +-
 .../logsearch/view/VSummaryCountList.java       |    2 +-
 .../ambari/logsearch/view/VUserConfig.java      |    8 +-
 .../ambari/logsearch/view/VUserConfigList.java  |    8 +-
 .../LogsearchLogoutSuccessHandler.java          |    2 +-
 .../web/listeners/SpringEventListener.java      |    2 +-
 ...LogsearchAbstractAuthenticationProvider.java |    4 +-
 .../LogsearchAuthenticationProvider.java        |    6 +-
 .../LogsearchFileAuthenticationProvider.java    |    4 +
 .../LogsearchLdapAuthenticationProvider.java    |    4 +
 .../LogsearchSimpleAuthenticationProvider.java  |    4 +
 .../src/main/resources/default.properties       |    1 +
 .../src/main/resources/log4j.xml                |   47 +-
 .../src/main/resources/logsearch.properties     |   14 +-
 .../src/main/resources/logsearch.properties.j2  |    5 +
 .../src/main/scripts/run.sh                     |    5 +
 .../src/main/webapp/index.html                  |    6 +-
 .../src/main/webapp/scripts/utils/Utils.js      |   72 +-
 .../src/main/webapp/scripts/utils/ViewUtils.js  |    7 +-
 .../main/webapp/scripts/views/common/Header.js  |    7 +-
 .../webapp/scripts/views/common/TableLayout.js  |    2 +-
 .../dashboard/BubbleGraphTableLayoutView.js     |   29 +-
 .../scripts/views/dashboard/HostListView.js     |    5 +-
 .../views/dialog/SaveSearchFilterView.js        |    4 +-
 .../views/filter/CreateLogfeederFilterView.js   |  113 +-
 .../webapp/scripts/views/tabs/LogFileView.js    |   13 +-
 .../main/webapp/scripts/views/tabs/TreeView.js  |    1 -
 .../troubleshoot/TroubleShootLayoutView.js      |   70 +-
 .../src/main/webapp/styles/style.css            |  120 +-
 .../src/main/webapp/styles/style_v2.css         |    2 +-
 .../webapp/templates/common/Header_tmpl.html    |   13 +-
 .../dashboard/MainLayoutView_tmpl.html          |    4 +-
 .../filter/CreateLogfeederFilter_tmpl.html      |   19 +-
 .../webapp/templates/tabs/LogFileView_tmpl.html |    4 +-
 .../TroubleShootLayoutView_tmpl.html            |    3 +-
 ambari-logsearch/pom.xml                        |    1 +
 106 files changed, 5384 insertions(+), 2706 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchAppender.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchAppender.java b/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchAppender.java
deleted file mode 100644
index 4339a21..0000000
--- a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchAppender.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.appender;
-
-import org.apache.log4j.DailyRollingFileAppender;
-import org.apache.log4j.Layout;
-import org.apache.log4j.Logger;
-import org.apache.log4j.spi.LoggingEvent;
-
-public class LogsearchAppender extends DailyRollingFileAppender {
-  private static Logger logger = Logger.getLogger(LogsearchAppender.class);
-
-  public LogsearchAppender() {
-    logger.debug("Initializing LogsearchAppender........... ");
-  }
-
-  @Override
-  public void append(LoggingEvent event) {
-    super.append(event);
-  }
-
-  @Override
-  public void setLayout(Layout layout) {
-    super.setLayout(layout);
-  }
-
-  protected void subAppend(LoggingEvent event) {
-    this.qw.write(this.layout.format(event));
-    if (shouldFlush(event)) {
-      this.qw.flush();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java b/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java
index b9e7527..dbdfe6c 100644
--- a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java
+++ b/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java
@@ -70,4 +70,10 @@ public class LogsearchConversion extends EnhancedPatternLayout {
     e.printStackTrace(pw);
     return sw.toString();
   }
+  
+  @Override
+  public boolean ignoresThrowable() {
+    // returning false tells the appender that this layout renders the throwable itself,
+    // so the appender must not append the stack trace separately
+    return false;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchRollingFileAppender.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchRollingFileAppender.java b/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchRollingFileAppender.java
new file mode 100644
index 0000000..6a93db5
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchRollingFileAppender.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.appender;
+
+import org.apache.log4j.Layout;
+import org.apache.log4j.Logger;
+import org.apache.log4j.RollingFileAppender;
+import org.apache.log4j.spi.LoggingEvent;
+
+public class LogsearchRollingFileAppender extends RollingFileAppender {
+  private static Logger logger = Logger.getLogger(LogsearchRollingFileAppender.class);
+
+  public LogsearchRollingFileAppender() {
+    logger.trace("Initializing LogsearchRollingFileAppender........... ");
+  }
+
+  @Override
+  public void append(LoggingEvent event) {
+    super.append(event);
+  }
+
+  @Override
+  public void setLayout(Layout layout) {
+    super.setLayout(layout);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-appender/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-appender/src/test/resources/log4j.properties b/ambari-logsearch/ambari-logsearch-appender/src/test/resources/log4j.properties
index b51e87d..78fb66b 100644
--- a/ambari-logsearch/ambari-logsearch-appender/src/test/resources/log4j.properties
+++ b/ambari-logsearch/ambari-logsearch-appender/src/test/resources/log4j.properties
@@ -13,11 +13,12 @@
 # log4j configuration used during build and unit tests
 
 # Root logger option
-log4j.rootLogger=ALL, logsearchJson
+log4j.rootLogger=INFO, logsearchJson
 
 # Redirect log messages to a logsearch json 
-log4j.appender.logsearchJson=org.apache.ambari.logsearch.appender.LogsearchAppender
-log4j.appender.logsearchJson.File=target/logsearch/log.json
-log4j.appender.logsearchJson.datePattern='.'yyyy-MM-dd
-log4j.appender.allLog.Append=true
+log4j.appender.logsearchJson=org.apache.ambari.logsearch.appender.LogsearchRollingFileAppender
+log4j.appender.logsearchJson.File=target/jsonlog/log.json
+log4j.appender.logsearchJson.maxFileSize=10MB
+log4j.appender.logsearchJson.maxBackupIndex=10
+log4j.appender.logsearchJson.Append=true
 log4j.appender.logsearchJson.layout=org.apache.ambari.logsearch.appender.LogsearchConversion
\ No newline at end of file

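For reference, a programmatic log4j 1.x equivalent of the properties above could look roughly like this (a sketch only; it assumes LogsearchConversion exposes the usual no-argument layout constructor):

import org.apache.ambari.logsearch.appender.LogsearchConversion;
import org.apache.ambari.logsearch.appender.LogsearchRollingFileAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

public class AppenderSetupSketch {
  public static void main(String[] args) {
    LogsearchRollingFileAppender appender = new LogsearchRollingFileAppender();
    appender.setFile("target/jsonlog/log.json");
    appender.setMaxFileSize("10MB");
    appender.setMaxBackupIndex(10);
    appender.setAppend(true);
    appender.setLayout(new LogsearchConversion());
    appender.activateOptions(); // opens the file and applies the size/backup settings

    Logger root = Logger.getRootLogger();
    root.setLevel(Level.INFO);
    root.addAppender(appender);
    root.info("written to log.json as a JSON line by LogsearchConversion");
  }
}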
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/build.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/build.xml b/ambari-logsearch/ambari-logsearch-logfeeder/build.xml
index 53f893e..738b2ef 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/build.xml
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/build.xml
@@ -32,9 +32,6 @@
     <copy todir="target/package/libs" includeEmptyDirs="no">
       <fileset dir="target/libs"/>
     </copy>
-    <copy todir="target/package/" includeEmptyDirs="no">
-      <fileset file="target/LogFeeder.jar"/>
-    </copy>
     <copy todir="target/package/classes" includeEmptyDirs="no">
       <fileset dir="target/classes"/>
     </copy>

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml b/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
index 0888010..dc1b361 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
@@ -39,6 +39,11 @@
 
   <dependencies>
     <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-logsearch-appender</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>
@@ -79,6 +84,7 @@
       <artifactId>commons-logging</artifactId>
       <version>1.1.1</version>
     </dependency>
+
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
@@ -109,8 +115,8 @@
       <artifactId>jackson-xc</artifactId>
       <version>1.9.13</version>
     </dependency>
-  </dependencies>
 
+  </dependencies>
   <build>
     <finalName>LogFeeder</finalName>
     <pluginManagement>

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederUtil.java
index 7303694..7a30d72 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederUtil.java
@@ -33,9 +33,11 @@ import java.util.Hashtable;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.TimeZone;
 
 import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
 import org.apache.ambari.logfeeder.mapper.Mapper;
 import org.apache.ambari.logfeeder.output.Output;
 import org.apache.commons.lang3.StringUtils;
@@ -58,6 +60,7 @@ public class LogFeederUtil {
 
   final static int HASH_SEED = 31174077;
   public final static String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";
+  public final static String SOLR_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
   static Gson gson = new GsonBuilder().setDateFormat(DATE_FORMAT).create();
 
   static Properties props;
@@ -324,6 +327,9 @@ public class LogFeederUtil {
   }
 
   public static Map<String, Object> toJSONObject(String jsonStr) {
+    if(jsonStr==null || jsonStr.trim().isEmpty()){
+      return new HashMap<String, Object>();
+    }
     Type type = new TypeToken<Map<String, Object>>() {
     }.getType();
     return gson.fromJson(jsonStr, type);
@@ -380,7 +386,8 @@ public class LogFeederUtil {
 
   public static String getDate(String timeStampStr) {
     try {
-      DateFormat sdf = new SimpleDateFormat(DATE_FORMAT);
+      DateFormat sdf = new SimpleDateFormat(SOLR_DATE_FORMAT);
+      sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
       Date netDate = (new Date(Long.parseLong(timeStampStr)));
       return sdf.format(netDate);
     } catch (Exception ex) {
@@ -468,7 +475,7 @@ public class LogFeederUtil {
               return true;
             }
           }
-          if (value.equalsIgnoreCase("ALL")) {
+          if (value.equalsIgnoreCase(LogFeederConstants.ALL)) {
             return true;
           }
         }

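The getDate() change above matters because SimpleDateFormat renders the JVM's local time zone unless one is set explicitly, while Solr expects ISO-8601 timestamps in UTC. A standalone illustration of the same formatting:

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class SolrDateFormatExample {
  public static void main(String[] args) {
    // same pattern as LogFeederUtil.SOLR_DATE_FORMAT
    String solrDateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
    SimpleDateFormat sdf = new SimpleDateFormat(solrDateFormat);
    sdf.setTimeZone(TimeZone.getTimeZone("UTC")); // without this, the local zone leaks into the value

    System.out.println(sdf.format(new Date(1461596951000L)));
    // prints: 2016-04-25T15:09:11.000Z
  }
}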
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
index fc12458..e645a3d 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
@@ -40,12 +40,18 @@ public class FetchConfigFromSolr extends Thread {
   private static String endTimeDateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSS";//2016-04-05T04:30:00.000Z
   private static String sysTimeZone = "GMT";
 
-  public FetchConfigFromSolr() {
+  public FetchConfigFromSolr(boolean isDaemon) {
     this.setName(this.getClass().getSimpleName());
+    this.setDaemon(isDaemon);
   }
 
   @Override
   public void run() {
+    String zkHosts = LogFeederUtil.getStringProperty("logfeeder.solr.zkhosts");
+    if( zkHosts == null || zkHosts.trim().length() == 0 ) {
+      logger.warn("Solr ZKHosts for UserConfig/History is not set. Won't look for level configuration from Solr.");
+      return;
+    }
     solrConfigInterval = LogFeederUtil.getIntProperty("logfeeder.solr.config.internal", solrConfigInterval);
     delay = 1000 * solrConfigInterval;
     do {
@@ -60,11 +66,14 @@ public class FetchConfigFromSolr extends Thread {
   }
 
   private synchronized void pullConfigFromSolr() {
-    HashMap<String, Object> configDocMap = SolrUtil.getInstance().getConfigDoc();
-    if (configDocMap != null) {
-      String configJson = (String) configDocMap.get(LogFeederConstants.VALUES);
-      if (configJson != null) {
-        logfeederFilterWrapper = LogFeederUtil.getGson().fromJson(configJson, VLogfeederFilterWrapper.class);
+    SolrUtil solrUtil = SolrUtil.getInstance();
+    if(solrUtil!=null){
+      HashMap<String, Object> configDocMap = solrUtil.getConfigDoc();
+      if (configDocMap != null) {
+        String configJson = (String) configDocMap.get(LogFeederConstants.VALUES);
+        if (configJson != null) {
+          logfeederFilterWrapper = LogFeederUtil.getGson().fromJson(configJson, VLogfeederFilterWrapper.class);
+        }
       }
     }
   }
@@ -117,19 +126,28 @@ public class FetchConfigFromSolr extends Thread {
     List<String> hosts = componentFilter.getHosts();
     List<String> defaultLevels = componentFilter.getDefaultLevels();
     List<String> overrideLevels = componentFilter.getOverrideLevels();
-    if (LogFeederUtil.isListContains(hosts, hostName, false)) {
-      if (isFilterExpired(componentFilter)) {
-        // pick default
-        logger.debug("Filter for component " + componentName + " and host :" + hostName + " is expired at "
-          + componentFilter.getExpiryTime());
-        return defaultLevels;
-      } else {
-        // return tmp filter levels
-        return overrideLevels;
+    String expiryTime=componentFilter.getExpiryTime();
+    // check whether a user override is defined for this component
+    if ((expiryTime != null && !expiryTime.isEmpty())
+        || (overrideLevels != null && !overrideLevels.isEmpty())
+        || (hosts != null && !hosts.isEmpty())) {
+      if (hosts == null || hosts.isEmpty()) {
+        // a null or empty hosts list means the filter applies to all hosts
+        hosts = new java.util.ArrayList<String>();
+        hosts.add(LogFeederConstants.ALL);
+      }
+      if (LogFeederUtil.isListContains(hosts, hostName, false)) {
+        if (isFilterExpired(componentFilter)) {
+          // pick default
+          logger.debug("Filter for component " + componentName + " and host :"
+              + hostName + " is expired at " + componentFilter.getExpiryTime());
+          return defaultLevels;
+        } else {
+          // return tmp filter levels
+          return overrideLevels;
+        }
       }
-    } else {
-      return defaultLevels;
     }
+    return defaultLevels;
   }
 
   public static VLogfeederFilter findComponentFilter(String componentName) {
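
For readers following the hunk above, the sketch below restates the new getAllowedLevels() decision flow in isolation: a filter counts as a user override when it carries an expiry time, override levels, or an explicit host list; an empty host list is treated as matching every host, and an expired or non-matching override falls back to the default levels. This is a simplified illustration with hypothetical stand-in types, not the commit's code, and it copies the host list so a null or shared list is never mutated.

    // Illustrative sketch only: ComponentFilter and ALL are hypothetical stand-ins,
    // not the VLogfeederFilter/LogFeederConstants types used by the commit.
    import java.util.ArrayList;
    import java.util.List;

    final class LevelSelectionSketch {
      static final String ALL = "*";

      // A filter counts as a user override when it carries an expiry time,
      // override levels or an explicit host list; otherwise defaults apply.
      static List<String> allowedLevels(String hostName, ComponentFilter filter) {
        List<String> hosts = new ArrayList<String>();
        if (filter.getHosts() != null) {
          hosts.addAll(filter.getHosts());
        }
        boolean userOverride = notEmpty(filter.getExpiryTime())
            || (filter.getOverrideLevels() != null && !filter.getOverrideLevels().isEmpty())
            || !hosts.isEmpty();
        if (userOverride) {
          if (hosts.isEmpty()) {
            hosts.add(ALL); // no host restriction means the override applies everywhere
          }
          boolean hostMatches = hosts.contains(ALL) || hosts.contains(hostName);
          if (hostMatches && !filter.isExpired()) {
            return filter.getOverrideLevels(); // temporary user override
          }
        }
        return filter.getDefaultLevels(); // expired, non-matching or plain default filter
      }

      private static boolean notEmpty(String s) {
        return s != null && !s.isEmpty();
      }
    }

    // Hypothetical value object used only by the sketch above.
    interface ComponentFilter {
      String getExpiryTime();
      List<String> getHosts();
      List<String> getOverrideLevels();
      List<String> getDefaultLevels();
      boolean isExpired();
    }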

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java
index 7525dff..128c5c4 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java
@@ -52,7 +52,8 @@ public enum LogfeederScheduler {
 
   private List<Thread> getThreadList() {
     List<Thread> tasks = new ArrayList<Thread>();
-    tasks.add(new FetchConfigFromSolr());
+    Thread configMonitor = new FetchConfigFromSolr(true);
+    tasks.add(configMonitor);
     return tasks;
   }
 }
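
The only functional change in this file is that the Solr config monitor is now created as a daemon thread, so a lingering monitor no longer keeps the Logfeeder JVM alive during shutdown. A minimal, generic illustration of the flag (not the project's scheduler code):

    public class DaemonMonitorSketch {
      public static void main(String[] args) {
        Thread monitor = new Thread(new Runnable() {
          @Override
          public void run() {
            // ... periodically poll configuration from Solr ...
          }
        }, "config-monitor");
        monitor.setDaemon(true); // a daemon thread does not block JVM exit; set before start()
        monitor.start();
      }
    }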

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java
index 3748445..f223207 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java
@@ -50,7 +50,12 @@ public class ApplyLogFilter extends DefaultDataFilter {
             //return default value if there is no filter found for particular component
             return defaultValue;
           }
-          List<String> allowedLevels = FetchConfigFromSolr.getAllowedLevels(hostName, componentFilter);
+          List<String> allowedLevels = FetchConfigFromSolr.getAllowedLevels(
+              hostName, componentFilter);
+          if (allowedLevels == null || allowedLevels.isEmpty()) {
+            // if the allowed levels list is empty, allow everything
+            allowedLevels.add(LogFeederConstants.ALL);
+          }
           return LogFeederUtil.isListContains(allowedLevels, level, false);
         }
       }
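
The guard added above falls back to "allow everything" when no levels are configured for a component. A minimal sketch of that fallback with hypothetical names; it also avoids mutating a possibly null list returned by the lookup:

    // Sketch: decide whether a log line's level passes the component filter.
    import java.util.ArrayList;
    import java.util.List;

    final class LevelFilterSketch {
      static final String ALL = "*";

      static boolean isAllowed(List<String> allowedLevels, String level) {
        List<String> levels = new ArrayList<String>();
        if (allowedLevels != null) {
          levels.addAll(allowedLevels);
        }
        if (levels.isEmpty()) {
          levels.add(ALL); // nothing configured for the component: allow everything
        }
        if (levels.contains(ALL)) {
          return true;
        }
        for (String allowed : levels) {
          if (allowed.equalsIgnoreCase(level)) {
            return true;
          }
        }
        return false;
      }
    }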

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/DefaultDataFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/DefaultDataFilter.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/DefaultDataFilter.java
index 9e98c6a..a064663 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/DefaultDataFilter.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/DefaultDataFilter.java
@@ -18,7 +18,6 @@
  */
 package org.apache.ambari.logfeeder.logconfig.filter;
 
-import java.util.List;
 import java.util.Map;
 
 import org.apache.log4j.Logger;
@@ -28,9 +27,7 @@ import org.apache.log4j.Logger;
  */
 public class DefaultDataFilter {
   private static Logger logger = Logger.getLogger(DefaultDataFilter.class);
-
-  protected static final boolean CASE_SENSITIVE = false;
-
+  
   public boolean applyFilter(Map<String, Object> outputJsonObj, boolean defaultValue) {
     return defaultValue;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java
index 643df98..bf33f93 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java
@@ -40,7 +40,7 @@ public enum FilterLogData {
       return defaultValue;
     }
     Map<String, Object> jsonObj = LogFeederUtil.toJSONObject(jsonBlock);
-    return applyLogFilter.applyFilter(jsonObj, defaultValue);
+    return isAllowed(jsonObj);
   }
 
   public boolean isAllowed(Map<String, Object> jsonObj) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java
index dd67d07..c067680 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java
@@ -22,6 +22,7 @@ package org.apache.ambari.logfeeder.output;
 import java.lang.reflect.Type;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 
 import org.apache.ambari.logfeeder.ConfigBlock;
 import org.apache.ambari.logfeeder.LogFeederUtil;
@@ -115,5 +116,18 @@ public abstract class Output extends ConfigBlock {
     logStatForMetric(writeBytesMetric, "Stat: Bytes Written");
 
   }
+  
+  public void trimStrValue(Map<String, Object> jsonObj) {
+    if (jsonObj != null) {
+      for (Entry<String, Object> entry : jsonObj.entrySet()) {
+        String key = entry.getKey();
+        Object value = entry.getValue();
+        if (value != null && value instanceof String) {
+          String valueStr = value.toString().trim();
+          jsonObj.put(key, valueStr);
+        }
+      }
+    }
+  }
 
 }
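
trimStrValue() above normalizes every String field of a parsed log record in place before it is handed to an output (OutputSolr.write() calls it further down in this commit), so stray whitespace does not leak into the index. A small usage sketch with made-up field names:

    // Sketch: whitespace around String values is stripped; other value types are untouched.
    import java.util.HashMap;
    import java.util.Map;

    public class TrimSketch {
      public static void main(String[] args) {
        Map<String, Object> record = new HashMap<String, Object>();
        record.put("level", "  ERROR ");   // hypothetical fields
        record.put("line_number", 100);    // non-String values are left as-is

        for (Map.Entry<String, Object> entry : record.entrySet()) {
          if (entry.getValue() instanceof String) {
            entry.setValue(((String) entry.getValue()).trim());
          }
        }
        System.out.println(record); // prints the trimmed record, e.g. {level=ERROR, line_number=100}
      }
    }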

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java
index cd4f951..efbc366 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java
@@ -283,4 +283,4 @@ public class OutputKafka extends Output {
       }
     }
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
index 215f691..6fb0b0e 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
@@ -194,6 +194,7 @@ public class OutputSolr extends Output {
   @Override
   public void write(Map<String, Object> jsonObj, InputMarker inputMarker) throws Exception {
     try {
+      trimStrValue(jsonObj);
       outgoingBuffer.put(new OutputData(jsonObj, inputMarker));
     } catch (InterruptedException e) {
       // ignore
@@ -442,4 +443,4 @@ public class OutputSolr extends Output {
       return localBuffer.isEmpty();
     }
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
index 4265dc6..200a603 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
@@ -23,6 +23,7 @@ import java.util.HashMap;
 
 import org.apache.ambari.logfeeder.LogFeederUtil;
 import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
+import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
@@ -64,7 +65,12 @@ public class SolrUtil {
           try {
             instance = new SolrUtil();
           } catch (Exception e) {
-            logger.error(e);
+            final String LOG_MESSAGE_KEY = SolrUtil.class
+                .getSimpleName() + "_SOLR_UTIL";
+              LogFeederUtil.logErrorMessageByInterval(
+                LOG_MESSAGE_KEY,
+                "Error constructing solrUtil", e, logger,
+                Level.WARN);
           }
         }
       }
@@ -162,6 +168,7 @@ public class SolrUtil {
       QueryResponse queryResponse = solrClient.query(solrQuery, METHOD.POST);
       return queryResponse;
     } else {
+      logger.error("solrClient is null; cannot execute Solr query");
       return null;
     }
   }
@@ -176,15 +183,18 @@ public class SolrUtil {
     String fq = LogFeederConstants.ROW_TYPE + ":" + LogFeederConstants.NAME;
     solrQuery.setFilterQueries(fq);
     try {
-      QueryResponse response = SolrUtil.getInstance().process(solrQuery);
-      SolrDocumentList documentList = response.getResults();
-      if (documentList != null && documentList.size() > 0) {
-        SolrDocument configDoc = documentList.get(0);
-        String configJson = LogFeederUtil.getGson().toJson(configDoc);
-        configMap = (HashMap<String, Object>) LogFeederUtil.toJSONObject(configJson);
+      QueryResponse response = process(solrQuery);
+      if (response != null) {
+        SolrDocumentList documentList = response.getResults();
+        if (documentList != null && documentList.size() > 0) {
+          SolrDocument configDoc = documentList.get(0);
+          String configJson = LogFeederUtil.getGson().toJson(configDoc);
+          configMap = (HashMap<String, Object>) LogFeederUtil
+              .toJSONObject(configJson);
+        }
       }
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error(e);
+    } catch (Exception e) {
+      logger.error("Error getting config", e);
     }
     return configMap;
   }
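
The SolrUtil changes above route construction failures through an interval-based error logger so a Solr outage does not flood the Logfeeder log with identical stack traces. The helper below is a generic, hypothetical sketch of that idea, not the LogFeederUtil.logErrorMessageByInterval implementation referenced in the diff:

    // Sketch: emit a given error message at most once per interval.
    import java.util.concurrent.ConcurrentHashMap;
    import org.apache.log4j.Logger;

    final class IntervalLoggerSketch {
      private static final long INTERVAL_MS = 30 * 1000L;
      private static final ConcurrentHashMap<String, Long> lastLogged =
          new ConcurrentHashMap<String, Long>();

      static void warnByInterval(Logger logger, String key, String message, Throwable t) {
        long now = System.currentTimeMillis();
        Long last = lastLogged.get(key);
        if (last == null || now - last >= INTERVAL_MS) {
          lastLogged.put(key, now);
          logger.warn(message, t); // subsequent identical errors are suppressed for the interval
        }
      }
    }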

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2 b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2
index 163ee2b..1c5ee8d 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2
@@ -179,17 +179,17 @@
 		{
 			"type":"logsearch_app",
 			"rowtype":"service",
-			"path":"{{logsearch_log_dir}}/logsearch.log"
+			"path":"{{logsearch_log_dir}}/logsearch.json"
 		},
 		{
 			"type":"logsearch_feeder",
 			"rowtype":"service",
-			"path":"{{logfeeder_log_dir}}/logfeeder.log"
+			"path":"{{logfeeder_log_dir}}/logfeeder.json"
 		},
 		{
 			"type":"logsearch_perf",
 			"rowtype":"service",
-			"path":"{{logsearch_log_dir}}/logsearch-performance.log"
+			"path":"{{logsearch_log_dir}}/logsearch-performance.json"
 		},
 		{
 			"type":"ranger_admin",
@@ -625,13 +625,22 @@
 			
 		},
 		{
+		"filter": "json",
+		"conditions": {
+			"fields": {
+				"type": [
+					"logsearch_app",
+					"logsearch_feeder",
+					"logsearch_perf"
+					]
+				}
+			}
+ 		},
+		{
 			"filter":"grok",
 			"conditions":{
 				"fields":{
 					"type":[
-						"logsearch_app",
-						"logsearch_feeder",
-						"logsearch_perf",
 						"ranger_admin",
 						"ranger_dbpatch"
 					]

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml
index 0717477..7ef967c 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml
@@ -33,24 +33,35 @@
     <layout class="org.apache.log4j.PatternLayout"> 
       <param name="ConversionPattern" value="%d [%t] %-5p %C{6} (%F:%L) - %m%n"/> 
     </layout> 
+  </appender>
+
+  <appender name="rolling_file_json"
+    class="org.apache.ambari.logsearch.appender.LogsearchRollingFileAppender">
+    <param name="file" value="logs/logsearch-logfeeder.json" />
+    <param name="append" value="true" />
+    <param name="maxFileSize" value="10MB" />
+    <param name="maxBackupIndex" value="10" />
+    <layout class="org.apache.ambari.logsearch.appender.LogsearchConversion" />
   </appender> 
+ 
 
   <!-- Logs to suppress BEGIN -->
   <category name="org.apache.solr.common.cloud.ZkStateReader" additivity="false">
     <priority value="error" />
-    <appender-ref ref="rolling_file" />
+    <appender-ref ref="daily_rolling_file" />
   </category>
 
   <category name="apache.solr.client.solrj.impl.CloudSolrClient" additivity="false">
     <priority value="fatal" />
-    <appender-ref ref="rolling_file" />
+    <appender-ref ref="daily_rolling_file" />
   </category>
   <!-- Logs to suppress END -->
 
   <category name="org.apache.ambari.logfeeder" additivity="false">
-    <priority value="info" />
+    <priority value="INFO" />
     <appender-ref ref="console" /> 
     <!-- <appender-ref ref="daily_rolling_file" /> -->
+    <appender-ref ref="rolling_file_json"/>
   </category>
 
   <root>

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/run.sh
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/run.sh b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/run.sh
index 1e7185a..356a358 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/run.sh
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/run.sh
@@ -47,6 +47,12 @@ fi
 
 if [ "$LOGFEEDER_CONF_DIR" = "" ]; then
     LOGFEEDER_CONF_DIR="/etc/logfeeder/conf"
+    if [ ! -d $LOGFEEDER_CONF_DIR ]; then
+      if [ -d $script_dir/classes ]; then
+        LOGFEEDER_CONF_DIR=$script_dir/classes
+      fi
+    fi
+
 fi
 
 LOGFEEDER_GC_LOGFILE=`dirname $LOGFILE`/logfeeder_gc.log

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/JSONFilterCodeTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/JSONFilterCodeTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/JSONFilterCodeTest.java
index ebfd0f5..fd14b48 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/JSONFilterCodeTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/JSONFilterCodeTest.java
@@ -18,10 +18,12 @@
 
 package org.apache.ambari.logfeeder.filter;
 
+import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.TimeZone;
 
 import org.apache.ambari.logfeeder.LogFeederUtil;
 import org.apache.ambari.logfeeder.OutputMgr;
@@ -64,7 +66,9 @@ public class JSONFilterCodeTest {
     EasyMock.replay(mockOutputMgr);
 
     Date d = new Date();
-    String dateString = new SimpleDateFormat(LogFeederUtil.DATE_FORMAT).format(d);
+    DateFormat sdf = new SimpleDateFormat(LogFeederUtil.SOLR_DATE_FORMAT);
+    sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
+    String dateString = sdf.format(d);
     jsonFilterCode.apply("{ logtime: '" + d.getTime() + "', line_number: 100 }", new InputMarker());
 
     EasyMock.verify(mockOutputMgr);
@@ -86,7 +90,9 @@ public class JSONFilterCodeTest {
     EasyMock.replay(mockOutputMgr);
 
     Date d = new Date();
-    String dateString = new SimpleDateFormat(LogFeederUtil.DATE_FORMAT).format(d);
+    DateFormat sdf = new SimpleDateFormat(LogFeederUtil.SOLR_DATE_FORMAT);
+    sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
+    String dateString = sdf.format(d);
     jsonFilterCode.apply("{ logtime: '" + d.getTime() + "', some_field: 'abc' }", new InputMarker());
 
     EasyMock.verify(mockOutputMgr);
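
The test change above pins the expected timestamp format to UTC: Solr stores dates in UTC, and a formatter using the JVM's default time zone would make the assertion machine-dependent. A minimal, standalone illustration (the pattern string is an assumption for the example, not necessarily LogFeederUtil.SOLR_DATE_FORMAT):

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    public class UtcFormatSketch {
      public static void main(String[] args) {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
        sdf.setTimeZone(TimeZone.getTimeZone("UTC")); // independent of the host's default zone
        System.out.println(sdf.format(new Date()));   // e.g. 2016-04-25T15:14:36.000Z
      }
    }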

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/build.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/build.xml b/ambari-logsearch/ambari-logsearch-portal/build.xml
index 22c7938..d6db5f0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/build.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/build.xml
@@ -38,9 +38,6 @@
     <copy todir="target/package/libs" includeEmptyDirs="no">
       <fileset dir="target/libs"/>
     </copy>
-    <copy todir="target/package/" includeEmptyDirs="no">
-      <fileset file="target/LogSearch.jar"/>
-    </copy>
     <copy todir="target/package/classes" includeEmptyDirs="no">
       <fileset dir="target/classes"/>
     </copy>

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/pom.xml b/ambari-logsearch/ambari-logsearch-portal/pom.xml
index 9c5ba72..799e95e 100755
--- a/ambari-logsearch/ambari-logsearch-portal/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/pom.xml
@@ -163,6 +163,7 @@
                 <exclude>src/main/webapp/styles/animate.css</exclude>
                 <exclude>**/*.json</exclude>
                 <exclude>src/main/webapp/styles/visualsearch.css</exclude>
+                <exclude>**/*.log</exclude>
               </excludes>
             </configuration>
             <executions>
@@ -542,6 +543,10 @@
       <artifactId>commons-io</artifactId>
       <version>2.4</version>
     </dependency>
-
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-logsearch-appender</artifactId>
+      <version>${project.version}</version>
+    </dependency>
   </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/run.sh
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/run.sh b/ambari-logsearch/ambari-logsearch-portal/run.sh
index 4b1be8b..ae3c779 100755
--- a/ambari-logsearch/ambari-logsearch-portal/run.sh
+++ b/ambari-logsearch/ambari-logsearch-portal/run.sh
@@ -17,6 +17,8 @@ echo "
 ███████╗╚██████╔╝╚██████╔╝    ███████║███████╗██║  ██║██║  ██║╚██████╗██║  ██║
 ╚══════╝ ╚═════╝  ╚═════╝     ╚══════╝╚══════╝╚═╝  ╚═╝╚═╝  ╚═╝ ╚═════╝╚═╝  ╚═╝
 "
-mvn clean compile -Pdev
-mvn exec:java -Pdev
-
+cd ..
+mvn clean compile package -Pdev
+cd ambari-logsearch-portal
+#mvn exec:java -Pdev
+java -cp target/libs/*:target/classes/ org.apache.ambari.logsearch.LogSearch

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema
index 4a6d3ac..d2a1b6d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema
@@ -47,6 +47,7 @@
   <fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
     <analyzer>
       <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+      <filter class="solr.LowerCaseFilterFactory"/>
     </analyzer>
   </fieldType>
   <fieldType name="n_gram" class="solr.TextField" sortMissingLast="true" omitNorms="true">
@@ -103,17 +104,13 @@
   <field name="req_self_id" type="key_lower_case" multiValued="false"/>
   <field name="event_md5" type="string" multiValued="false"/>
   <field name="file" type="key_lower_case" multiValued="false"/>
-  <field name="log_message" type="key_lower_case" multiValued="false" omitNorms="false"/>
+  <field name="log_message" type="text_std_token_lower_case" multiValued="false" omitNorms="false"/>
   <field name="logfile_line_number" type="tint" omitNorms="false"/>
-  <field name="message" type="text_std_token_lower_case" indexed="true" stored="true"/>
+  <!-- <field name="message" type="text_std_token_lower_case" indexed="true" stored="true"/> -->
   <field name="message_md5" type="string" multiValued="false"/>
   <field name="type" type="key_lower_case" multiValued="false"/>
   
   <dynamicField name='ngram_*' type="n_gram" multiValued="false" stored="false"/>
   <dynamicField name='std_*' type="text_std_token_lower_case" multiValued="false" stored="false"/>
   
-  <!-- Copying everything text -->
-  <copyField source="*" dest="text" maxChars="25000" />
-  
-  
 </schema>

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/hadoop_logs/conf/managed-schema
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/hadoop_logs/conf/managed-schema b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/hadoop_logs/conf/managed-schema
index fa33516..cad0955 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/hadoop_logs/conf/managed-schema
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/hadoop_logs/conf/managed-schema
@@ -1,4 +1,21 @@
 <?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
 <!-- Solr managed schema - automatically generated - DO NOT EDIT -->
 <schema name="hadoop-logs-schema" version="1.5">
   <uniqueKey>id</uniqueKey>
@@ -51,6 +68,7 @@
   <fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
     <analyzer>
       <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+      <filter class="solr.LowerCaseFilterFactory"/>
     </analyzer>
   </fieldType>
   <fieldType name="tfloat" class="solr.TrieFloatField" positionIncrementGap="0" precisionStep="8"/>
@@ -78,12 +96,12 @@
   <field name="ip" type="ip_address" multiValued="false"/>
   <field name="level" type="key_lower_case" multiValued="false"/>
   <field name="line_number" type="tint" omitNorms="false"/>
-  <field name="log_message" type="key_lower_case" multiValued="false" omitNorms="false"/>
+  <field name="log_message" type="text_std_token_lower_case" multiValued="false" omitNorms="false"/>
   <field name="logfile_line_number" type="tint" omitNorms="false"/>
   <field name="logger_name" type="key_lower_case" multiValued="false"/>
   <field name="logtime" type="tdate" multiValued="false"  docValues="true"/>
   <field name="logtype" type="key_lower_case" multiValued="false"/>
-  <field name="message" type="text_std_token_lower_case" indexed="true" stored="true"/>
+  <!-- <field name="message" type="text_std_token_lower_case" indexed="true" stored="true"/> -->
   <field name="message_md5" type="string" multiValued="false"/>
   <field name="method" type="key_lower_case" multiValued="false" omitNorms="false"/>
   <field name="path" type="key_lower_case" multiValued="false"/>
@@ -96,11 +114,16 @@
   
   <dynamicField name='ngram_*' type="n_gram" multiValued="false" stored="false"/>
   <dynamicField name='std_*' type="text_std_token_lower_case" multiValued="false" stored="false"/>
-  
+  <dynamicField name='key_*' type="key_lower_case" multiValued="false" stored="false"/>
+  <dynamicField name="ws_*" type="text_ws" multiValued="false" omitNorms="false" stored="false"/>
+  <copyField source="log_message" dest="key_log_message"/>
+  <!-- <copyField source="log_message" dest="ws_log_message"/> -->
+
   
   <!-- Copy Fields-->
   <!-- ngram fields -->
   <!-- Whenever using a copy field provide following structure "ngram_"<OriginalFieldName> --> 
+<!-- Commented out until properly tested. It is not currently used and wastes CPU, memory and disk space.
   <copyField source="bundle_id" dest="ngram_bundle_id"/>
   <copyField source="case_id" dest="ngram_case_id"/>
   <copyField source="cluster" dest="ngram_cluster"/>
@@ -115,11 +138,7 @@
   <copyField source="thread_name" dest="ngram_thread_name"/>
   <copyField source="type" dest="ngram_type"/>
   
-  <!-- Standard text tokenizer -->
-  <copyField source="log_message" dest="std_log_message"/>
-  
-  <!-- Copying everything text -->
-  <copyField source="*" dest="text" maxChars="25000" />
-  
+-->
+
   
 </schema>

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/history/conf/managed-schema
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/history/conf/managed-schema b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/history/conf/managed-schema
new file mode 100644
index 0000000..062c14f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/history/conf/managed-schema
@@ -0,0 +1,98 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- Solr managed schema - automatically generated - DO NOT EDIT -->
+<schema name="history" version="1.1">
+
+  <uniqueKey>composite_filtername-username</uniqueKey>
+  
+  <fieldType name="binary" class="solr.BinaryField"/>
+  <fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/>
+  <fieldType name="booleans" class="solr.BoolField" sortMissingLast="true" multiValued="true"/>
+  <fieldType name="date" class="solr.TrieDateField" positionIncrementGap="0" precisionStep="0"/>
+  <fieldType name="double" class="solr.TrieDoubleField" positionIncrementGap="0" precisionStep="0"/>
+  <fieldType name="float" class="solr.TrieFloatField" positionIncrementGap="0" precisionStep="0"/>
+  <fieldType name="ignored" class="solr.StrField" indexed="false" stored="false" multiValued="true"/>
+  <fieldType name="int" class="solr.TrieIntField" positionIncrementGap="0" precisionStep="0"/>
+  <fieldType name="ip_address" class="solr.TextField">
+    <analyzer>
+      <tokenizer class="solr.PathHierarchyTokenizerFactory" replace="." delimiter="."/>
+    </analyzer>
+  </fieldType>
+  <fieldType name="long" class="solr.TrieLongField" positionIncrementGap="0" precisionStep="0"/>
+  <fieldType name="random" class="solr.RandomSortField" indexed="true"/>
+  <fieldType name="string" class="solr.StrField" sortMissingLast="true"/>
+  <fieldType name="tdate" class="solr.TrieDateField" positionIncrementGap="0" precisionStep="6"/>
+  <fieldType name="tdates" class="solr.TrieDateField" positionIncrementGap="0" multiValued="true" precisionStep="6"/>
+  <fieldType name="tdouble" class="solr.TrieDoubleField" positionIncrementGap="0" precisionStep="8"/>
+  <fieldType name="tdoubles" class="solr.TrieDoubleField" positionIncrementGap="0" multiValued="true" precisionStep="8"/>
+  <fieldType name="text_general" class="solr.TextField" positionIncrementGap="100" multiValued="true">
+    <analyzer>
+      <tokenizer class="solr.StandardTokenizerFactory"/>
+      <filter class="solr.LowerCaseFilterFactory"/>
+    </analyzer>
+  </fieldType>
+  <fieldType name="n_gram" class="solr.TextField" sortMissingLast="true" omitNorms="true">
+    <analyzer>
+      <tokenizer class="solr.NGramTokenizerFactory"/>
+      <filter class="solr.LowerCaseFilterFactory"/>
+    </analyzer>
+  </fieldType>
+  <fieldType name="key_lower_case" class="solr.TextField"
+               sortMissingLast="true" omitNorms="true">
+      <analyzer>
+        <tokenizer class="solr.KeywordTokenizerFactory"/>
+        <filter class="solr.LowerCaseFilterFactory" />
+      </analyzer>
+    </fieldType>
+  <fieldType name="text_std_token_lower_case" class="solr.TextField" positionIncrementGap="100" multiValued="true">
+    <analyzer>
+      <tokenizer class="solr.StandardTokenizerFactory"/>
+      <filter class="solr.LowerCaseFilterFactory"/>
+    </analyzer>
+  </fieldType>
+  <fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
+    <analyzer>
+      <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+      <filter class="solr.LowerCaseFilterFactory"/>
+    </analyzer>
+  </fieldType>
+  <fieldType name="tfloat" class="solr.TrieFloatField" positionIncrementGap="0" precisionStep="8"/>
+  <fieldType name="tfloats" class="solr.TrieFloatField" positionIncrementGap="0" multiValued="true" precisionStep="8"/>
+  <fieldType name="tint" class="solr.TrieIntField" positionIncrementGap="0" precisionStep="8"/>
+  <fieldType name="tints" class="solr.TrieIntField" positionIncrementGap="0" multiValued="true" precisionStep="8"/>
+  <fieldType name="tlong" class="solr.TrieLongField" positionIncrementGap="0" precisionStep="8"/>
+  <fieldType name="tlongs" class="solr.TrieLongField" positionIncrementGap="0" multiValued="true" precisionStep="8"/>
+
+  <solrQueryParser defaultOperator="OR"/>
+
+  <field name="_version_" type="long" indexed="true" stored="true"/>
+  <field name="composite_filtername-username" type="string"/>
+  <field name="filtername" type="key_lower_case" indexed="true" required="true" stored="true"/>
+  <field name="id" type="string" required="true"/>
+  <field name="jsons" type="string"/>
+  <field name="rowtype" type="string" required="true"/>
+  <field name="share_username_list" type="string" multiValued="true"/>
+  <field name="username" type="key_lower_case" indexed="true" required="true" stored="true"/>
+
+  <dynamicField name='ngram_*' type="n_gram" multiValued="false" stored="false"/>
+  <dynamicField name='std_*' type="text_std_token_lower_case" multiValued="false" stored="false"/>
+  <dynamicField name="*_string" type="string" multiValued="true" indexed="true" stored="true"/>
+
+
+</schema>

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/history/conf/schema.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/history/conf/schema.xml b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/history/conf/schema.xml
deleted file mode 100644
index 2b9515c..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/history/conf/schema.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<?xml version='1.0' ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<schema name='history' version='1.1'>
-        <types>
-                <fieldtype name='string' class='solr.StrField' />
-                <fieldtype name='long' class='solr.TrieLongField' />
-
-                <fieldType name="key_lower_case" class="solr.TextField"
-                        sortMissingLast="true" omitNorms="true">
-                         <analyzer>
-                                <tokenizer class="solr.KeywordTokenizerFactory"/>
-                                <filter class="solr.LowerCaseFilterFactory" />
-                         </analyzer>
-                 </fieldType>
-        </types>
-
-        <fields>
-                <field name='id' type='string' required='true' />
-                <field name="_version_" type="long" indexed="true" stored="true"/>
-
-                <field name='username' type='key_lower_case' indexed="true" stored="true" required='true'/>
-                <field name='jsons' type='string'/>
-
-                <field name="rowtype" type='string' required='true'/>
-                <field name='filtername' type='key_lower_case' indexed="true" stored="true" required='true'/>
-
-                <field name='share_username_list' type='string' multiValued='true' />
-                <field name='composite_filtername-username' type='string'/>
-
-                <dynamicField name='*_string' type='string' multiValued='true' indexed='true' stored='true'/>
-        </fields>
-        <uniqueKey>composite_filtername-username</uniqueKey>
-        <solrQueryParser defaultOperator='OR' />
-</schema>


[7/9] ambari git commit: AMBARI-16034. Incremental changes to LogSearch to bring it up to date in the trunk (Dharmesh Makwana via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
index 9703273..d144172 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
@@ -21,13 +21,19 @@ package org.apache.ambari.logsearch.dao;
 
 import javax.annotation.PostConstruct;
 
+import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.log4j.Logger;
 import org.springframework.stereotype.Component;
 
 @Component
 public class ServiceLogsSolrDao extends SolrDaoBase {
+
   static private Logger logger = Logger.getLogger(ServiceLogsSolrDao.class);
+  
+  public ServiceLogsSolrDao() {
+    super(LOG_TYPE.SERVICE);
+  }
 
   @PostConstruct
   public void postConstructor() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
index 228ddd1..147e148 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
@@ -27,8 +27,11 @@ import java.util.Iterator;
 import java.util.List;
 
 import org.apache.ambari.logsearch.common.LogsearchContextUtil;
+import org.apache.ambari.logsearch.common.MessageEnums;
+import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
 import org.apache.ambari.logsearch.util.ConfigUtil;
 import org.apache.ambari.logsearch.util.JSONUtil;
+import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.ambari.logsearch.util.StringUtil;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrClient;
@@ -55,16 +58,21 @@ import org.springframework.beans.factory.annotation.Autowired;
 public abstract class SolrDaoBase {
   static private Logger logger = Logger.getLogger(SolrDaoBase.class);
 
-  static Logger logPerfomance = Logger
+  private static Logger logPerformance = Logger
     .getLogger("org.apache.ambari.logsearch.performance");
 
   private static final String ROUTER_FIELD = "_router_field_";
+ 
+  protected LOG_TYPE logType;
 
   @Autowired
   StringUtil stringUtil;
 
   @Autowired
   JSONUtil jsonUtil;
+  
+  @Autowired
+  RESTErrorUtil restErrorUtil;
 
   String collectionName = null;
   // List<String> collectionList = new ArrayList<String>();
@@ -81,6 +89,13 @@ public abstract class SolrDaoBase {
   private boolean populateFieldsThreadActive = false;
 
   int SETUP_RETRY_SECOND = 30;
+  
+  private boolean isZkhost = false; // false by default
+  
+  // set the log type
+  public SolrDaoBase(LOG_TYPE logType) {
+    this.logType = logType;
+  }
 
   public SolrClient connectToSolr(String url, String zkHosts,
                                   String collection) throws Exception {
@@ -94,6 +109,7 @@ public abstract class SolrDaoBase {
         + solrDetail);
     }
     if (!stringUtil.isEmpty(zkHosts)) {
+      isZkhost=true;
       solrDetail = "zkHosts=" + zkHosts + ", collection=" + collection;
       logger.info("Using zookeepr. " + solrDetail);
       solrClouldClient = new CloudSolrClient(zkHosts);
@@ -170,51 +186,48 @@ public abstract class SolrDaoBase {
     return status;
   }
 
-  public void setupCollections(final String splitMode,
-                               final String configName, final int numberOfShards,
-                               final int replicationFactor) throws Exception {
-    setup_status = createCollectionsIfNeeded(splitMode, configName,
-      numberOfShards, replicationFactor);
-    logger.info("Setup status for " + collectionName + " is "
-      + setup_status);
-    if (!setup_status) {
-      // Start a background thread to do setup
-      Thread setupThread = new Thread("setup_collection_"
-        + collectionName) {
-        @Override
-        public void run() {
-          logger.info("Started monitoring thread to check availability of Solr server. collection="
-            + collectionName);
-          int retryCount = 0;
-          while (true) {
-            try {
-              Thread.sleep(SETUP_RETRY_SECOND);
-              retryCount++;
-              setup_status = createCollectionsIfNeeded(splitMode,
-                configName, numberOfShards,
-                replicationFactor);
-              if (setup_status) {
-                logger.info("Setup for collection "
-                  + collectionName
-                  + " is successful. Exiting setup retry thread");
+  public void setupCollections(final String splitMode, final String configName,
+      final int numberOfShards, final int replicationFactor) throws Exception {
+    if (isZkhost) {
+      setup_status = createCollectionsIfNeeded(splitMode, configName,
+          numberOfShards, replicationFactor);
+      logger.info("Setup status for " + collectionName + " is " + setup_status);
+      if (!setup_status) {
+        // Start a background thread to do setup
+        Thread setupThread = new Thread("setup_collection_" + collectionName) {
+          @Override
+          public void run() {
+            logger
+                .info("Started monitoring thread to check availability of Solr server. collection="
+                    + collectionName);
+            int retryCount = 0;
+            while (true) {
+              try {
+                Thread.sleep(SETUP_RETRY_SECOND);
+                retryCount++;
+                setup_status = createCollectionsIfNeeded(splitMode, configName,
+                    numberOfShards, replicationFactor);
+                if (setup_status) {
+                  logger.info("Setup for collection " + collectionName
+                      + " is successful. Exiting setup retry thread");
+                  break;
+                }
+              } catch (InterruptedException sleepInterrupted) {
+                logger.info("Sleep interrupted while setting up collection "
+                    + collectionName);
                 break;
+              } catch (Exception e) {
+                logger
+                    .error("Error setting up collection=" + collectionName, e);
               }
-            } catch (InterruptedException sleepInterrupted) {
-              logger.info("Sleep interrupted while setting up collection "
-                + collectionName);
-              break;
-            } catch (Exception e) {
-              logger.error("Error setting up collection="
-                + collectionName, e);
+              logger.error("Error setting collection. collection="
+                  + collectionName + ", retryCount=" + retryCount);
             }
-            logger.error("Error setting collection. collection="
-              + collectionName + ", retryCount=" + retryCount);
           }
-        }
-
-      };
-      setupThread.setDaemon(true);
-      setupThread.start();
+        };
+        setupThread.setDaemon(true);
+        setupThread.start();
+      }
     }
     populateSchemaFields();
   }
@@ -257,7 +270,7 @@ public abstract class SolrDaoBase {
       return allCollectionList;
     } catch (SolrException e) {
       logger.error(e);
-      return null;
+      return new ArrayList<String>();
     }
   }
 
@@ -420,7 +433,7 @@ public abstract class SolrDaoBase {
   }
 
   public QueryResponse process(SolrQuery solrQuery)
-    throws SolrServerException, IOException, SolrException {
+    throws SolrServerException, IOException {
     if (solrClient != null) {
       String event = solrQuery.get("event");
       solrQuery.remove("event");
@@ -428,7 +441,7 @@ public abstract class SolrDaoBase {
         METHOD.POST);
 
       if (event != null && !"/getLiveLogsCount".equalsIgnoreCase(event)) {
-        logPerfomance.info("\n Username :- "
+        logPerformance.info("\n Username :- "
           + LogsearchContextUtil.getCurrentUsername()
           + " Event :- " + event + " SolrQuery :- " + solrQuery
           + "\nQuery Time Execution :- "
@@ -438,14 +451,16 @@ public abstract class SolrDaoBase {
       }
       return queryResponse;
     } else {
-      return null;
+      throw restErrorUtil.createRESTException(
+          "Solr configuration improper for " + logType.getLabel() +" logs",
+          MessageEnums.ERROR_SYSTEM);
     }
   }
 
   public UpdateResponse addDocs(SolrInputDocument doc)
     throws SolrServerException, IOException, SolrException {
     UpdateResponse updateResoponse = solrClient.add(doc);
-    logPerfomance.info("\n Username :- "
+    logPerformance.info("\n Username :- "
       + LogsearchContextUtil.getCurrentUsername()
       + " Update Time Execution :- " + updateResoponse.getQTime()
       + " Total Time Elapsed is :- "
@@ -458,7 +473,7 @@ public abstract class SolrDaoBase {
     IOException, SolrException {
     UpdateResponse updateResoponse = solrClient.deleteByQuery(query);
     solrClient.commit();
-    logPerfomance.info("\n Username :- "
+    logPerformance.info("\n Username :- "
       + LogsearchContextUtil.getCurrentUsername()
       + " Remove Time Execution :- " + updateResoponse.getQTime()
       + " Total Time Elapsed is :- "
@@ -481,7 +496,7 @@ public abstract class SolrDaoBase {
           int retryCount = 0;
           while (true) {
             try {
-              Thread.sleep(SETUP_RETRY_SECOND);
+              Thread.sleep(SETUP_RETRY_SECOND * 1000);
               retryCount++;
               boolean _result = _populateSchemaFields();
               if (_result) {
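
One fix buried in the SolrDaoBase diff above: Thread.sleep() takes milliseconds, so sleeping for SETUP_RETRY_SECOND (30) pauses for 30 ms rather than the intended 30 seconds; the schema-population retry loop now multiplies by 1000. A small sketch of two equivalent spellings of that pause, the second of which makes the unit explicit:

    import java.util.concurrent.TimeUnit;

    public class RetrySleepSketch {
      static final int SETUP_RETRY_SECOND = 30;

      public static void main(String[] args) throws InterruptedException {
        Thread.sleep(SETUP_RETRY_SECOND * 1000L);   // milliseconds, as in the fix
        TimeUnit.SECONDS.sleep(SETUP_RETRY_SECOND); // same pause, units spelled out
      }
    }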

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
index 750cf07..b5c042d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
@@ -21,6 +21,7 @@ package org.apache.ambari.logsearch.dao;
 
 import javax.annotation.PostConstruct;
 
+import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.log4j.Logger;
 import org.springframework.stereotype.Component;
@@ -30,6 +31,10 @@ public class UserConfigSolrDao extends SolrDaoBase {
 
   static private Logger logger = Logger.getLogger(UserConfigSolrDao.class);
 
+  public UserConfigSolrDao() {
+    super(LOG_TYPE.SERVICE);
+  }
+
   @PostConstruct
   public void postConstructor() {
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
new file mode 100644
index 0000000..92baaff
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
@@ -0,0 +1,414 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.graph;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.common.MessageEnums;
+import org.apache.ambari.logsearch.common.SearchCriteria;
+import org.apache.ambari.logsearch.dao.SolrDaoBase;
+import org.apache.ambari.logsearch.query.QueryGeneration;
+import org.apache.ambari.logsearch.util.ConfigUtil;
+import org.apache.ambari.logsearch.util.DateUtil;
+import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.ambari.logsearch.util.StringUtil;
+import org.apache.ambari.logsearch.view.VBarDataList;
+import org.apache.ambari.logsearch.view.VBarGraphData;
+import org.apache.ambari.logsearch.view.VNameValue;
+import org.apache.log4j.Logger;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.response.FacetField;
+import org.apache.solr.client.solrj.response.FacetField.Count;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.client.solrj.response.RangeFacet;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.util.SimpleOrderedMap;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+@Component
+public class GraphDataGenerator extends GraphDataGeneratorBase {
+
+  @Autowired
+  StringUtil stringUtil;
+
+  @Autowired
+  QueryGeneration queryGenerator;
+
+  @Autowired
+  RESTErrorUtil restErrorUtil;
+
+  @Autowired
+  DateUtil dateUtil;
+
+  private static Logger logger = Logger.getLogger(GraphDataGenerator.class);
+
+  /**
+   * Builds bar-graph data for the requested x/y axis, time range and optional stack field.
+   *
+   * @param searchCriteria search criteria carrying the xAxis, yAxis, stackBy, from, to and unit parameters
+   * @param solrDaoBase DAO used to run the generated Solr query
+   * @param solrQuery base Solr query to decorate and execute
+   * @return the aggregated graph data, or null if the graph type cannot be determined
+   */
+  public VBarDataList getAnyGraphData(SearchCriteria searchCriteria,
+      SolrDaoBase solrDaoBase, SolrQuery solrQuery) {
+    // X axis credentials
+    String xAxisField = (String) searchCriteria.getParamValue("xAxis");
+    String stackField = (String) searchCriteria.getParamValue("stackBy");
+    String from = (String) searchCriteria.getParamValue("from");
+    String to = (String) searchCriteria.getParamValue("to");
+    String unit = (String) searchCriteria.getParamValue("unit");
+    String suffix = (String) searchCriteria.getParamValue("suffix");
+    String typeXAxis = ConfigUtil.schemaFieldsName.get(xAxisField + suffix);
+    typeXAxis = (stringUtil.isEmpty(typeXAxis)) ? "string" : typeXAxis;
+
+    // Y axis credentials
+    String yAxisField = (String) searchCriteria.getParamValue("yAxis");
+    // add updated typeXAxis as a type parameter
+    searchCriteria.addParam("type", typeXAxis);
+    String fieldTime = (String) searchCriteria.getParamValue("fieldTime");
+    // decide graph type based on user request parameter
+    GRAPH_TYPE graphType = getGraphType(searchCriteria);
+    switch (graphType) {
+    case NORMAL_GRAPH:
+      return normalGraph(xAxisField, yAxisField, from, to, solrDaoBase,
+          typeXAxis, fieldTime, solrQuery);
+    case RANGE_NON_STACK_GRAPH:
+      return rangeNonStackGraph(xAxisField, yAxisField, from, to, unit,
+          solrDaoBase, typeXAxis, fieldTime, solrQuery);
+    case NON_RANGE_STACK_GRAPH:
+      return nonRangeStackGraph(xAxisField, yAxisField, stackField, from, to,
+          solrDaoBase, typeXAxis, fieldTime, solrQuery);
+    case RANGE_STACK_GRAPH:
+      return rangeStackGraph(xAxisField, yAxisField, stackField, from, to,
+          unit, solrDaoBase, typeXAxis, fieldTime, solrQuery);
+    default:
+      logger.warn("Invalid graph type: " + graphType.name());
+      return null;
+    }
+  }
+
+  private GRAPH_TYPE getGraphType(SearchCriteria searchCriteria) {
+    // default graph type is unknown
+    GRAPH_TYPE graphType = GRAPH_TYPE.UNKNOWN;
+    // X axis credentials
+    String xAxisField = (String) searchCriteria.getParamValue("xAxis");
+    String stackField = (String) searchCriteria.getParamValue("stackBy");
+    String from = (String) searchCriteria.getParamValue("from");
+    String to = (String) searchCriteria.getParamValue("to");
+    String xType = (String) searchCriteria.getParamValue("type");
+    if (xType != null) {
+      // Y axis credentials
+      String yAxisField = (String) searchCriteria.getParamValue("yAxis");
+      if (stringUtil.isEmpty(xAxisField) || stringUtil.isEmpty(yAxisField)) {
+        graphType = GRAPH_TYPE.UNKNOWN;
+      } else if (stringUtil.isEmpty(stackField) && !stringUtil.isEmpty(to)
+          && !stringUtil.isEmpty(from)
+          && !(xType.contains("date") || xType.contains("time"))) {
+        // Normal Graph Type
+        graphType = GRAPH_TYPE.NORMAL_GRAPH;
+      } else if (stringUtil.isEmpty(stackField) && !stringUtil.isEmpty(to)
+          && !stringUtil.isEmpty(from)
+          && (xType.contains("date") || xType.contains("time"))) {
+        // Range(Non-Stack) Graph Type
+        graphType = GRAPH_TYPE.RANGE_NON_STACK_GRAPH;
+      } else if (!stringUtil.isEmpty(stackField) && !stringUtil.isEmpty(to)
+          && !stringUtil.isEmpty(from)
+          && !(xType.contains("date") || xType.contains("time"))) {
+        // Non-Range Stack Graph Type
+        graphType = GRAPH_TYPE.NON_RANGE_STACK_GRAPH;
+      } else if (!stringUtil.isEmpty(stackField) && !stringUtil.isEmpty(to)
+          && !stringUtil.isEmpty(from)
+          && (xType.contains("date") || xType.contains("time"))) {
+        // Range Stack GraphType
+        graphType = GRAPH_TYPE.RANGE_STACK_GRAPH;
+      }
+    }
+    return graphType;
+  }
+
+  @SuppressWarnings("unchecked")
+  private VBarDataList normalGraph(String xAxisField, String yAxisField, String from,
+      String to, SolrDaoBase solrDaoBase, String typeXAxis, String fieldTime,
+      SolrQuery solrQuery) {
+    VBarDataList dataList = new VBarDataList();
+    Collection<VBarGraphData> vBarGraphDatas = new ArrayList<VBarGraphData>();
+    VBarGraphData vBarGraphData = new VBarGraphData();
+    Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
+    queryGenerator.setMainQuery(solrQuery, null);
+    queryGenerator.setSingleIncludeFilter(solrQuery, fieldTime, "[" + from
+        + " TO " + to + "]");
+    if (typeXAxis.contains("string") || typeXAxis.contains("key_lower_case")
+        || typeXAxis.contains("text")) {
+      queryGenerator.setFacetField(solrQuery, xAxisField);
+      try {
+        QueryResponse response = solrDaoBase.process(solrQuery);
+        if (response != null && response.getResults() != null) {
+          long count = response.getResults().getNumFound();
+          if (count > 0) {
+            FacetField facetField = response.getFacetField(xAxisField);
+            if (facetField != null) {
+              List<Count> countValues = facetField.getValues();
+              if (countValues != null) {
+                for (Count countValue : countValues) {
+                  if (countValue != null) {
+                    VNameValue vNameValue = new VNameValue();
+                    vNameValue.setName(countValue.getName());
+                    vNameValue.setValue("" + countValue.getCount());
+                    vNameValues.add(vNameValue);
+                  }
+                }
+              }
+              vBarGraphData.setName(xAxisField);
+              vBarGraphDatas.add(vBarGraphData);
+              dataList.setGraphData(vBarGraphDatas);
+            }
+          }
+        }
+        if (xAxisField.equalsIgnoreCase(LogSearchConstants.SOLR_LEVEL)) {
+          List<String> logLevels = new ArrayList<String>();
+          logLevels.add(LogSearchConstants.FATAL);
+          logLevels.add(LogSearchConstants.ERROR);
+          logLevels.add(LogSearchConstants.WARN);
+          logLevels.add(LogSearchConstants.INFO);
+          logLevels.add(LogSearchConstants.DEBUG);
+          logLevels.add(LogSearchConstants.TRACE);
+          Collection<VNameValue> sortedVNameValues = new ArrayList<VNameValue>();
+          for (String level : logLevels) {
+            VNameValue value = new VNameValue();
+            value.setName(level);
+            String val = "0";
+            for (VNameValue valueLevel : vNameValues) {
+              if (valueLevel.getName().equalsIgnoreCase(level)) {
+                val = valueLevel.getValue();
+                break;
+              }
+            }
+            value.setValue(val);
+            sortedVNameValues.add(value);
+          }
+          vBarGraphData.setDataCounts(sortedVNameValues);
+        } else {
+          vBarGraphData.setDataCounts(vNameValues);
+        }
+        return dataList;
+      } catch (SolrException | SolrServerException | IOException e) {
+        String query = solrQuery != null ? solrQuery.toQueryString() : "";
+        logger.error("Got exception for solr query :" + query,
+            e.getCause());
+      }
+    } else {
+      queryGenerator.setRowCount(solrQuery, 0);
+      String yAxis = yAxisField.contains("count") ? "sum" : yAxisField;
+      String jsonQuery = queryGenerator.buildJSONFacetAggregatedFuncitonQuery(
+          yAxis, xAxisField);
+      queryGenerator.setJSONFacet(solrQuery, jsonQuery);
+      try {
+        QueryResponse response = solrDaoBase.process(solrQuery);
+        SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
+            .getResponse().get("facets");
+        if (jsonFacetResponse == null
+            || jsonFacetResponse.toString().equals("{count=0}")) {
+          return dataList;
+        }
+        VNameValue value = new VNameValue();
+        Object sumObject = jsonFacetResponse.getVal(1);
+        String sum = sumObject != null ? sumObject.toString() : "";
+        value.setName(xAxisField);
+        value.setValue(sum.contains(".") ? sum.substring(0, sum.indexOf(".")) : sum);
+        vNameValues.add(value);
+        vBarGraphData.setDataCounts(vNameValues);
+        vBarGraphData.setName(xAxisField);
+        vBarGraphDatas.add(vBarGraphData);
+        dataList.setGraphData(vBarGraphDatas);
+        return dataList;
+      } catch (SolrException | SolrServerException | IOException e) {
+        String query = solrQuery != null ? solrQuery.toQueryString() : "";
+        logger.error("Got exception for solr query :" + query,
+            e.getCause());
+      }
+    }
+    return null;
+  }
+
+  @SuppressWarnings("unchecked")
+  private VBarDataList nonRangeStackGraph(String xAxisField, String yAxisField,
+      String stackField, String from, String to, SolrDaoBase solrDaoBase,
+      String typeXAxis, String fieldTime, SolrQuery solrQuery) {
+    VBarDataList dataList = new VBarDataList();
+    Collection<VBarGraphData> vGraphData = new ArrayList<VBarGraphData>();
+    String mainQuery = queryGenerator.buildInclusiveRangeFilterQuery(fieldTime,
+        from, to);
+    queryGenerator.setMainQuery(solrQuery, mainQuery);
+    queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
+    String jsonQuery = "";
+    if (isTypeNumber(typeXAxis)) {
+      String function = (yAxisField.contains("count")) ? "sum" : yAxisField;
+      jsonQuery = queryGenerator.buidlJSONFacetRangeQueryForNumber(stackField,
+          xAxisField, function);
+    } else {
+      jsonQuery = queryGenerator.buildJsonFacetTermsRangeQuery(stackField,
+          xAxisField);
+    }
+    try {
+      queryGenerator.setJSONFacet(solrQuery, jsonQuery);
+      dataList.setGraphData(vGraphData);
+      QueryResponse response = solrDaoBase.process(solrQuery);
+      if (response == null || response.getResults() == null) {
+        return dataList;
+      }
+      Long count = response.getResults().getNumFound();
+      if (count <= 0) {
+        return dataList;
+      }
+      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
+          .getResponse().get("facets");
+      if (jsonFacetResponse == null
+          || jsonFacetResponse.toString().equals("{count=0}")) {
+        return dataList;
+      }
+      extractNonRangeStackValuesFromBucket(jsonFacetResponse, stackField,
+          vGraphData, typeXAxis);
+      if (LogSearchConstants.SOLR_LEVEL.equalsIgnoreCase(stackField)
+          && LogSearchConstants.SOLR_LEVEL.equalsIgnoreCase(xAxisField)) {
+        Collection<VBarGraphData> levelVGraphData = dataList.getGraphData();
+        List<String> logLevels = new ArrayList<String>();
+        logLevels.add(LogSearchConstants.FATAL);
+        logLevels.add(LogSearchConstants.ERROR);
+        logLevels.add(LogSearchConstants.WARN);
+        logLevels.add(LogSearchConstants.INFO);
+        logLevels.add(LogSearchConstants.DEBUG);
+        logLevels.add(LogSearchConstants.TRACE);
+        for (VBarGraphData graphData : levelVGraphData) {
+          Collection<VNameValue> valueList = graphData.getDataCount();
+          Collection<VNameValue> valueListSorted = new ArrayList<VNameValue>();
+          for (String level : logLevels) {
+            String val = "0";
+            for (VNameValue value : valueList) {
+              if (value.getName().equalsIgnoreCase(level)) {
+                val = value.getValue();
+                break;
+              }
+            }
+            VNameValue v1 = new VNameValue();
+            v1.setName(level.toUpperCase());
+            v1.setValue(val);
+            valueListSorted.add(v1);
+          }
+          graphData.setDataCounts(valueListSorted);
+        }
+      }
+      return dataList;
+    } catch (SolrException | IOException | SolrServerException e) {
+      String query = solrQuery != null ? solrQuery.toQueryString() : "";
+      logger.error("Got exception for solr query :" + query,
+          e.getCause());
+      throw restErrorUtil.createRESTException(MessageEnums.DATA_NOT_FOUND
+          .getMessage().getMessage(), MessageEnums.DATA_NOT_FOUND);
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  private VBarDataList rangeNonStackGraph(String xAxisField, String yAxisField,
+      String from, String to, String unit, SolrDaoBase solrDaoBase,
+      String typeXAxis, String fieldTime, SolrQuery solrQuery) {
+    VBarDataList dataList = new VBarDataList();
+    Collection<VBarGraphData> vBarGraphDatas = new ArrayList<VBarGraphData>();
+    VBarGraphData vBarGraphData = new VBarGraphData();
+    Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
+    queryGenerator.setMainQuery(solrQuery, null);
+    if (isTypeNumber(typeXAxis)) {
+      queryGenerator.setSingleRangeFilter(solrQuery, fieldTime, from, to);
+      return normalGraph(xAxisField, yAxisField, from, to, solrDaoBase,
+          typeXAxis, fieldTime, solrQuery);
+    } else {
+      try {
+        queryGenerator.setFacetRange(solrQuery, xAxisField, from, to, unit);
+        QueryResponse response = solrDaoBase.process(solrQuery);
+        if (response != null && response.getResults() != null) {
+          Long count = response.getResults().getNumFound();
+          if (count > 0) {
+            @SuppressWarnings("rawtypes")
+            List<RangeFacet> rangeFacet = response.getFacetRanges();
+            if (rangeFacet != null && rangeFacet.size() > 0) {
+              List<RangeFacet.Count> listCount = rangeFacet.get(0).getCounts();
+              if (listCount != null) {
+                for (RangeFacet.Count cnt : listCount) {
+                  VNameValue vNameValue = new VNameValue();
+                  vNameValue.setName(cnt.getValue());
+                  vNameValue.setValue("" + cnt.getCount());
+                  vNameValues.add(vNameValue);
+                }
+                vBarGraphData.setDataCounts(vNameValues);
+                vBarGraphDatas.add(vBarGraphData);
+                vBarGraphData.setName(xAxisField);
+                dataList.setGraphData(vBarGraphDatas);
+              }
+            }
+          }
+        }
+        return dataList;
+      } catch (SolrException | SolrServerException | IOException e) {
+        logger.error("Got exception for solr query :" + solrQuery,
+            e.getCause());
+      }
+    }
+    return null;
+  }
+
+  @SuppressWarnings("unchecked")
+  private VBarDataList rangeStackGraph(String xAxisField, String yAxisField,
+      String stackField, String from, String to, String unit,
+      SolrDaoBase solrDaoBase, String typeXAxis, String fieldTime,
+      SolrQuery solrQuery) {
+    VBarDataList dataList = new VBarDataList();
+    List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();
+    queryGenerator.setMainQuery(solrQuery, null);
+    queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
+    String jsonHistogramQuery = queryGenerator
+        .buildJSONFacetTermTimeRangeQuery(stackField, xAxisField, from, to,
+            unit).replace("\\", "");
+    try {
+      solrQuery.set("json.facet", jsonHistogramQuery);
+      queryGenerator.setRowCount(solrQuery, 0);
+      QueryResponse response = solrDaoBase.process(solrQuery);
+      if (response != null) {
+        SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
+            .getResponse().get("facets");
+        if (jsonFacetResponse == null
+            || jsonFacetResponse.toString().equals("{count=0}")) {
+          return dataList;
+        }
+        extractRangeStackValuesFromBucket(jsonFacetResponse, "x", "y",
+            histogramData);
+        dataList.setGraphData(histogramData);
+      }
+      return dataList;
+    } catch (SolrException | IOException | SolrServerException e) {
+      logger.error("Got exception for solr query :" + solrQuery,
+          e.getCause());
+    }
+    return null;
+  }
+}
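
The switch above dispatches on the GRAPH_TYPE that getGraphType() derives from
the request parameters: a graph becomes a "range" variant when the x-axis type
contains date/time, and a "stack" variant when a stackBy field is supplied. The
following is a minimal standalone sketch of that decision table, not code from
the patch; the class name and the empty() helper are illustrative only.

// Sketch only: mirrors the getGraphType() decision table; the GRAPH_TYPE names
// match the patch, everything else is illustrative.
public class GraphTypeSketch {

  enum GraphType { UNKNOWN, NORMAL_GRAPH, RANGE_NON_STACK_GRAPH,
                   NON_RANGE_STACK_GRAPH, RANGE_STACK_GRAPH }

  static boolean empty(String s) {
    return s == null || s.trim().isEmpty();
  }

  static GraphType pickGraphType(String xAxis, String yAxis, String stackBy,
                                 String from, String to, String xType) {
    if (empty(xType) || empty(xAxis) || empty(yAxis) || empty(from) || empty(to)) {
      return GraphType.UNKNOWN;                 // mandatory parameters missing
    }
    boolean timeAxis = xType.contains("date") || xType.contains("time");
    if (empty(stackBy)) {                       // no stackBy field supplied
      return timeAxis ? GraphType.RANGE_NON_STACK_GRAPH : GraphType.NORMAL_GRAPH;
    }
    return timeAxis ? GraphType.RANGE_STACK_GRAPH : GraphType.NON_RANGE_STACK_GRAPH;
  }

  public static void main(String[] args) {
    // level counts over a time window, stacked by component -> RANGE_STACK_GRAPH
    System.out.println(pickGraphType("logtime", "count", "type",
        "2016-04-25T00:00:00Z", "2016-04-25T23:59:59Z", "tdate"));
  }
}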

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
index c32dc36..a813e96 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
@@ -18,167 +18,263 @@
  */
 package org.apache.ambari.logsearch.graph;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Date;
 import java.util.List;
 
+import org.apache.ambari.logsearch.manager.MgrBase;
 import org.apache.ambari.logsearch.util.DateUtil;
+import org.apache.ambari.logsearch.util.StringUtil;
 import org.apache.ambari.logsearch.view.VBarGraphData;
 import org.apache.ambari.logsearch.view.VNameValue;
 import org.apache.solr.client.solrj.response.RangeFacet;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectWriter;
 import org.springframework.beans.factory.annotation.Autowired;
 
-public class GraphDataGeneratorBase {
+public class GraphDataGeneratorBase extends MgrBase {
+
 
   @Autowired
   DateUtil dateUtil;
 
-  @SuppressWarnings("unchecked")
-  protected void extractRangeStackValuesFromBucket(
-    SimpleOrderedMap<Object> jsonFacetResponse, String outerField,
-    String innerField, List<VBarGraphData> histogramData) {
-    NamedList<Object> stack = (NamedList<Object>) jsonFacetResponse
-      .get(outerField);
-    ArrayList<Object> stackBuckets = (ArrayList<Object>) stack
-      .get("buckets");
-    for (Object temp : stackBuckets) {
-      VBarGraphData vBarGraphData = new VBarGraphData();
+  @Autowired
+  StringUtil stringUtil;
 
-      SimpleOrderedMap<Object> level = (SimpleOrderedMap<Object>) temp;
-      String name = ((String) level.getVal(0)).toUpperCase();
-      vBarGraphData.setName(name);
+  private static final String BUCKETS = "buckets";
+  
+  public static enum DATA_TYPE {
+    LONG {
+      @Override
+      String getType() {
+        return "long";
+      }
+    },
+    DOUBLE {
+      @Override
+      String getType() {
+        return "double";
+      }
+    },
+    FLOAT {
+      @Override
+      String getType() {
+        return "long";
+      }
+    },
+    INT {
+      @Override
+      String getType() {
+        return "long";
+      }
 
-      Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
-      vBarGraphData.setDataCounts(vNameValues);
-      ArrayList<Object> levelBuckets = (ArrayList<Object>) ((NamedList<Object>) level
-        .get(innerField)).get("buckets");
-      for (Object temp1 : levelBuckets) {
-        SimpleOrderedMap<Object> countValue = (SimpleOrderedMap<Object>) temp1;
-        String value = dateUtil
-          .convertDateWithMillisecondsToSolrDate((Date) countValue
-            .getVal(0));
+    };
+    abstract String getType();
+  }
 
-        String count = "" + countValue.getVal(1);
-        VNameValue vNameValue = new VNameValue();
-        vNameValue.setName(value);
-        vNameValue.setValue(count);
-        vNameValues.add(vNameValue);
+  public static enum GRAPH_TYPE {
+    UNKNOWN {
+      @Override
+      int getType() {
+        return 0;
+      }
+    },
+    NORMAL_GRAPH {
+      @Override
+      int getType() {
+        return 1;
+      }
+    },
+    RANGE_NON_STACK_GRAPH {
+      @Override
+      int getType() {
+        return 2;
+      }
+    },
+    NON_RANGE_STACK_GRAPH {
+      @Override
+      int getType() {
+        return 3;
+      }
+    },
+    RANGE_STACK_GRAPH {
+      @Override
+      int getType() {
+        return 4;
+      }
+    };
+    abstract int getType();
+  }
+
+  @SuppressWarnings("unchecked")
+  protected void extractRangeStackValuesFromBucket(
+      SimpleOrderedMap<Object> jsonFacetResponse, String outerField,
+      String innerField, List<VBarGraphData> histogramData) {
+    if (jsonFacetResponse != null) {
+      NamedList<Object> stack = (NamedList<Object>) jsonFacetResponse
+          .get(outerField);
+      if (stack != null) {
+        ArrayList<Object> stackBuckets = (ArrayList<Object>) stack.get(BUCKETS);
+        if (stackBuckets != null) {
+          for (Object stackBucket : stackBuckets) {
+            VBarGraphData vBarGraphData = new VBarGraphData();
+            SimpleOrderedMap<Object> level = (SimpleOrderedMap<Object>) stackBucket;
+            if (level != null) {
+              String name = level.getVal(0) != null ? level.getVal(0)
+                  .toString().toUpperCase() : "";
+              vBarGraphData.setName(name);
+              Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
+              NamedList<Object> innerFieldValue = (NamedList<Object>) level
+                  .get(innerField);
+              if (innerFieldValue != null) {
+                ArrayList<Object> levelBuckets = (ArrayList<Object>) innerFieldValue
+                    .get(BUCKETS);
+                if (levelBuckets != null) {
+                  for (Object levelBucket : levelBuckets) {
+                    SimpleOrderedMap<Object> countValue = (SimpleOrderedMap<Object>) levelBucket;
+                    if (countValue != null) {
+                      String innerName = dateUtil
+                          .convertDateWithMillisecondsToSolrDate((Date) countValue
+                              .getVal(0));
+                      String innerValue = countValue.getVal(1) != null ? countValue
+                          .getVal(1).toString() : "";
+                      VNameValue vNameValue = new VNameValue(innerName,
+                          innerValue);
+                      vNameValues.add(vNameValue);
+                    }
+                  }
+                }
+              }
+              vBarGraphData.setDataCounts(vNameValues);
+            }
+            histogramData.add(vBarGraphData);
+          }
+        }
       }
-      histogramData.add(vBarGraphData);
     }
   }
 
   @SuppressWarnings("unchecked")
   protected boolean extractNonRangeStackValuesFromBucket(
-    SimpleOrderedMap<Object> jsonFacetResponse, String level,
-    Collection<VBarGraphData> vGraphDatas, String typeXAxis) {
-
+      SimpleOrderedMap<Object> jsonFacetResponse, String level,
+      Collection<VBarGraphData> vGraphDatas, String typeXAxis) {
     boolean zeroFlag = true;
-    if (jsonFacetResponse.get(level).toString().equals("{count=0}")) {
+    if (jsonFacetResponse == null || jsonFacetResponse.get(level) == null
+        || jsonFacetResponse.get(level).toString().equals("{count=0}")) {
       return false;
     }
-
-    NamedList<Object> list = (NamedList<Object>) jsonFacetResponse
-      .get(level);
-
-    ArrayList<Object> list3 = (ArrayList<Object>) list.get("buckets");
-    int i = 0;
-    for (i = 0; i < list3.size(); i++) {
-      VBarGraphData vGraphData = new VBarGraphData();
-
-
-      Collection<VNameValue> levelCounts = new ArrayList<VNameValue>();
-      vGraphData.setDataCounts(levelCounts);
-
-      SimpleOrderedMap<Object> valueCount = (SimpleOrderedMap<Object>) list3
-        .get(i);
-      String name = ("" + valueCount.getVal(0)).trim();
-      if (isTypeNumber(typeXAxis)) {
-        VNameValue nameValue = new VNameValue();
-        String value = ("" + valueCount.getVal(2)).trim().substring(0, ("" + valueCount.getVal(2)).indexOf("."));
-        nameValue.setName(name);
-        nameValue.setValue(value);
-        levelCounts.add(nameValue);
-      } else {
-        SimpleOrderedMap<Object> l1 = (SimpleOrderedMap<Object>) valueCount
-          .getVal(2);
-        ArrayList<Object> l2 = (ArrayList<Object>) l1.get("buckets");
-        for (int j = 0; l2 != null && j < l2.size(); j++) {
-          VNameValue nameValue = new VNameValue();
-          SimpleOrderedMap<Object> innerValueCount = (SimpleOrderedMap<Object>) l2
-            .get(j);
-          nameValue.setName(("" + innerValueCount.getVal(0)).trim());
-          nameValue.setValue(("" + innerValueCount.getVal(1)).trim());
-          levelCounts.add(nameValue);
+    NamedList<Object> levelList = (NamedList<Object>) jsonFacetResponse
+        .get(level);
+    if (levelList != null) {
+      ArrayList<Object> bucketList = (ArrayList<Object>) levelList.get(BUCKETS);
+      if (bucketList != null) {
+        for (int index = 0; index < bucketList.size(); index++) {
+          SimpleOrderedMap<Object> valueCount = (SimpleOrderedMap<Object>) bucketList
+              .get(index);
+          if (valueCount != null && valueCount.size() > 2) {
+            VBarGraphData vGraphData = new VBarGraphData();
+            Collection<VNameValue> levelCounts = new ArrayList<VNameValue>();
+            String name = valueCount.getVal(0) != null ? valueCount.getVal(0)
+                .toString().trim() : "";
+            if (isTypeNumber(typeXAxis)) {
+              VNameValue nameValue = new VNameValue();
+              Double sumValue = (Double) valueCount.getVal(2);
+              String value = "0";// default is zero
+              if (sumValue != null) {
+                value = "" + sumValue.longValue();
+              }
+              nameValue.setName(name);
+              nameValue.setValue(value);
+              levelCounts.add(nameValue);
+            } else {
+              SimpleOrderedMap<Object> valueCountMap = (SimpleOrderedMap<Object>) valueCount
+                  .getVal(2);
+              if (valueCountMap != null) {
+                ArrayList<Object> buckets = (ArrayList<Object>) valueCountMap
+                    .get(BUCKETS);
+                if (buckets != null) {
+                  for (int innerIndex = 0; innerIndex < buckets.size(); innerIndex++) {
+                    SimpleOrderedMap<Object> innerValueCount = (SimpleOrderedMap<Object>) buckets
+                        .get(innerIndex);
+                    if (innerValueCount != null) {
+                      String innerName = innerValueCount.getVal(0) != null ? innerValueCount
+                          .getVal(0).toString().trim()
+                          : "";
+                      String innerValue = innerValueCount.getVal(1) != null ? innerValueCount
+                          .getVal(1).toString().trim()
+                          : "";
+                      VNameValue nameValue = new VNameValue(innerName,
+                          innerValue);
+                      levelCounts.add(nameValue);
+                    }
+                  }
+                }
+              }
+            }
+            vGraphData.setName(name);
+            vGraphData.setDataCounts(levelCounts);
+            vGraphDatas.add(vGraphData);
+          }
         }
       }
-
-      vGraphData.setName(name);
-      vGraphDatas.add(vGraphData);
     }
     return zeroFlag;
   }
 
   @SuppressWarnings("unchecked")
   protected boolean extractValuesFromJson(
-    SimpleOrderedMap<Object> jsonFacetResponse, String level,
-    VBarGraphData histogramData, List<RangeFacet.Count> counts) {
+      SimpleOrderedMap<Object> jsonFacetResponse, String level,
+      VBarGraphData histogramData, List<RangeFacet.Count> counts) {
     histogramData.setName(level);
     Collection<VNameValue> levelCounts = new ArrayList<VNameValue>();
     histogramData.setDataCounts(levelCounts);
     boolean zeroFlag = true;
     if (jsonFacetResponse.get(level).toString().equals("{count=0}")) {
-      for (RangeFacet.Count date : counts) {
-        VNameValue nameValue = new VNameValue();
-
-        nameValue.setName(date.getValue());
-        nameValue.setValue("0");
-
-        levelCounts.add(nameValue);
+      if (counts != null) {
+        for (RangeFacet.Count date : counts) {
+          VNameValue nameValue = new VNameValue();
+          nameValue.setName(date.getValue());
+          nameValue.setValue("0");
+          levelCounts.add(nameValue);
+        }
       }
       return false;
     }
-    NamedList<Object> list = (NamedList<Object>) jsonFacetResponse
-      .get(level);
-    NamedList<Object> list2 = (NamedList<Object>) list.getVal(1);
-    ArrayList<Object> list3 = (ArrayList<Object>) list2.get("buckets");
-    int i = 0;
-    for (RangeFacet.Count date : counts) {
-      VNameValue nameValue = new VNameValue();
-      SimpleOrderedMap<Object> valueCount = (SimpleOrderedMap<Object>) list3
-        .get(i);
-      String count = ("" + valueCount.getVal(1)).trim();
-      if (!"0".equals(count)) {
-        zeroFlag = false;
+    NamedList<Object> levelList = (NamedList<Object>) jsonFacetResponse
+        .get(level);
+    if (levelList != null && counts != null && levelList.size() > 1) {
+      NamedList<Object> levelValues = (NamedList<Object>) levelList.getVal(1);
+      if (levelValues != null) {
+        ArrayList<Object> bucketList = (ArrayList<Object>) levelValues
+            .get(BUCKETS);
+        int i = 0;
+        for (RangeFacet.Count date : counts) {
+          if (bucketList == null || i >= bucketList.size()) {
+            break;
+          }
+          SimpleOrderedMap<Object> valueCount = (SimpleOrderedMap<Object>) bucketList
+              .get(i);
+          if (valueCount != null) {
+            Double count = (Double) valueCount.getVal(1);
+            if (count != null && !count.equals(0D)) {
+              zeroFlag = false;
+            }
+            String name = date.getValue();
+            String value = count != null ? "" + count.longValue() : "0";
+            VNameValue nameValue = new VNameValue(name, value);
+            levelCounts.add(nameValue);
+          }
+          i++;
+        }
       }
-      nameValue.setName(date.getValue());
-      nameValue.setValue(count);
-
-      levelCounts.add(nameValue);
-      i++;
     }
-
     return zeroFlag;
   }
 
   protected boolean isTypeNumber(String typeXAxis) {
-    return "long".contains(typeXAxis) || "int".contains(typeXAxis)
-      || "float".contains(typeXAxis) || "double".contains(typeXAxis);
-  }
-
-  public String convertObjToString(Object obj) throws IOException {
-    if (obj == null) {
-      return "";
+    if (stringUtil.isEmpty(typeXAxis)) {
+      return false;
+    } else {
+      return typeXAxis.contains(DATA_TYPE.LONG.getType()) || typeXAxis.contains(DATA_TYPE.INT.getType())
+          || typeXAxis.contains(DATA_TYPE.FLOAT.getType()) || typeXAxis.contains(DATA_TYPE.DOUBLE.getType());
     }
-    ObjectMapper mapper = new ObjectMapper();
-    ObjectWriter w = mapper.writerWithDefaultPrettyPrinter();
-    return w.writeValueAsString(obj);
   }
-
 }
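
The reworked extract*FromBucket helpers above walk Solr's json.facet output,
which SolrJ exposes as nested NamedList/SimpleOrderedMap nodes that each hold a
"buckets" list, and they now null-check every step of that walk. The sketch
below shows the same traversal pattern in isolation; it fakes the facet
response by hand and only assumes solr-solrj on the classpath, so it is
illustrative rather than part of the patch.

import java.util.ArrayList;
import java.util.List;

import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;

public class FacetBucketsSketch {

  // Null-guarded "buckets" walk, analogous to extractNonRangeStackValuesFromBucket().
  @SuppressWarnings("unchecked")
  static void printBucketCounts(SimpleOrderedMap<Object> facets, String field) {
    if (facets == null) {
      return;                                   // no facets section at all
    }
    NamedList<Object> node = (NamedList<Object>) facets.get(field);
    if (node == null) {
      return;                                   // facet for this field missing
    }
    List<Object> buckets = (List<Object>) node.get("buckets");
    if (buckets == null) {
      return;                                   // empty result, e.g. {count=0}
    }
    for (Object b : buckets) {
      SimpleOrderedMap<Object> bucket = (SimpleOrderedMap<Object>) b;
      if (bucket != null && bucket.size() > 1) {
        // each bucket looks like {val=<term>, count=<n>}
        System.out.println(bucket.getVal(0) + " = " + bucket.getVal(1));
      }
    }
  }

  public static void main(String[] args) {
    SimpleOrderedMap<Object> errorBucket = new SimpleOrderedMap<Object>();
    errorBucket.add("val", "ERROR");
    errorBucket.add("count", 7L);
    List<Object> buckets = new ArrayList<Object>();
    buckets.add(errorBucket);
    NamedList<Object> levelNode = new NamedList<Object>();
    levelNode.add("buckets", buckets);
    SimpleOrderedMap<Object> facets = new SimpleOrderedMap<Object>();
    facets.add("level", levelNode);
    printBucketCounts(facets, "level");         // prints: ERROR = 7
  }
}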

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGnerator.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGnerator.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGnerator.java
deleted file mode 100644
index 44143f4..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGnerator.java
+++ /dev/null
@@ -1,397 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.graph;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-
-import org.apache.ambari.logsearch.common.LogSearchConstants;
-import org.apache.ambari.logsearch.common.MessageEnums;
-import org.apache.ambari.logsearch.common.SearchCriteria;
-import org.apache.ambari.logsearch.dao.SolrDaoBase;
-import org.apache.ambari.logsearch.query.QueryGeneration;
-import org.apache.ambari.logsearch.util.ConfigUtil;
-import org.apache.ambari.logsearch.util.DateUtil;
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
-import org.apache.ambari.logsearch.util.StringUtil;
-import org.apache.ambari.logsearch.view.VBarDataList;
-import org.apache.ambari.logsearch.view.VBarGraphData;
-import org.apache.ambari.logsearch.view.VNameValue;
-import org.apache.log4j.Logger;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.response.FacetField;
-import org.apache.solr.client.solrj.response.FacetField.Count;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.client.solrj.response.RangeFacet;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
-
-@Component
-public class GraphDataGnerator extends GraphDataGeneratorBase {
-
-  @Autowired
-  StringUtil stringUtil;
-
-  @Autowired
-  QueryGeneration queryGenerator;
-
-  @Autowired
-  RESTErrorUtil restErrorUtil;
-
-  @Autowired
-  DateUtil dateUtil;
-
-  private static Logger logger = Logger.getLogger(GraphDataGnerator.class);
-
-  public String getAnyGraphData(SearchCriteria searchCriteria,
-                                SolrDaoBase solrDaoBase, SolrQuery solrQuery) {
-    // X axis credentials
-    String xAxisField = (String) searchCriteria.getParamValue("xAxis");
-    String stackField = (String) searchCriteria.getParamValue("stackBy");
-    String from = (String) searchCriteria.getParamValue("from");
-    String to = (String) searchCriteria.getParamValue("to");
-    String unit = (String) searchCriteria.getParamValue("unit");
-    String suffix = (String) searchCriteria.getParamValue("suffix");
-    String typeXAxis = ConfigUtil.schemaFieldsName.get(xAxisField + suffix);
-    typeXAxis = (stringUtil.isEmpty(typeXAxis)) ? "string" : typeXAxis;
-
-    // Y axis credentials
-    String yAxisField = (String) searchCriteria.getParamValue("yAxis");
-
-    searchCriteria.addParam("type", typeXAxis);
-    String fieldTime = (String) searchCriteria.getParamValue("feildTime");
-
-    int garphType = getGraphType(searchCriteria);
-
-    switch (garphType) {
-      case 1:
-        return normalGraph(xAxisField, yAxisField, from, to, solrDaoBase,
-          typeXAxis, fieldTime, solrQuery);
-      case 2:
-        return rangeNonStackGraph(xAxisField, yAxisField, from, to, unit,
-          solrDaoBase, typeXAxis, fieldTime, solrQuery);
-      case 3:
-        return nonRangeStackGraph(xAxisField, yAxisField, stackField, from,
-          to, solrDaoBase, typeXAxis, fieldTime, solrQuery);
-      case 4:
-        return rangeStackGraph(xAxisField, yAxisField, stackField, from,
-          to, unit, solrDaoBase, typeXAxis, fieldTime, solrQuery);
-      default:
-        return null;
-    }
-  }
-
-  private int getGraphType(SearchCriteria searchCriteria) {
-    // X axis credentials
-    String xAxisField = (String) searchCriteria.getParamValue("xAxis");
-    String stackField = (String) searchCriteria.getParamValue("stackBy");
-    String from = (String) searchCriteria.getParamValue("from");
-    String to = (String) searchCriteria.getParamValue("to");
-    String xType = (String) searchCriteria.getParamValue("type");
-
-    if (xType == null)
-      return 0;
-
-    // Y axis credentials
-    String yAxisField = (String) searchCriteria.getParamValue("yAxis");
-    if (stringUtil.isEmpty(xAxisField) || stringUtil.isEmpty(yAxisField)) {
-    }
-    // Normal Graph Type
-    else if (stringUtil.isEmpty(stackField) && !stringUtil.isEmpty(to)
-      && !stringUtil.isEmpty(from)
-      && !(xType.contains("date") || xType.contains("time")))
-      return 1;
-      // Range(Non-Stack) Graph Type
-    else if (stringUtil.isEmpty(stackField) && !stringUtil.isEmpty(to)
-      && !stringUtil.isEmpty(from)
-      && (xType.contains("date") || xType.contains("time")))
-      return 2;
-      // Non-Range Stack Graph Type
-    else if (!stringUtil.isEmpty(stackField) && !stringUtil.isEmpty(to)
-      && !stringUtil.isEmpty(from)
-      && !(xType.contains("date") || xType.contains("time")))
-      return 3;
-      // Range Stack GraphType
-    else if (!stringUtil.isEmpty(stackField) && !stringUtil.isEmpty(to)
-      && !stringUtil.isEmpty(from)
-      && (xType.contains("date") || xType.contains("time")))
-      return 4;
-
-    return 0;
-  }
-
-  @SuppressWarnings("unchecked")
-  private String normalGraph(String xAxisField, String yAxisField,
-                             String from, String to, SolrDaoBase solrDaoBase, String typeXAxis,
-                             String fieldTime, SolrQuery solrQuery) {
-    VBarDataList dataList = new VBarDataList();
-    Collection<VBarGraphData> vBarGraphDatas = new ArrayList<VBarGraphData>();
-    VBarGraphData vBarGraphData = new VBarGraphData();
-    Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
-
-    queryGenerator.setMainQuery(solrQuery, null);
-    queryGenerator.setSingleIncludeFilter(solrQuery, fieldTime, "[" + from
-      + " TO " + to + "]");
-    if (typeXAxis.contains("string")
-      || typeXAxis.contains("key_lower_case")
-      || typeXAxis.contains("text")) {
-      queryGenerator.setFacetField(solrQuery, xAxisField);
-      try {
-        QueryResponse response = solrDaoBase.process(solrQuery);
-        Long count = response.getResults().getNumFound();
-
-        if (response != null && count > 0) {
-          FacetField facetField = response.getFacetField(xAxisField);
-          if (facetField != null) {
-            List<Count> countValues = facetField.getValues();
-            for (Count cnt : countValues) {
-              VNameValue vNameValue = new VNameValue();
-              vNameValue.setName(cnt.getName());
-              vNameValue.setValue("" + cnt.getCount());
-              vNameValues.add(vNameValue);
-            }
-            vBarGraphData.setDataCounts(vNameValues);
-            vBarGraphData.setName(xAxisField);
-            vBarGraphDatas.add(vBarGraphData);
-            dataList.setGraphData(vBarGraphDatas);
-          }
-        }
-        return convertObjToString(dataList);
-      } catch (SolrException | SolrServerException | IOException e) {
-
-      }
-    } else {
-      queryGenerator.setRowCount(solrQuery, 0);
-      String yAxis = yAxisField.contains("count") ? "sum" : yAxisField;
-      String jsonQuery = queryGenerator
-        .buildJSONFacetAggregatedFuncitonQuery(yAxis,
-          xAxisField);
-      queryGenerator.setJSONFacet(solrQuery, jsonQuery);
-      try {
-        QueryResponse response = solrDaoBase.process(solrQuery);
-
-        SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
-          .getResponse().get("facets");
-        if (jsonFacetResponse.toString().equals("{count=0}"))
-          return convertObjToString(dataList);
-        VNameValue value = new VNameValue();
-        String sum = jsonFacetResponse.getVal(1).toString();
-        value.setName(xAxisField);
-        value.setValue(sum.substring(0, sum.indexOf(".")));
-
-        vNameValues.add(value);
-        vBarGraphData.setDataCounts(vNameValues);
-        vBarGraphData.setName(xAxisField);
-        vBarGraphDatas.add(vBarGraphData);
-        dataList.setGraphData(vBarGraphDatas);
-        return convertObjToString(dataList);
-      } catch (SolrException | SolrServerException | IOException e) {
-
-      }
-    }
-
-    return null;
-  }
-
-  @SuppressWarnings("unchecked")
-  private String nonRangeStackGraph(String xAxisField, String yAxisField,
-                                    String stackField, String from, String to, SolrDaoBase solrDaoBase,
-                                    String typeXAxis, String fieldTime, SolrQuery solrQuery) {
-    VBarDataList dataList = new VBarDataList();
-    Collection<VBarGraphData> vGraphData = new ArrayList<VBarGraphData>();
-
-    String mainQuery = queryGenerator.buildInclusiveRangeFilterQuery(
-      fieldTime, from, to);
-    queryGenerator.setMainQuery(solrQuery, mainQuery);
-    queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
-    String jsonQuery = "";
-
-    if (isTypeNumber(typeXAxis)) {
-      String function = (yAxisField.contains("count")) ? "sum"
-        : yAxisField;
-      jsonQuery = queryGenerator.buidlJSONFacetRangeQueryForNumber(
-        stackField, xAxisField, function);
-    } else {
-      jsonQuery = queryGenerator.buildJsonFacetTermsRangeQuery(
-        stackField, xAxisField);
-    }
-
-    try {
-      queryGenerator.setJSONFacet(solrQuery, jsonQuery);
-      dataList.setGraphData(vGraphData);
-
-      QueryResponse response = solrDaoBase.process(solrQuery);
-      if (response == null) {
-        response = new QueryResponse();
-      }
-      Long count = response.getResults().getNumFound();
-      if (count <= 0)
-        return convertObjToString(dataList);
-
-      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
-        .getResponse().get("facets");
-      if (jsonFacetResponse == null
-        || jsonFacetResponse.toString().equals("{count=0}")) {
-        return convertObjToString(dataList);
-      }
-
-      extractNonRangeStackValuesFromBucket(jsonFacetResponse, stackField, vGraphData,
-        typeXAxis);
-
-      if (LogSearchConstants.SOLR_LEVEL.equalsIgnoreCase(stackField)
-        && LogSearchConstants.SOLR_LEVEL
-        .equalsIgnoreCase(xAxisField)) {
-        Collection<VBarGraphData> levelVGraphData = dataList.getGraphData();
-        List<String> logLevels = new ArrayList<String>();
-
-        logLevels.add(LogSearchConstants.FATAL);
-        logLevels.add(LogSearchConstants.ERROR);
-        logLevels.add(LogSearchConstants.WARN);
-        logLevels.add(LogSearchConstants.INFO);
-        logLevels.add(LogSearchConstants.DEBUG);
-        logLevels.add(LogSearchConstants.TRACE);
-
-        for (VBarGraphData garphData : levelVGraphData) {
-          Collection<VNameValue> valueList = garphData.getDataCount();
-          Collection<VNameValue> valueListSorted = new ArrayList<VNameValue>();
-          for (String level : logLevels) {
-            String val = "0";
-            for (VNameValue value : valueList) {
-              if (value.getName().equalsIgnoreCase(level)) {
-                val = value.getValue();
-                break;
-              }
-            }
-            VNameValue v1 = new VNameValue();
-            v1.setName(level.toUpperCase());
-            v1.setValue(val);
-            valueListSorted.add(v1);
-          }
-          garphData.setDataCounts(valueListSorted);
-        }
-      }
-
-      return convertObjToString(dataList);
-    } catch (SolrException | IOException | SolrServerException e) {
-      logger.error(e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.DATA_NOT_FOUND);
-    }
-
-  }
-
-  @SuppressWarnings("unchecked")
-  private String rangeNonStackGraph(String xAxisField, String yAxisField,
-                                    String from, String to, String unit, SolrDaoBase solrDaoBase,
-                                    String typeXAxis, String fieldTime, SolrQuery solrQuery) {
-    VBarDataList dataList = new VBarDataList();
-    Collection<VBarGraphData> vBarGraphDatas = new ArrayList<VBarGraphData>();
-    VBarGraphData vBarGraphData = new VBarGraphData();
-    Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
-
-    queryGenerator.setMainQuery(solrQuery, null);
-
-    if (isTypeNumber(typeXAxis)) {
-      queryGenerator.setSingleRangeFilter(solrQuery, fieldTime, from, to);
-      return normalGraph(xAxisField, yAxisField, from, to, solrDaoBase,
-        typeXAxis, fieldTime, solrQuery);
-    } else {
-      try {
-        queryGenerator.setFacetRange(solrQuery, xAxisField, from, to,
-          unit);
-        QueryResponse response = solrDaoBase.process(solrQuery);
-        if (response == null)
-          response = new QueryResponse();
-        Long count = response.getResults().getNumFound();
-        if (count > 0) {
-
-          @SuppressWarnings("rawtypes")
-          List<RangeFacet> rangeFacet = response.getFacetRanges();
-          if (rangeFacet == null)
-            return convertObjToString(dataList);
-
-          List<RangeFacet.Count> listCount = rangeFacet.get(0)
-            .getCounts();
-          if (listCount != null) {
-            for (RangeFacet.Count cnt : listCount) {
-              VNameValue vNameValue = new VNameValue();
-              vNameValue.setName(cnt.getValue());
-              vNameValue.setValue("" + cnt.getCount());
-              vNameValues.add(vNameValue);
-            }
-            vBarGraphData.setDataCounts(vNameValues);
-            vBarGraphDatas.add(vBarGraphData);
-            vBarGraphData.setName(xAxisField);
-            dataList.setGraphData(vBarGraphDatas);
-          }
-        }
-        return convertObjToString(dataList);
-      } catch (SolrException | SolrServerException | IOException e) {
-
-      }
-    }
-    return null;
-  }
-
-  @SuppressWarnings("unchecked")
-  private String rangeStackGraph(String xAxisField, String yAxisField,
-                                 String stackField, String from, String to, String unit,
-                                 SolrDaoBase solrDaoBase, String typeXAxis, String fieldTime, SolrQuery solrQuery) {
-    VBarDataList dataList = new VBarDataList();
-    List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();
-
-    queryGenerator.setMainQuery(solrQuery, null);
-    queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
-
-    String jsonHistogramQuery = queryGenerator
-      .buildJSONFacetTermTimeRangeQuery(stackField, xAxisField, from,
-        to, unit).replace("\\", "");
-
-    try {
-      solrQuery.set("json.facet", jsonHistogramQuery);
-      queryGenerator.setRowCount(solrQuery, 0);
-      QueryResponse response = solrDaoBase.process(solrQuery);
-      if (response == null)
-        response = new QueryResponse();
-
-      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
-        .getResponse().get("facets");
-
-      if (jsonFacetResponse == null
-        || jsonFacetResponse.toString().equals("{count=0}"))
-        return convertObjToString(dataList);
-
-      extractRangeStackValuesFromBucket(jsonFacetResponse, "x", "y", histogramData);
-
-      dataList.setGraphData(histogramData);
-      return convertObjToString(dataList);
-
-    } catch (SolrException | IOException | SolrServerException e) {
-    }
-
-    return null;
-  }
-}
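
Both the removed generator above and its replacement post-process level facets
the same way: counts are re-emitted in a fixed severity order (FATAL through
TRACE) and levels with no hits are filled in with "0", so the bar chart always
contains all six buckets. The sketch below isolates that zero-fill step on
plain name/count pairs; the Map-based form is illustrative only, the real code
does the same thing over VNameValue lists.

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class LevelZeroFillSketch {

  static final List<String> LEVELS =
      Arrays.asList("FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE");

  // Re-emit counts in severity order, defaulting missing levels to "0".
  static Map<String, String> zeroFill(Map<String, String> rawCounts) {
    Map<String, String> ordered = new LinkedHashMap<String, String>();
    for (String level : LEVELS) {
      String count = "0";                       // default when the facet had no bucket
      for (Map.Entry<String, String> entry : rawCounts.entrySet()) {
        if (entry.getKey().equalsIgnoreCase(level)) {
          count = entry.getValue();
          break;
        }
      }
      ordered.put(level, count);
    }
    return ordered;
  }

  public static void main(String[] args) {
    Map<String, String> raw = new LinkedHashMap<String, String>();
    raw.put("warn", "12");
    raw.put("ERROR", "3");
    System.out.println(zeroFill(raw));
    // -> {FATAL=0, ERROR=3, WARN=12, INFO=0, DEBUG=0, TRACE=0}
  }
}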

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
index e16faa0..b5efd24 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
@@ -37,8 +37,7 @@ import org.apache.ambari.logsearch.common.ManageStartEndTime;
 import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.dao.AuditSolrDao;
-import org.apache.ambari.logsearch.graph.GraphDataGnerator;
-import org.apache.ambari.logsearch.query.QueryGeneration;
+import org.apache.ambari.logsearch.graph.GraphDataGenerator;
 import org.apache.ambari.logsearch.util.BizUtil;
 import org.apache.ambari.logsearch.util.ConfigUtil;
 import org.apache.ambari.logsearch.util.DateUtil;
@@ -51,6 +50,7 @@ import org.apache.ambari.logsearch.view.VBarGraphData;
 import org.apache.ambari.logsearch.view.VGroupList;
 import org.apache.ambari.logsearch.view.VNameValue;
 import org.apache.ambari.logsearch.view.VNameValueList;
+import org.apache.ambari.logsearch.view.VResponse;
 import org.apache.ambari.logsearch.view.VSolrLogList;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
@@ -72,9 +72,6 @@ public class AuditMgr extends MgrBase {
   static Logger logger = Logger.getLogger(AuditMgr.class);
 
   @Autowired
-  QueryGeneration queryGenerator;
-
-  @Autowired
   AuditSolrDao auditSolrDao;
 
   @Autowired
@@ -93,69 +90,20 @@ public class AuditMgr extends MgrBase {
   DateUtil dateUtil;
 
   @Autowired
-  GraphDataGnerator graphDataGnerator;
-
-  public String getAllSolrFields() {
-
-    Collection<String> fieldNames = new ArrayList<String>();
-    SolrQuery solrQuery = new SolrQuery();
-
-    int numberDocument = 0;
-    int size = 0;
-
-    try {
-      queryGenerator.setMainQuery(solrQuery, null);
-      queryGenerator.setRowCount(solrQuery, 10);
-
-      SearchCriteria searchCriteria = new SearchCriteria();
-      searchCriteria.setSortBy(LogSearchConstants.AUDIT_EVTTIME);
-      searchCriteria.setSortType(SolrQuery.ORDER.asc.toString());
-
-      queryGenerator.setSortOrderDefaultServiceLog(solrQuery,
-        searchCriteria);
-      SolrDocumentList docList = auditSolrDao.process(solrQuery)
-        .getResults();
-
-      if (docList.size() > 0) {
-        for (int i = 0; i < 10; i++) {
-          if (docList.get(i).size() > size) {
-            size = docList.get(i).size();
-            numberDocument = i;
-          }
-        }
-        fieldNames = docList.get(numberDocument).getFieldNames();
-        return convertObjToString(fieldNames);
-      }
-      return convertObjToString(fieldNames);
-
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error(e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
-    }
-  }
+  GraphDataGenerator graphDataGenerator;
 
   public String getLogs(SearchCriteria searchCriteria) {
-    SolrQuery solrQuery = queryGenerator
-      .commonAuditFilterQuery(searchCriteria);
-    try {
-      QueryResponse response = auditSolrDao.process(solrQuery);
-      SolrDocumentList docList = response.getResults();
-      VSolrLogList collection = new VSolrLogList(docList);
-      collection.setStartIndex((int) docList.getStart());
-      collection.setTotalCount(docList.getNumFound());
-      collection.setPageSize(searchCriteria.getMaxRows());
-      return convertObjToString(collection);
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
-    }
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
+    VSolrLogList collection = getLogAsPaginationProvided(solrQuery,
+        auditSolrDao);
+    return convertObjToString(collection);
+
   }
 
   public SolrDocumentList getComponents(SearchCriteria searchCriteria) {
     SolrQuery solrQuery = queryGenerator
       .commonAuditFilterQuery(searchCriteria);
+    SolrDocumentList docList = new SolrDocumentList();
     try {
       queryGenerator.setFacetField(solrQuery,
         LogSearchConstants.AUDIT_COMPONENT);
@@ -166,12 +114,12 @@ public class AuditMgr extends MgrBase {
 
       QueryResponse queryResponse = auditSolrDao.process(solrQuery);
       if (queryResponse == null) {
-        queryResponse = new QueryResponse();
+        return docList;
       }
 
       facetFields = queryResponse.getFacetFields();
       if (facetFields == null) {
-        return new SolrDocumentList();
+        return docList;
       }
       if (!facetFields.isEmpty()) {
         facetField = facetFields.get(0);
@@ -179,7 +127,7 @@ public class AuditMgr extends MgrBase {
       if (facetField != null) {
         componentsCount = facetField.getValues();
       }
-      SolrDocumentList docList = new SolrDocumentList();
+    
       for (Count compnonet : componentsCount) {
         SolrDocument solrDocument = new SolrDocument();
         solrDocument.addField("type", compnonet.getName());
@@ -188,25 +136,17 @@ public class AuditMgr extends MgrBase {
       return docList;
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
   public String getAuditComponents(SearchCriteria searchCriteria) {
     VGroupList vGroupList = new VGroupList();
+    SolrDocumentList docList = getComponents(searchCriteria);
 
-    try {
-
-      SolrDocumentList docList = getComponents(searchCriteria);
-
-      vGroupList.setGroupDocuments(docList);
-      return convertObjToString(vGroupList);
-    } catch (SolrException | IOException e) {
-      logger.error("Error", e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
-    }
+    vGroupList.setGroupDocuments(docList);
+    return convertObjToString(vGroupList);
   }
 
   @SuppressWarnings("unchecked")
@@ -214,9 +154,11 @@ public class AuditMgr extends MgrBase {
     VBarDataList dataList = new VBarDataList();
     SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
 
-    String from = (String) searchCriteria.getParamValue("startTime");
-    String to = (String) searchCriteria.getParamValue("endTime");
-    String unit = (String) searchCriteria.getParamValue("unit");
+    String from = getFrom((String) searchCriteria.getParamValue("startTime"));
+    String to = getTo((String) searchCriteria.getParamValue("endTime"));
+    String unit = getUnit((String) searchCriteria.getParamValue("unit"));
+    
+    
 
     List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();
     String jsonHistogramQuery = queryGenerator.buildJSONFacetTermTimeRangeQuery(
@@ -228,15 +170,16 @@ public class AuditMgr extends MgrBase {
       queryGenerator.setJSONFacet(solrQuery, jsonHistogramQuery);
       queryGenerator.setRowCount(solrQuery, 0);
       QueryResponse response = auditSolrDao.process(solrQuery);
-      if (response == null)
-        response = new QueryResponse();
-
+      if (response == null){
+        return convertObjToString(dataList);
+      }
       SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
         .getResponse().get("facets");
 
       if (jsonFacetResponse == null
-        || jsonFacetResponse.toString().equals("{count=0}"))
+        || jsonFacetResponse.toString().equals("{count=0}")){
         return convertObjToString(dataList);
+      }
 
       extractValuesFromBucket(jsonFacetResponse, "x", "y",
         histogramData);
@@ -246,17 +189,19 @@ public class AuditMgr extends MgrBase {
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error(e);
-      throw restErrorUtil.createRESTException("No Request Parameter",
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
 
     }
   }
 
   public String getTopAuditFieldCount(SearchCriteria searchCriteria) {
+    int topCounts = 10;
     Integer top = (Integer) searchCriteria.getParamValue("top");
     String facetField = (String) searchCriteria.getParamValue("field");
-    if (top == null)
-      top = new Integer(10);
+    if (top == null){
+      top = Integer.valueOf(topCounts);
+    }
     SolrQuery solrQuery = queryGenerator
       .commonAuditFilterQuery(searchCriteria);
     try {
@@ -266,11 +211,15 @@ public class AuditMgr extends MgrBase {
       VNameValueList nameValueList = new VNameValueList(nameValues);
 
       queryGenerator.setFacetField(solrQuery, facetField);
-      queryGenerator.setFacetSort(solrQuery, "count");
+      queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_COUNT);
       queryGenerator.setFacetLimit(solrQuery, top.intValue());
 
       List<Count> countList = new ArrayList<FacetField.Count>();
       QueryResponse queryResponse = auditSolrDao.process(solrQuery);
+      if(queryResponse == null){
+        return convertObjToString(nameValueList);
+      }
+      
       if (queryResponse.getFacetField(facetField) != null) {
         FacetField queryFacetField = queryResponse
           .getFacetField(facetField);
@@ -290,13 +239,14 @@ public class AuditMgr extends MgrBase {
 
     } catch (SolrException | IOException | SolrServerException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
-  @SuppressWarnings("unchecked")
+  @SuppressWarnings({ "unchecked", "rawtypes" })
   public String getLiveLogCounts() {
+    VNameValueList nameValueList = new VNameValueList();
     SolrQuery solrQuery = new SolrQuery();
     solrQuery.setParam("event", "/getLiveLogsCount");
     try {
@@ -313,11 +263,18 @@ public class AuditMgr extends MgrBase {
       List<RangeFacet.Count> listCount;
 
       QueryResponse response = auditSolrDao.process(solrQuery);
-      @SuppressWarnings("rawtypes")
+ 
       List<RangeFacet> rangeFacet = response.getFacetRanges();
-      if (rangeFacet == null)
-        return convertObjToString(new VNameValueList());
-      listCount = rangeFacet.get(0).getCounts();
+      if (rangeFacet == null || rangeFacet.isEmpty()) {
+        return convertObjToString(nameValueList);
+      }
+      RangeFacet range = rangeFacet.get(0);
+      if (range == null) {
+        return convertObjToString(nameValueList);
+      }
+      listCount = range.getCounts();
 
       List<VNameValue> nameValues = new ArrayList<VNameValue>();
       int count = 0;
@@ -328,15 +285,14 @@ public class AuditMgr extends MgrBase {
         nameValues.add(nameValue);
         count++;
       }
-      VNameValueList nameValueList = new VNameValueList(nameValues);
-
+      nameValueList.setVNameValues(nameValues);
       return convertObjToString(nameValueList);
 
     } catch (SolrException | SolrServerException | ParseException
       | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -345,13 +301,17 @@ public class AuditMgr extends MgrBase {
     String jsonUserQuery = "{Users:{type:terms, field:reqUser, facet:{ Repo:{ type:terms, field:repo, facet:{eventCount:\"sum(event_count)\"}}}}}";
     SolrQuery solrQuery = queryGenerator
       .commonAuditFilterQuery(searchCriteria);
-    solrQuery.set("json.facet", jsonUserQuery);
+    queryGenerator.setJSONFacet(solrQuery, jsonUserQuery);
     queryGenerator.setRowCount(solrQuery, 0);
     try {
+      VBarDataList vBarDataList = new VBarDataList();
       QueryResponse queryResponse = auditSolrDao.process(solrQuery);
+      if(queryResponse == null){
+        return convertObjToString(vBarDataList);
+      }
 
       NamedList<Object> namedList = queryResponse.getResponse();
-      VBarDataList vBarDataList = new VBarDataList();
+      
       if (namedList == null) {
         return convertObjToString(vBarDataList);
       }
@@ -359,13 +319,19 @@ public class AuditMgr extends MgrBase {
       @SuppressWarnings("unchecked")
       SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList
         .get("facets");
-      vBarDataList = bizUtil.buildSummaryForTopCounts(jsonFacetResponse);
+      if(jsonFacetResponse == null){
+        return convertObjToString(vBarDataList);
+      }
+      if(jsonFacetResponse.toString().equals("{count=0}")){
+        return convertObjToString(vBarDataList);
+      }
+      vBarDataList = bizUtil.buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
       return convertObjToString(vBarDataList);
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error("Error during solrQuery=" + e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
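
In the top-users hunks above (terms facet on reqUser), the raw json.facet parameter moves behind queryGenerator.setJSONFacet(), the query response is checked for null, and a facet block that reduces to {count=0} is treated as an empty VBarDataList. The facet itself is a nested terms facet with a sum(event_count) aggregation; a small sketch of assembling that string parametrically, where the helper below is hypothetical and not the LogSearch QueryGeneration API:

    public class JsonFacetSketch {

      // Builds a nested terms facet of the same shape as jsonUserQuery above
      // (modulo whitespace): outer terms facet on outerField, inner terms facet
      // on innerField, each bucket summing event_count. Pure string assembly.
      static String nestedTermsFacet(String outerLabel, String outerField,
                                     String innerLabel, String innerField) {
        return "{" + outerLabel + ":{type:terms,field:" + outerField
            + ",facet:{" + innerLabel + ":{type:terms,field:" + innerField
            + ",facet:{eventCount:\"sum(event_count)\"}}}}}";
      }

      public static void main(String[] args) {
        String jsonUserQuery = nestedTermsFacet("Users", "reqUser", "Repo", "repo");
        System.out.println(jsonUserQuery);

        // The diff treats a facet block carrying only a zero count as "no data";
        // the equivalent check on the raw string form:
        String facets = "{count=0}";
        boolean empty = "{count=0}".equals(facets);
        System.out.println("empty facet response: " + empty);
      }
    }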
 
@@ -374,13 +340,16 @@ public class AuditMgr extends MgrBase {
     String jsonUserQuery = "{Users:{type:terms,field:resource,facet:{Repo:{type:terms,field:repo,facet:{eventCount:\"sum(event_count)\"}}}}}";
     SolrQuery solrQuery = queryGenerator
       .commonAuditFilterQuery(searchCriteria);
-    solrQuery.set("json.facet", jsonUserQuery);
+    queryGenerator.setJSONFacet(solrQuery, jsonUserQuery);
     queryGenerator.setRowCount(solrQuery, 0);
     try {
+      VBarDataList vBarDataList = new VBarDataList();
       QueryResponse queryResponse = auditSolrDao.process(solrQuery);
+      if(queryResponse == null){
+        return convertObjToString(vBarDataList);
+      }
 
       NamedList<Object> namedList = queryResponse.getResponse();
-      VBarDataList vBarDataList = new VBarDataList();
       if (namedList == null) {
         return convertObjToString(vBarDataList);
       }
@@ -389,22 +358,23 @@ public class AuditMgr extends MgrBase {
       SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList
         .get("facets");
 
-      vBarDataList = bizUtil.buildSummaryForTopCounts(jsonFacetResponse);
+      vBarDataList = bizUtil.buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
       return convertObjToString(vBarDataList);
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
   @SuppressWarnings("unchecked")
   public String getRequestUserLineGraph(SearchCriteria searchCriteria) {
 
-    String from = (String) searchCriteria.getParamValue("startTime");
-    String to = (String) searchCriteria.getParamValue("endTime");
-    String unit = (String) searchCriteria.getParamValue("unit");
+    String from = getFrom((String) searchCriteria.getParamValue("startTime"));
+    String to = getTo((String) searchCriteria.getParamValue("endTime"));
+    String unit = getUnit((String) searchCriteria.getParamValue("unit"));
+    
     SolrQuery solrQuery = queryGenerator
       .commonAuditFilterQuery(searchCriteria);
 
@@ -420,19 +390,19 @@ public class AuditMgr extends MgrBase {
       .replace("\\", "");
 
     try {
-      solrQuery.set("json.facet", jsonHistogramQuery);
+      queryGenerator.setJSONFacet(solrQuery, jsonHistogramQuery);
       queryGenerator.setRowCount(solrQuery, 0);
       QueryResponse response = auditSolrDao.process(solrQuery);
-      if (response == null)
-        response = new QueryResponse();
-
+      if (response == null){
+        return convertObjToString(dataList);
+      }
       SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
         .getResponse().get("facets");
 
       if (jsonFacetResponse == null
-        || jsonFacetResponse.toString().equals("{count=0}"))
+        || jsonFacetResponse.toString().equals("{count=0}")){
         return convertObjToString(dataList);
-
+      }
       extractValuesFromBucket(jsonFacetResponse, "x", "y", histogramData);
 
       dataList.setGraphData(histogramData);
@@ -440,13 +410,14 @@ public class AuditMgr extends MgrBase {
 
     } catch (SolrException | IOException | SolrServerException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
 
   }
 
   public SolrDocumentList getRequestUser(SearchCriteria searchCriteria) {
+    SolrDocumentList docList = new SolrDocumentList();
     SolrQuery solrQuery = queryGenerator
       .commonAuditFilterQuery(searchCriteria);
     try {
@@ -459,12 +430,12 @@ public class AuditMgr extends MgrBase {
 
       QueryResponse queryResponse = auditSolrDao.process(solrQuery);
       if (queryResponse == null) {
-        queryResponse = new QueryResponse();
+        return docList;
       }
 
       facetFields = queryResponse.getFacetFields();
       if (facetFields == null) {
-        return new SolrDocumentList();
+        return docList;
       }
       if (!facetFields.isEmpty()) {
         facetField = facetFields.get(0);
@@ -472,7 +443,7 @@ public class AuditMgr extends MgrBase {
       if (facetField != null) {
         componentsCount = facetField.getValues();
       }
-      SolrDocumentList docList = new SolrDocumentList();
+     
       for (Count compnonet : componentsCount) {
         SolrDocument solrDocument = new SolrDocument();
         solrDocument.addField("type", compnonet.getName());
@@ -481,22 +452,22 @@ public class AuditMgr extends MgrBase {
       return docList;
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
   public String getAuditLogsSchemaFieldsName() {
     String suffix = PropertiesUtil.getProperty("auditlog.solr.core.logs");
     String excludeArray[] = PropertiesUtil
-      .getPropertyStringList("auditlog.exclude.columnlist");
+        .getPropertyStringList("auditlog.exclude.columnlist");
     List<String> fieldNames = new ArrayList<String>();
     HashMap<String, String> uiFieldColumnMapping = new HashMap<String, String>();
     ConfigUtil.getSchemaFieldsName(suffix, excludeArray, fieldNames);
 
     for (String fieldName : fieldNames) {
       String uiField = ConfigUtil.auditLogsColumnMapping.get(fieldName
-        + LogSearchConstants.SOLR_SUFFIX);
+          + LogSearchConstants.SOLR_SUFFIX);
       if (uiField == null) {
         uiFieldColumnMapping.put(fieldName, fieldName);
       } else {
@@ -504,33 +475,24 @@ public class AuditMgr extends MgrBase {
       }
     }
 
-    try {
-      uiFieldColumnMapping = bizUtil
-        .sortHashMapByValuesD(uiFieldColumnMapping);
-      return convertObjToString(uiFieldColumnMapping);
-    } catch (IOException e) {
-      logger.error(e);
-    }
-    throw restErrorUtil.createRESTException(
-      "Cache is Empty for FieldsName", MessageEnums.DATA_NOT_FOUND);
+    uiFieldColumnMapping = bizUtil.sortHashMapByValues(uiFieldColumnMapping);
+    return convertObjToString(uiFieldColumnMapping);
+
   }
 
   public String getAnyGraphData(SearchCriteria searchCriteria) {
-    searchCriteria.addParam("feildTime", LogSearchConstants.AUDIT_EVTTIME);
+    searchCriteria.addParam("fieldTime", LogSearchConstants.AUDIT_EVTTIME);
     String suffix = PropertiesUtil.getProperty("auditlog.solr.core.logs");
     searchCriteria.addParam("suffix", suffix);
-    SolrQuery solrQuery = queryGenerator
-      .commonAuditFilterQuery(searchCriteria);
-    String result = graphDataGnerator.getAnyGraphData(searchCriteria,
-      auditSolrDao, solrQuery);
-    if (result != null)
-      return result;
-    try {
-      return convertObjToString(new VBarDataList());
-    } catch (IOException e) {
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
+    VBarDataList result = graphDataGenerator.getAnyGraphData(searchCriteria,
+        auditSolrDao, solrQuery);
+    if (result == null) {
+      result = new VBarDataList();
     }
+
+    return convertObjToString(result);
+
   }
 
   @SuppressWarnings("unchecked")
@@ -586,20 +548,27 @@ public class AuditMgr extends MgrBase {
     String dataFormat = (String) searchCriteria.getParamValue("format");
     try {
       QueryResponse queryResponse = auditSolrDao.process(solrQuery);
+      if(queryResponse == null){
+        VResponse response = new VResponse();
+        response.setMsgDesc("Query was not able to execute "+solrQuery);
+        throw restErrorUtil.createRESTException(response);
+      }
 
       NamedList<Object> namedList = queryResponse.getResponse();
+      if(namedList == null){
+        VResponse response = new VResponse();
+        response.setMsgDesc("Query was not able to execute "+solrQuery);
+        throw restErrorUtil.createRESTException(response);
+      }
       VBarDataList vBarUserDataList = new VBarDataList();
       VBarDataList vBarResourceDataList = new VBarDataList();
-      if (namedList == null) {
-
-      }
 
       SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList
         .get("facets");
       vBarUserDataList = bizUtil
-        .buildSummaryForTopCounts(jsonFacetResponse);
+        .buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
       vBarResourceDataList = bizUtil
-        .buildSummaryForResourceCounts(jsonFacetResponse);
+        .buildSummaryForTopCounts(jsonFacetResponse,"y","x");
       String data = "";
       String summary = "";
       if ("text".equals(dataFormat)) {
@@ -698,8 +667,8 @@ public class AuditMgr extends MgrBase {
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error("Error during solrQuery=" + e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
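
A change repeated in nearly every catch block above: the REST error no longer carries e.getMessage() but the fixed MessageEnums.SOLR_ERROR text, so Solr and IO details stay in the server log only. A compact sketch of that policy as a reusable wrapper; the Callable-based helper, the RestError type, and the SOLR_ERROR wording below are assumptions, not the LogSearch RESTErrorUtil/MessageEnums API:

    import java.util.concurrent.Callable;
    import java.util.logging.Level;
    import java.util.logging.Logger;

    public class UniformErrorSketch {

      private static final Logger LOG = Logger.getLogger("AuditMgrSketch");
      // Fixed, user-safe text; the real wording comes from MessageEnums.SOLR_ERROR.
      private static final String SOLR_ERROR = "Error while connecting to Solr";

      // Hypothetical REST-facing exception: carries only the generic message.
      static final class RestError extends RuntimeException {
        RestError(String msg) { super(msg); }
      }

      // Runs a Solr-backed operation; on failure, logs the real cause and rethrows
      // a uniform error so internals never leak into the HTTP response.
      static <T> T withSolr(String what, Callable<T> call) {
        try {
          return call.call();
        } catch (Exception e) {
          LOG.log(Level.SEVERE, "Error during " + what, e);  // full detail stays server-side
          throw new RestError(SOLR_ERROR);                   // client sees the generic message
        }
      }

      public static void main(String[] args) {
        try {
          withSolr("solrQuery=q=*:*", () -> { throw new java.io.IOException("connection refused"); });
        } catch (RestError e) {
          System.out.println("client sees: " + e.getMessage());
        }
      }
    }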
 
@@ -725,10 +694,19 @@ public class AuditMgr extends MgrBase {
         LogSearchConstants.AUDIT_COMPONENT);
       QueryResponse serviceLoadResponse = auditSolrDao
         .process(serivceLoadQuery);
-      if (serviceLoadResponse == null)
-        return "[]";
-      List<Count> serviceLoadFacets = serviceLoadResponse.getFacetField(
-        LogSearchConstants.AUDIT_COMPONENT).getValues();
+      if (serviceLoadResponse == null){
+        return convertObjToString(dataList);
+      }
+      FacetField serviceFacetField =serviceLoadResponse.getFacetField(
+          LogSearchConstants.AUDIT_COMPONENT);
+      if(serviceFacetField == null){
+        return convertObjToString(dataList);
+      }
+      
+      List<Count> serviceLoadFacets = serviceFacetField.getValues();
+      if(serviceLoadFacets == null){
+        return convertObjToString(dataList);
+      }
       for (Count cnt : serviceLoadFacets) {
         List<VNameValue> valueList = new ArrayList<VNameValue>();
         VBarGraphData vBarGraphData = new VBarGraphData();
@@ -746,8 +724,8 @@ public class AuditMgr extends MgrBase {
 
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
     
   /*  


[5/9] ambari git commit: AMBARI-16034. Incremental changes to LogSearch to bring it up to date in the trunk (Dharmesh Makwana via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
index 765c639..0442cf9 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
@@ -40,11 +40,9 @@ import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao;
-import org.apache.ambari.logsearch.graph.GraphDataGnerator;
-import org.apache.ambari.logsearch.query.QueryGeneration;
+import org.apache.ambari.logsearch.graph.GraphDataGenerator;
 import org.apache.ambari.logsearch.util.BizUtil;
 import org.apache.ambari.logsearch.util.ConfigUtil;
-import org.apache.ambari.logsearch.util.DateUtil;
 import org.apache.ambari.logsearch.util.FileUtil;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.ambari.logsearch.view.VBarDataList;
@@ -85,6 +83,10 @@ public class LogsMgr extends MgrBase {
   public static List<String> cancelByDate = new CopyOnWriteArrayList<String>();
 
   public static Map<String, String> mapUniqueId = new ConcurrentHashMap<String, String>();
+  
+  public static enum CONDITION {
+    OR, AND
+  }
 
   @Autowired
   ServiceLogsSolrDao serviceLogsSolrDao;
@@ -93,141 +95,110 @@ public class LogsMgr extends MgrBase {
   BizUtil bizUtil;
 
   @Autowired
-  QueryGeneration queryGenerator;
-
-  @Autowired
   FileUtil fileUtil;
 
-  @Autowired
-  DateUtil dateUtil;
-
 
   @Autowired
-  GraphDataGnerator graphDataGnerator;
+  GraphDataGenerator graphDataGenerator;
 
 
   public String searchLogs(SearchCriteria searchCriteria) {
     String keyword = (String) searchCriteria.getParamValue("keyword");
-    if (!stringUtil.isEmpty(keyword))
+    String logId = (String) searchCriteria.getParamValue("sourceLogId");
+    if (!stringUtil.isEmpty(keyword)) {
       try {
         return getPageByKeyword(searchCriteria);
       } catch (SolrException | SolrServerException e) {
         logger.error("Error while getting keyword=" + keyword, e);
+        throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+            .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
       }
-    String logId = (String) searchCriteria.getParamValue("sourceLogId");
-    if (!stringUtil.isEmpty(logId))
+    } else if (!stringUtil.isEmpty(logId)) {
       try {
         return getPageByLogId(searchCriteria);
       } catch (SolrException e) {
         logger.error("Error while getting keyword=" + keyword, e);
+        throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+            .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
       }
-    SolrQuery solrQuery = queryGenerator.commonFilterQuery(searchCriteria);
-
-    solrQuery.setParam("event", "/solr/logs_search");
-    try {
-      VSolrLogList collection = getLogAsPaginationProvided(solrQuery, serviceLogsSolrDao);
-      return convertObjToString(collection);
-    } catch (SolrException | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
-    }
-  }
+    } else {
+      SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
 
-  public String getHosts(SearchCriteria searchCriteria) {
-    SolrQuery solrQuery = new SolrQuery();
-    queryGenerator.setMainQuery(solrQuery, null);
-    queryGenerator.setFacetField(solrQuery, LogSearchConstants.SOLR_HOST);
-    try {
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      FacetField hostFacetField = response
-        .getFacetField(LogSearchConstants.SOLR_HOST);
-      if (hostFacetField == null)
-        return convertObjToString(new SolrDocumentList());
-      List<Count> hostList = hostFacetField.getValues();
-      if (hostList == null)
-        return convertObjToString(new SolrDocumentList());
-      SolrDocumentList docList = response.getResults();
-      String hostName = "";
-      for (Count host : hostList) {
-        SolrDocument solrDoc = new SolrDocument();
-        hostName = host.getName();
-        solrDoc.put(LogSearchConstants.SOLR_HOST, hostName);
-        docList.add(solrDoc);
-      }
+      solrQuery.setParam("event", "/solr/logs_search");
 
-      VGroupList collection = new VGroupList(docList);
-      collection.setStartIndex((int) docList.getStart());
-      collection.setTotalCount(docList.getNumFound());
+      VSolrLogList collection = getLogAsPaginationProvided(solrQuery,
+          serviceLogsSolrDao);
       return convertObjToString(collection);
-    } catch (IOException | SolrServerException | SolrException e) {
-      logger.error(e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
     }
   }
 
-  public VGroupList getComponentList(SearchCriteria searchCriteria) {
-    SolrQuery query = new SolrQuery();
-    query.setParam("event", "/audit/getLiveLogsCount");
-    queryGenerator.setMainQuery(query, null);
-
-    queryGenerator.setGroupField(query, LogSearchConstants.SOLR_COMPONENT,
-      searchCriteria.getMaxRows());
-
-    searchCriteria.setSortBy(LogSearchConstants.SOLR_COMPONENT);
-    queryGenerator.setSortOrderDefaultServiceLog(query, searchCriteria);
-    try {
-      return this.getSolrGroupList(query);
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + query, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
-    }
+  public String getHosts(SearchCriteria searchCriteria) {
+    return getFields(searchCriteria, LogSearchConstants.SOLR_HOST);
   }
+  
+  public String getFields(SearchCriteria searchCriteria,String field){
 
-  public String getComponents(SearchCriteria searchCriteria) {
     SolrQuery solrQuery = new SolrQuery();
+    VGroupList collection = new VGroupList();
     queryGenerator.setMainQuery(solrQuery, null);
     queryGenerator.setFacetField(solrQuery,
-      LogSearchConstants.SOLR_COMPONENT);
+        field);
     queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
     try {
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      FacetField hostFacetField = response
-        .getFacetField(LogSearchConstants.SOLR_COMPONENT);
-      if (hostFacetField == null)
-        return convertObjToString(new SolrDocumentList());
-      List<Count> componenttList = hostFacetField.getValues();
-      if (componenttList == null)
-        return convertObjToString(new SolrDocumentList());
+      if(response == null){
+        return convertObjToString(collection);
+      }
+      FacetField facetField = response
+        .getFacetField(field);
+      if (facetField == null){
+        return convertObjToString(collection);
+      }
+      List<Count> fieldList = facetField.getValues();
+      if (fieldList == null){
+        return convertObjToString(collection);
+      }
       SolrDocumentList docList = response.getResults();
-      String hostName = "";
-      for (Count component : componenttList) {
+      if(docList == null){
+        return convertObjToString(collection);
+      }
+      String temp = "";
+      for (Count cnt : fieldList) {
         SolrDocument solrDoc = new SolrDocument();
-        hostName = component.getName();
-        solrDoc.put(LogSearchConstants.SOLR_COMPONENT, hostName);
+        temp = cnt.getName();
+        solrDoc.put(field, temp);
         docList.add(solrDoc);
       }
-
-      VGroupList collection = new VGroupList(docList);
-      collection.setStartIndex((int) docList.getStart());
-      collection.setTotalCount(docList.getNumFound());
+      
+      collection.setGroupDocuments(docList);
+      if(!docList.isEmpty()){
+        collection.setStartIndex((int) docList.getStart());
+        collection.setTotalCount(docList.getNumFound());
+      }
       return convertObjToString(collection);
     } catch (IOException | SolrServerException | SolrException e) {
       logger.error(e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
+  
+  }
+
+  public String getComponents(SearchCriteria searchCriteria) {
+    return getFields(searchCriteria, LogSearchConstants.SOLR_COMPONENT);
   }
 
   public String getAggregatedInfo(SearchCriteria searchCriteria) {
-    SolrQuery solrQuery = queryGenerator.commonFilterQuery(searchCriteria);
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
     String hierarchy = "host,type,level";
+    VGraphInfo graphInfo = new VGraphInfo();
     try {
       queryGenerator.setMainQuery(solrQuery, null);
       queryGenerator.setFacetPivot(solrQuery, 1, hierarchy);
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+      if (response == null) {
+        return convertObjToString(graphInfo);
+      }
 
       List<List<PivotField>> hirarchicalPivotField = new ArrayList<List<PivotField>>();
       List<VGraphData> dataList = new ArrayList<VGraphData>();
@@ -235,15 +206,18 @@ public class LogsMgr extends MgrBase {
       if (namedList != null) {
         hirarchicalPivotField = namedList.getAll(hierarchy);
       }
-      if (!hirarchicalPivotField.isEmpty())
+      if (!hirarchicalPivotField.isEmpty()) {
         dataList = buidGraphData(hirarchicalPivotField.get(0));
-      VGraphInfo graphInfo = new VGraphInfo();
-      graphInfo.setGraphData(dataList);
+      }
+      if (!dataList.isEmpty()) {
+        graphInfo.setGraphData(dataList);
+      }
+
       return convertObjToString(graphInfo);
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -251,109 +225,73 @@ public class LogsMgr extends MgrBase {
     List<VGraphData> logList = new ArrayList<VGraphData>();
     if (pivotFields != null) {
       for (PivotField pivotField : pivotFields) {
-        VGraphData logLevel = new VGraphData();
-        logLevel.setName("" + pivotField.getValue());
-        logLevel.setCount(Long.valueOf(pivotField.getCount()));
-        if (pivotField.getPivot() != null)
-          logLevel.setDataList(buidGraphData(pivotField.getPivot()));
-        logList.add(logLevel);
+        if (pivotField != null) {
+          VGraphData logLevel = new VGraphData();
+          logLevel.setName("" + pivotField.getValue());
+          logLevel.setCount(Long.valueOf(pivotField.getCount()));
+          if (pivotField.getPivot() != null) {
+            logLevel.setDataList(buidGraphData(pivotField.getPivot()));
+          }
+          logList.add(logLevel);
+        }
       }
     }
     return logList;
   }
 
-  public VCountList getLogLevelCount(SearchCriteria searchCriteria) {
+  public VCountList getFieldCount(SearchCriteria searchCriteria, String field){
     VCountList collection = new VCountList();
     List<VCount> vCounts = new ArrayList<VCount>();
     SolrQuery solrQuery = new SolrQuery();
     queryGenerator.setMainQuery(solrQuery, null);
-    queryGenerator.setFacetField(solrQuery, LogSearchConstants.SOLR_LEVEL);
+    if(field == null){
+      return collection;
+    }
+    queryGenerator.setFacetField(solrQuery, field);
     try {
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      FacetField hostFacetField = response
-        .getFacetField(LogSearchConstants.SOLR_LEVEL);
-      if (hostFacetField == null)
+      if (response == null){
         return collection;
-      List<Count> levelList = hostFacetField.getValues();
-
-      for (Count level : levelList) {
-        VCount vCount = new VCount();
-        vCount.setName(level.getName());
-        vCount.setCount(level.getCount());
-        vCounts.add(vCount);
       }
-
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
-    }
-
-    collection.setCounts(vCounts);
-    return collection;
-  }
-
-  public VCountList getComponenetsCount(SearchCriteria searchCriteria) {
-    VCountList collection = new VCountList();
-    List<VCount> vCounts = new ArrayList<VCount>();
-    SolrQuery solrQuery = new SolrQuery();
-    queryGenerator.setMainQuery(solrQuery, null);
-    queryGenerator.setFacetField(solrQuery,
-      LogSearchConstants.SOLR_COMPONENT);
-    try {
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      FacetField hostFacetField = response
-        .getFacetField(LogSearchConstants.SOLR_COMPONENT);
-      if (hostFacetField == null)
+      FacetField facetFields = response.getFacetField(field);
+      if (facetFields == null){
         return collection;
-      List<Count> componentList = hostFacetField.getValues();
-
-      for (Count component : componentList) {
-        VCount vCount = new VCount();
-        vCount.setName(component.getName());
-        vCount.setCount(component.getCount());
-        vCounts.add(vCount);
+      }
+      List<Count> fieldList = facetFields.getValues();
+      
+      if(fieldList == null){
+        return collection;
+      }
+      
+      for (Count cnt : fieldList) {
+        if (cnt != null) {
+          VCount vCount = new VCount();
+          vCount.setName(cnt.getName());
+          vCount.setCount(cnt.getCount());
+          vCounts.add(vCount);
+        }
       }
 
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
-
+    
     collection.setCounts(vCounts);
     return collection;
   }
+  
+  public VCountList getLogLevelCount(SearchCriteria searchCriteria) {
+    return getFieldCount(searchCriteria, LogSearchConstants.SOLR_LEVEL);
+  }
 
-  public VCountList getHostsCount(SearchCriteria searchCriteria) {
-    VCountList collection = new VCountList();
-    List<VCount> vCounts = new ArrayList<VCount>();
-    SolrQuery solrQuery = new SolrQuery();
-    queryGenerator.setMainQuery(solrQuery, null);
-    queryGenerator.setFacetField(solrQuery, LogSearchConstants.SOLR_HOST);
-    try {
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      FacetField hostFacetField = response
-        .getFacetField(LogSearchConstants.SOLR_HOST);
-      if (hostFacetField == null)
-        return collection;
-      List<Count> hostList = hostFacetField.getValues();
-
-      for (Count host : hostList) {
-        VCount vCount = new VCount();
-        vCount.setName(host.getName());
-        vCount.setCount(host.getCount());
-        vCounts.add(vCount);
-      }
-
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
-    }
+  public VCountList getComponentsCount(SearchCriteria searchCriteria) {
+    return getFieldCount(searchCriteria, LogSearchConstants.SOLR_COMPONENT);
+  }
 
-    collection.setCounts(vCounts);
-    return collection;
+  public VCountList getHostsCount(SearchCriteria searchCriteria) {
+    return getFieldCount(searchCriteria, LogSearchConstants.SOLR_HOST);
   }
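
The LogsMgr hunks above fold getHosts/getComponents into a shared getFields(criteria, field) and the three count endpoints into getFieldCount(criteria, field), leaving the public methods as one-line delegators. A schematic of that refactor with simplified types; the FacetSource interface and the literal field names below are stand-ins, not the real VCountList/SearchCriteria/LogSearchConstants:

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class FieldCountSketch {

      // Simplified stand-in for the Solr facet source.
      interface FacetSource {
        Map<String, Long> facet(String field);   // name -> count, may be null
      }

      // One parameterized implementation carries all the null handling...
      static List<String> getFieldCount(FacetSource solr, String field) {
        List<String> out = new ArrayList<String>();
        if (field == null) {
          return out;
        }
        Map<String, Long> counts = solr.facet(field);
        if (counts == null) {
          return out;
        }
        for (Map.Entry<String, Long> e : counts.entrySet()) {
          out.add(e.getKey() + "=" + e.getValue());
        }
        return out;
      }

      // ...and the per-field entry points become one-liners, as in the diff.
      static List<String> getLogLevelCount(FacetSource s)   { return getFieldCount(s, "level"); }
      static List<String> getComponentsCount(FacetSource s) { return getFieldCount(s, "type"); }
      static List<String> getHostsCount(FacetSource s)      { return getFieldCount(s, "host"); }

      public static void main(String[] args) {
        FacetSource fake = field -> {
          Map<String, Long> m = new LinkedHashMap<String, Long>();
          m.put("ERROR", 12L); m.put("WARN", 30L);
          return "level".equals(field) ? m : null;
        };
        System.out.println(getLogLevelCount(fake));   // [ERROR=12, WARN=30]
        System.out.println(getHostsCount(fake));      // [] -- null facet degrades to empty
      }
    }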
 
   public List<VNode> buidTreeData(List<PivotField> pivotFields,
@@ -364,73 +302,92 @@ public class LogsMgr extends MgrBase {
     if (pivotFields != null) {
       // For Host
       for (PivotField pivotHost : pivotFields) {
-        VNode hostNode = new VNode();
-        hostNode.setName("" + pivotHost.getValue());
-        hostNode.setValue("" + pivotHost.getCount());
-        hostNode.setType(firstPriority);
-        hostNode.setParent(true);
-        hostNode.setRoot(true);
-        PivotField hostPivot = null;
-        for (PivotField searchHost : pivotFieldHost) {
-          if (hostNode.getName().equals(searchHost.getValue())) {
-            hostPivot = searchHost;
-            break;
+        if (pivotHost != null) {
+          VNode hostNode = new VNode();
+          String name = (pivotHost.getValue() == null ? "" : ""+ pivotHost.getValue());
+          String value = "" + pivotHost.getCount();
+          if(!stringUtil.isEmpty(name)){
+            hostNode.setName(name);
           }
-        }
-        List<PivotField> pivotLevelHost = hostPivot.getPivot();
-        if (pivotLevelHost != null) {
-          Collection<VNameValue> logLevelCount = new ArrayList<VNameValue>();
-          for (PivotField pivotLevel : pivotLevelHost) {
-            VNameValue vnameValue = new VNameValue();
-            vnameValue.setName(((String) pivotLevel.getValue())
-              .toUpperCase());
-
-            vnameValue.setValue("" + pivotLevel.getCount());
-            logLevelCount.add(vnameValue);
-
+          if(!stringUtil.isEmpty(value)){
+            hostNode.setValue(value);
           }
-          hostNode.setLogLevelCount(logLevelCount);
-        }
-
-        query.addFilterQuery(hostQuery);
-        List<PivotField> pivotComponents = pivotHost.getPivot();
-        // For Components
-        if (pivotComponents != null) {
-          Collection<VNode> componentNodes = new ArrayList<VNode>();
-          for (PivotField pivotComp : pivotComponents) {
-            VNode compNode = new VNode();
-            compNode.setName("" + pivotComp.getValue());
-            compNode.setType(secondPriority);
-            compNode.setValue("" + pivotComp.getCount());
-            compNode.setParent(false);
-            compNode.setRoot(false);
-            List<PivotField> pivotLevels = pivotComp.getPivot();
-            if (pivotLevels != null) {
-              Collection<VNameValue> logLevelCount = new ArrayList<VNameValue>();
-              for (PivotField pivotLevel : pivotLevels) {
+          if(!stringUtil.isEmpty(firstPriority)){
+            hostNode.setType(firstPriority);
+          }
+          
+          hostNode.setParent(true);
+          hostNode.setRoot(true);
+          PivotField hostPivot = null;
+          for (PivotField searchHost : pivotFieldHost) {
+            if (!stringUtil.isEmpty(hostNode.getName())
+                && hostNode.getName().equals(searchHost.getValue())) {
+              hostPivot = searchHost;
+              break;
+            }
+          }
+          List<PivotField> pivotLevelHost = hostPivot.getPivot();
+          if (pivotLevelHost != null) {
+            Collection<VNameValue> logLevelCount = new ArrayList<VNameValue>();
+            for (PivotField pivotLevel : pivotLevelHost) {
+              if (pivotLevel != null) {
                 VNameValue vnameValue = new VNameValue();
-                vnameValue.setName(((String) pivotLevel
-                  .getValue()).toUpperCase());
-
+                String levelName = (pivotLevel.getValue() == null ? "" : ""
+                    + pivotLevel.getValue());
+                vnameValue.setName(levelName.toUpperCase());
                 vnameValue.setValue("" + pivotLevel.getCount());
                 logLevelCount.add(vnameValue);
-
               }
-              compNode.setLogLevelCount(logLevelCount);
             }
-            componentNodes.add(compNode);
+            hostNode.setLogLevelCount(logLevelCount);
           }
-          hostNode.setChilds(componentNodes);
-        }
-        extensionTree.add(hostNode);
-      }
+
+          query.addFilterQuery(hostQuery);
+          List<PivotField> pivotComponents = pivotHost.getPivot();
+          // For Components
+          if (pivotComponents != null) {
+            Collection<VNode> componentNodes = new ArrayList<VNode>();
+            for (PivotField pivotComp : pivotComponents) {
+              if (pivotComp != null) {
+                VNode compNode = new VNode();
+                String compName = (pivotComp.getValue() == null ? "" : ""
+                    + pivotComp.getValue());
+                compNode.setName(compName);
+                if (!stringUtil.isEmpty(secondPriority)) {
+                  compNode.setType(secondPriority);
+                }
+                compNode.setValue("" + pivotComp.getCount());
+                compNode.setParent(false);
+                compNode.setRoot(false);
+                List<PivotField> pivotLevels = pivotComp.getPivot();
+                if (pivotLevels != null) {
+                  Collection<VNameValue> logLevelCount = new ArrayList<VNameValue>();
+                  for (PivotField pivotLevel : pivotLevels) {
+                    if (pivotLevel != null) {
+                      VNameValue vnameValue = new VNameValue();
+                      String compLevel = pivotLevel.getValue() == null ? ""
+                          : "" + pivotLevel.getValue();
+                      vnameValue.setName((compLevel).toUpperCase());
+
+                      vnameValue.setValue("" + pivotLevel.getCount());
+                      logLevelCount.add(vnameValue);
+                    }
+                  }
+                  compNode.setLogLevelCount(logLevelCount);
+                }
+                componentNodes.add(compNode);
+              }}
+            hostNode.setChilds(componentNodes);
+          }
+          extensionTree.add(hostNode);
+        }}
     }
 
     return extensionTree;
   }
 
   public VNodeList getTreeExtension(SearchCriteria searchCriteria) {
-    SolrQuery solrQuery = queryGenerator.commonFilterQuery(searchCriteria);
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
     solrQuery.setParam("event", "/getTreeExtension");
 
     if (searchCriteria.getSortBy() == null) {
@@ -441,9 +398,10 @@ public class LogsMgr extends MgrBase {
     String hostName = ""
       + ((searchCriteria.getParamValue("hostName") == null) ? ""
       : searchCriteria.getParamValue("hostName"));
-    if (!"".equals(hostName))
+    if (!stringUtil.isEmpty(hostName)){
       solrQuery.addFilterQuery(LogSearchConstants.SOLR_HOST + ":*"
         + hostName + "*");
+    }
     String firstHirarchy = "host,type,level";
     String secondHirarchy = "host,level";
     VNodeList list = new VNodeList();
@@ -484,15 +442,18 @@ public class LogsMgr extends MgrBase {
       list.setvNodeList(dataList);
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
 
     return list;
   }
 
   public String getHostListByComponent(SearchCriteria searchCriteria) {
-    SolrQuery solrQuery = queryGenerator.commonFilterQuery(searchCriteria);
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
     solrQuery.setParam("event", "/getHostListByComponent");
 
+    VNodeList list = new VNodeList();
     if (searchCriteria.getSortBy() == null) {
       searchCriteria.setSortBy(LogSearchConstants.SOLR_HOST);
       searchCriteria.setSortType(SolrQuery.ORDER.asc.toString());
@@ -501,18 +462,16 @@ public class LogsMgr extends MgrBase {
     String componentName = ""
       + ((searchCriteria.getParamValue("componentName") == null) ? ""
       : searchCriteria.getParamValue("componentName"));
-    if (!"".equals(componentName))
+    if (!stringUtil.isEmpty(componentName)){
       solrQuery.addFilterQuery(LogSearchConstants.SOLR_COMPONENT + ":"
         + componentName);
-    else
-      try {
-        return convertObjToString(new VNodeList());
-      } catch (IOException e1) {
-        logger.error(e1);
-      }
+    } else {
+      return convertObjToString(list);
+    }
+    
     String firstHirarchy = "type,host,level";
     String secondHirarchy = "type,level";
-    VNodeList list = new VNodeList();
+   
     try {
       queryGenerator.setFacetPivot(solrQuery, 1, firstHirarchy,
         secondHirarchy);
@@ -530,26 +489,29 @@ public class LogsMgr extends MgrBase {
 
       if (firstHirarchicalPivotFields == null
         || secondHirarchicalPivotFields == null) {
-        return convertObjToString(new ArrayList<VNode>());
+        return convertObjToString(list);
       }
 
       List<VNode> dataList = buidTreeData(
         firstHirarchicalPivotFields.get(0),
         secondHirarchicalPivotFields.get(0), solrQuery,
         LogSearchConstants.COMPONENT, LogSearchConstants.HOST);
+      if(dataList == null){
+        return convertObjToString(list);
+      }
 
       list.setvNodeList(dataList);
       return convertObjToString(list);
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
   public VNameValueList getLogsLevelCount(SearchCriteria sc) {
     VNameValueList nameValueList = new VNameValueList();
-    SolrQuery query = queryGenerator.commonFilterQuery(sc);
+    SolrQuery query = queryGenerator.commonServiceFilterQuery(sc);
     query.setParam("event", "/getLogLevelCounts");
     List<VNameValue> logsCounts = getLogLevelFacets(query);
     nameValueList.setVNameValues(logsCounts);
@@ -558,6 +520,7 @@ public class LogsMgr extends MgrBase {
   }
 
   public List<VNameValue> getLogLevelFacets(SolrQuery query) {
+    String defalutValue = "0";
     HashMap<String, String> map = new HashMap<String, String>();
     List<VNameValue> logsCounts = new ArrayList<VNameValue>();
     try {
@@ -566,6 +529,9 @@ public class LogsMgr extends MgrBase {
 
       List<Count> logLevelCounts = getFacetCounts(query,
         LogSearchConstants.SOLR_LEVEL);
+      if(logLevelCounts == null){
+        return logsCounts;
+      }
       for (Count count : logLevelCounts) {
         map.put(count.getName().toUpperCase(), "" + count.getCount());
       }
@@ -573,8 +539,9 @@ public class LogsMgr extends MgrBase {
       VNameValue nameValue = null;
 
       String value = map.get(level);
-      if (value == null || value.equals(""))
-        value = "0";
+      if (stringUtil.isEmpty(value)){
+        value = defalutValue;
+      }
       nameValue = new VNameValue();
       nameValue.setName(level);
       nameValue.setValue(value);
@@ -583,8 +550,9 @@ public class LogsMgr extends MgrBase {
       level = LogSearchConstants.ERROR;
 
       value = map.get(level);
-      if (value == null || value.equals(""))
-        value = "0";
+      if (stringUtil.isEmpty(value)){
+        value = defalutValue;
+      }
       nameValue = new VNameValue();
       nameValue.setName(level);
       nameValue.setValue(value);
@@ -593,8 +561,9 @@ public class LogsMgr extends MgrBase {
       level = LogSearchConstants.WARN;
 
       value = map.get(level);
-      if (value == null || value.equals(""))
-        value = "0";
+      if (stringUtil.isEmpty(value)){
+        value = defalutValue;
+      }
       nameValue = new VNameValue();
       nameValue.setName(level);
       nameValue.setValue(value);
@@ -603,8 +572,9 @@ public class LogsMgr extends MgrBase {
       level = LogSearchConstants.INFO;
 
       value = map.get(level);
-      if (value == null || value.equals(""))
-        value = "0";
+      if (stringUtil.isEmpty(value)){
+        value = defalutValue;
+      }
       nameValue = new VNameValue();
       nameValue.setName(level);
       nameValue.setValue(value);
@@ -613,8 +583,9 @@ public class LogsMgr extends MgrBase {
       level = LogSearchConstants.DEBUG;
 
       value = map.get(level);
-      if (value == null || value.equals(""))
-        value = "0";
+      if (stringUtil.isEmpty(value)){
+        value = defalutValue;
+      }
       nameValue = new VNameValue();
       nameValue.setName(level);
       nameValue.setValue(value);
@@ -623,8 +594,9 @@ public class LogsMgr extends MgrBase {
       level = LogSearchConstants.TRACE;
 
       value = map.get(level);
-      if (value == null || value.equals(""))
-        value = "0";
+      if (stringUtil.isEmpty(value)){
+        value = defalutValue;
+      }
       nameValue = new VNameValue();
       nameValue.setName(level);
       nameValue.setValue(value);
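
The run of small hunks above makes the same edit in each per-level block of getLogLevelFacets: the empty-string check now goes through stringUtil.isEmpty and the fallback count comes from a shared default of "0". The same behaviour can be written as one loop over the fixed level order; in the sketch below the level array, the map layout, and the isEmpty stand-in are assumptions, not the LogSearch constants:

    import java.util.HashMap;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class LevelCountSketch {

      // Assumed display order; the real code walks the levels one block at a time.
      private static final String[] LEVELS =
          {"FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"};
      private static final String DEFAULT_VALUE = "0";

      // Equivalent of the repeated blocks above: every known level gets an entry,
      // and a missing or blank count falls back to "0".
      static Map<String, String> levelCounts(Map<String, String> facetCounts) {
        Map<String, String> out = new LinkedHashMap<String, String>();
        for (String level : LEVELS) {
          String value = facetCounts.get(level);
          if (value == null || value.trim().isEmpty()) {   // stands in for stringUtil.isEmpty
            value = DEFAULT_VALUE;
          }
          out.put(level, value);
        }
        return out;
      }

      public static void main(String[] args) {
        Map<String, String> fromSolr = new HashMap<String, String>();
        fromSolr.put("ERROR", "7");
        fromSolr.put("INFO", "1024");
        System.out.println(levelCounts(fromSolr));
        // {FATAL=0, ERROR=7, WARN=0, INFO=1024, DEBUG=0, TRACE=0}
      }
    }
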
@@ -639,25 +611,46 @@ public class LogsMgr extends MgrBase {
   // Get Facet Count According to FacetFeild
   public List<Count> getFacetCounts(SolrQuery solrQuery, String facetField)
     throws SolrServerException, IOException, SolrException {
-
+    List<Count> list = new ArrayList<FacetField.Count>();
+    
     QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-
+    if(response == null){
+      return list;
+    }
+    
     FacetField field = response.getFacetField(facetField);
     if (field == null) {
-      return new ArrayList<FacetField.Count>();
+      return list;
     }
-    return field.getValues();
+    list = field.getValues();
+    
+    
+    return list;
   }
 
   public String getPageByKeyword(SearchCriteria searchCriteria)
     throws SolrServerException {
-
-    String keyword = solrUtil.makeSolrSearchString((String) searchCriteria
-      .getParamValue("keyword"));
+    String defaultChoice = "0";
+    
+    String key = (String) searchCriteria.getParamValue("keyword");
+    if(stringUtil.isEmpty(key)){
+      throw restErrorUtil.createRESTException("Keyword was not given",
+          MessageEnums.DATA_NOT_FOUND);
+    }
+    
+    String keyword = solrUtil.escapeForStandardTokenizer(key);
+    
+    if(keyword.startsWith("\"") && keyword.endsWith("\"")){
+      keyword = keyword.substring(1);
+      keyword = keyword.substring(0, keyword.length()-1);
+    }
+    keyword = "*" + keyword + "*";
+   
 
     String keyType = (String) searchCriteria.getParamValue("keywordType");
+    QueryResponse queryResponse = null;
 
-    if (!(boolean) "0".equals(keyType)) {
+    if (!defaultChoice.equals(keyType)) {
       try {
         int currentPageNumber = searchCriteria.getPage();
         int maxRows = searchCriteria.getMaxRows();
@@ -670,46 +663,72 @@ public class LogsMgr extends MgrBase {
 
         // Next Page Start Time Calculation
         SolrQuery nextPageLogTimeQuery = queryGenerator
-          .commonFilterQuery(searchCriteria);
+          .commonServiceFilterQuery(searchCriteria);
         nextPageLogTimeQuery.remove("start");
         nextPageLogTimeQuery.remove("rows");
         nextPageLogTimeQuery.setStart(lastLogIndexNumber);
         nextPageLogTimeQuery.setRows(1);
+        
+        queryResponse = serviceLogsSolrDao.process(
+            nextPageLogTimeQuery);
+        if(queryResponse == null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
 
-        SolrDocumentList docList = serviceLogsSolrDao.process(
-          nextPageLogTimeQuery).getResults();
+        SolrDocumentList docList = queryResponse.getResults();
+        if(docList ==null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+        
         SolrDocument solrDoc = docList.get(0);
 
         Date logDate = (Date) solrDoc.get(LogSearchConstants.LOGTIME);
+        if(logDate == null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
         nextPageLogTime = dateUtil
           .convertDateWithMillisecondsToSolrDate(logDate);
         nextPageLogID = ""
           + solrDoc.get(LogSearchConstants.ID);
 
-        if (stringUtil.isEmpty(nextPageLogID))
+        if (stringUtil.isEmpty(nextPageLogID)){
           nextPageLogID = "0";
+        }
 
         String filterQueryListIds = "";
         // Remove the same Time Ids
         SolrQuery listRemoveIds = queryGenerator
-          .commonFilterQuery(searchCriteria);
+          .commonServiceFilterQuery(searchCriteria);
         listRemoveIds.remove("start");
         listRemoveIds.remove("rows");
         queryGenerator.setSingleIncludeFilter(listRemoveIds,
           LogSearchConstants.LOGTIME, "\"" + nextPageLogTime + "\"");
         queryGenerator.setSingleExcludeFilter(listRemoveIds,
           LogSearchConstants.ID, nextPageLogID);
-        listRemoveIds.set("fl", LogSearchConstants.ID);
-        SolrDocumentList docListIds = serviceLogsSolrDao.process(
-          listRemoveIds).getResults();
+        queryGenerator.setFl(listRemoveIds, LogSearchConstants.ID);
+        queryResponse = serviceLogsSolrDao.process(
+            listRemoveIds);
+        if(queryResponse == null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+
+        SolrDocumentList docListIds = queryResponse.getResults();
+        if(docListIds ==null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
         boolean isFirst = true;
-        for (SolrDocument solrDocId : docListIds) {
+        for (SolrDocument solrDocId :  docListIds ) {
           String id = "" + solrDocId.get(LogSearchConstants.ID);
           if (isFirst) {
-            filterQueryListIds += "-" + LogSearchConstants.ID + ":" + id;
+            filterQueryListIds += LogSearchConstants.MINUS_OPERATOR + LogSearchConstants.ID + ":" + id;
             isFirst = false;
           } else {
-            filterQueryListIds += " AND " + "-" + LogSearchConstants.ID + ":" + id;
+            filterQueryListIds += " "+CONDITION.AND+" " + LogSearchConstants.MINUS_OPERATOR + LogSearchConstants.ID + ":" + id;
           }
         }
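
The loop above now assembles the duplicate-exclusion filter from LogSearchConstants.MINUS_OPERATOR and the new CONDITION.AND enum instead of bare "-" and " AND " literals, and skips null documents. Functionally the filter is a joined list of negated id clauses; a sketch of the same assembly, assuming MINUS_OPERATOR is "-":

    import java.util.Arrays;
    import java.util.List;

    public class ExcludeFilterSketch {

      private static final String MINUS_OPERATOR = "-";   // assumed value of LogSearchConstants.MINUS_OPERATOR
      private static final String AND = "AND";            // mirrors CONDITION.AND in the diff

      // Builds "-id:A AND -id:B AND ..." for the documents that must be excluded
      // from the next-page query, i.e. those sharing the boundary log time.
      static String excludeIds(String idField, List<String> ids) {
        StringBuilder fq = new StringBuilder();
        for (String id : ids) {
          if (fq.length() > 0) {
            fq.append(' ').append(AND).append(' ');
          }
          fq.append(MINUS_OPERATOR).append(idField).append(':').append(id);
        }
        return fq.toString();
      }

      public static void main(String[] args) {
        System.out.println(excludeIds("id", Arrays.asList("doc-1", "doc-2", "doc-3")));
        // -id:doc-1 AND -id:doc-2 AND -id:doc-3
      }
    }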
 
@@ -718,54 +737,62 @@ public class LogsMgr extends MgrBase {
         String startTime = (String) searchCriteria
           .getParamValue("from");
         SolrQuery logTimeThroughRangeQuery = queryGenerator
-          .commonFilterQuery(searchCriteria);
+          .commonServiceFilterQuery(searchCriteria);
         logTimeThroughRangeQuery.remove("start");
         logTimeThroughRangeQuery.remove("rows");
         logTimeThroughRangeQuery.setRows(1);
-        if (!stringUtil.isEmpty(filterQueryListIds))
+        if (!stringUtil.isEmpty(filterQueryListIds)){
           logTimeThroughRangeQuery.setFilterQueries(filterQueryListIds);
-
+        }
 
         String sortByType = searchCriteria.getSortType();
 
         if (!stringUtil.isEmpty(sortByType) && sortByType
           .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
           
-          /*sequenceNumber =""+( Integer.parseInt(sequenceNumber) - 1);*/
-          /*queryGenerator.setSingleRangeFilter(
-              logTimeThroughRangeQuery,
-              LogSearchConstants.SEQUNCE_ID, "*",sequenceNumber);*/
           queryGenerator.setSingleRangeFilter(logTimeThroughRangeQuery,
             LogSearchConstants.LOGTIME, nextPageLogTime,
             endTime);
-          logTimeThroughRangeQuery.set("sort",
+          logTimeThroughRangeQuery.set(LogSearchConstants.SORT,
             LogSearchConstants.LOGTIME + " "
               + LogSearchConstants.ASCENDING_ORDER);
 
         } else {
-          /*sequenceNumber =""+( Integer.parseInt(sequenceNumber) + 1);*/
-          /*queryGenerator.setSingleRangeFilter(
-              logTimeThroughRangeQuery,
-              LogSearchConstants.SEQUNCE_ID, sequenceNumber, "*");*/
+          
           queryGenerator.setSingleRangeFilter(logTimeThroughRangeQuery,
             LogSearchConstants.LOGTIME, startTime,
             nextPageLogTime);
-          logTimeThroughRangeQuery.set("sort",
+          logTimeThroughRangeQuery.set(LogSearchConstants.SORT,
             LogSearchConstants.LOGTIME + " "
               + LogSearchConstants.DESCENDING_ORDER);
         }
         queryGenerator.setSingleIncludeFilter(logTimeThroughRangeQuery,
-          LogSearchConstants.SOLR_LOG_MESSAGE, keyword);
+          LogSearchConstants.SOLR_KEY_LOG_MESSAGE, keyword);
+
 
+        queryResponse = serviceLogsSolrDao.process(
+            logTimeThroughRangeQuery);
+        if(queryResponse == null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
 
-        SolrDocumentList documentList = serviceLogsSolrDao.process(
-          logTimeThroughRangeQuery).getResults();
+        SolrDocumentList documentList = queryResponse.getResults();
+        if(documentList ==null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
 
         SolrDocument solrDocument = new SolrDocument();
-        if (!documentList.isEmpty())
+        if (!documentList.isEmpty()){
           solrDocument = documentList.get(0);
-        /*String keywordLogSequenceNumber = ""+ solrDocument.get(LogSearchConstants.SEQUNCE_ID);*/
+        }
+        
         Date keywordLogDate = (Date) solrDocument.get(LogSearchConstants.LOGTIME);
+        if(keywordLogDate == null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
         String originalKeywordDate = dateUtil
           .convertDateWithMillisecondsToSolrDate(keywordLogDate);
         String keywordId = "" + solrDocument.get(LogSearchConstants.ID);
@@ -783,16 +810,7 @@ public class LogsMgr extends MgrBase {
           queryGenerator.setSingleRangeFilter(rangeLogQuery,
             LogSearchConstants.LOGTIME, startTime,
             keywordDateTime);
-          /*queryGenerator
-          .setSingleRangeFilter(rangeLogQuery,
-              LogSearchConstants.SEQUNCE_ID,"*", keywordLogSequenceNumber);*/
-
-
         } else {
-          /*queryGenerator
-          .setSingleRangeFilter(rangeLogQuery,
-              LogSearchConstants.SEQUNCE_ID, keywordLogSequenceNumber,
-              "*"); */
           keywordLogDate = dateUtil.addMilliSecondsToDate(keywordLogDate, -1);
           String keywordDateTime = dateUtil
             .convertDateWithMillisecondsToSolrDate(keywordLogDate);
@@ -803,35 +821,27 @@ public class LogsMgr extends MgrBase {
 
 
         long countNumberLogs = countQuery(rangeLogQuery) - 1;
-        
-        /*// Delete Duplicate entries
-        SolrQuery duplicatesLogQuery = nextPageLogTimeQuery.getCopy();
-        duplicatesLogQuery.remove("start");
-        duplicatesLogQuery.remove("rows");
-        queryGenerator.setSingleIncludeFilter(duplicatesLogQuery,
-            LogSearchConstants.LOGTIME, "\"" + keywordLogTime
-                + "\"");
-
-        countNumberLogs = countNumberLogs
-            - countQuery(duplicatesLogQuery);*/
+      
 
         //Adding numbers on 
 
 
         try {
           SolrQuery sameIdQuery = queryGenerator
-            .commonFilterQuery(searchCriteria);
+            .commonServiceFilterQuery(searchCriteria);
           queryGenerator.setSingleIncludeFilter(sameIdQuery,
             LogSearchConstants.LOGTIME, "\"" + originalKeywordDate + "\"");
-          sameIdQuery.set("fl", LogSearchConstants.ID);
+          queryGenerator.setFl(sameIdQuery, LogSearchConstants.ID);
           SolrDocumentList sameQueryDocList = serviceLogsSolrDao.process(sameIdQuery)
             .getResults();
           for (SolrDocument solrDocumenent : sameQueryDocList) {
             String id = (String) solrDocumenent
               .getFieldValue(LogSearchConstants.ID);
             countNumberLogs++;
-            if (id.equals(keywordId))
+           
+            if (!stringUtil.isEmpty(id) && id.equals(keywordId)){
               break;
+            }
           }
         } catch (SolrException | SolrServerException | IOException e) {
           logger.error(e);
@@ -854,11 +864,10 @@ public class LogsMgr extends MgrBase {
       try {
         int currentPageNumber = searchCriteria.getPage();
         int maxRows = searchCriteria.getMaxRows();
-        String sequenceNumber = "";
 
         if (currentPageNumber == 0) {
           throw restErrorUtil.createRESTException("This is first Page Not",
-            MessageEnums.ERROR_SYSTEM);
+            MessageEnums.DATA_NOT_FOUND);
         }
 
         int firstLogCurrentPage = (currentPageNumber * maxRows);
@@ -866,15 +875,25 @@ public class LogsMgr extends MgrBase {
 
         // Next Page Start Time Calculation
         SolrQuery lastLogTime = queryGenerator
-          .commonFilterQuery(searchCriteria);
+          .commonServiceFilterQuery(searchCriteria);
         lastLogTime.remove("start");
         lastLogTime.remove("rows");
 
         lastLogTime.setStart(firstLogCurrentPage);
         lastLogTime.setRows(1);
 
-        SolrDocumentList docList = serviceLogsSolrDao.process(
-          lastLogTime).getResults();
+        queryResponse = serviceLogsSolrDao.process(
+            lastLogTime);
+        if(queryResponse == null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+
+        SolrDocumentList docList = queryResponse.getResults();
+        if(docList ==null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
         SolrDocument solrDoc = docList.get(0);
 
         Date logDate = (Date) solrDoc.get(LogSearchConstants.LOGTIME);
@@ -882,32 +901,43 @@ public class LogsMgr extends MgrBase {
         lastLogsLogTime = dateUtil
           .convertDateWithMillisecondsToSolrDate(logDate);
         String lastLogsLogId = ""
-          + solrDoc.get(LogSearchConstants.SEQUNCE_ID);
-        if (stringUtil.isEmpty(sequenceNumber))
-          sequenceNumber = "0";
+          + solrDoc.get(LogSearchConstants.ID);
 
 
         String filterQueryListIds = "";
         // Remove the same Time Ids
         SolrQuery listRemoveIds = queryGenerator
-          .commonFilterQuery(searchCriteria);
+          .commonServiceFilterQuery(searchCriteria);
         listRemoveIds.remove("start");
         listRemoveIds.remove("rows");
         queryGenerator.setSingleIncludeFilter(listRemoveIds,
           LogSearchConstants.LOGTIME, "\"" + lastLogsLogTime + "\"");
         queryGenerator.setSingleExcludeFilter(listRemoveIds,
           LogSearchConstants.ID, lastLogsLogId);
-        listRemoveIds.set("fl", LogSearchConstants.ID);
-        SolrDocumentList docListIds = serviceLogsSolrDao.process(
-          listRemoveIds).getResults();
+        queryGenerator.setFl(listRemoveIds, LogSearchConstants.ID);
+        queryResponse = serviceLogsSolrDao.process(
+            listRemoveIds);
+        if(queryResponse == null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+
+        SolrDocumentList docListIds = queryResponse.getResults();
+        if(docListIds == null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
         boolean isFirst = true;
         for (SolrDocument solrDocId : docListIds) {
-          String id = "" + solrDocId.get(LogSearchConstants.ID);
-          if (isFirst) {
-            filterQueryListIds += "-" + LogSearchConstants.ID + ":" + id;
-            isFirst = false;
-          } else {
-            filterQueryListIds += " AND " + "-" + LogSearchConstants.ID + ":" + id;
+          if (solrDocId != null) {
+            String id = "" + solrDocId.get(LogSearchConstants.ID);
+            if (isFirst) {
+              filterQueryListIds += LogSearchConstants.MINUS_OPERATOR + LogSearchConstants.ID + ":" + id;
+              isFirst = false;
+            } else {
+              filterQueryListIds += " "+CONDITION.AND+" " + LogSearchConstants.MINUS_OPERATOR + LogSearchConstants.ID + ":"
+                  + id;
+            }
           }
         }
 
@@ -917,60 +947,67 @@ public class LogsMgr extends MgrBase {
         String startTime = (String) searchCriteria
           .getParamValue("from");
         SolrQuery logTimeThroughRangeQuery = queryGenerator
-          .commonFilterQuery(searchCriteria);
+          .commonServiceFilterQuery(searchCriteria);
         logTimeThroughRangeQuery.remove("start");
         logTimeThroughRangeQuery.remove("rows");
         logTimeThroughRangeQuery.setRows(1);
         queryGenerator.setSingleExcludeFilter(logTimeThroughRangeQuery,
           LogSearchConstants.ID, lastLogsLogId);
-        if (!stringUtil.isEmpty(filterQueryListIds))
+        if (!stringUtil.isEmpty(filterQueryListIds)){
           logTimeThroughRangeQuery.setFilterQueries(filterQueryListIds);
+        }
 
         if (!stringUtil.isEmpty(sortByType) && sortByType
           .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
 
-          sequenceNumber = ""
-            + (Integer.parseInt(sequenceNumber) - 1);
-          logTimeThroughRangeQuery.remove("sort");
-          logTimeThroughRangeQuery.set("sort",
+          logTimeThroughRangeQuery.remove(LogSearchConstants.SORT);
+          logTimeThroughRangeQuery.set(LogSearchConstants.SORT,
             LogSearchConstants.LOGTIME + " "
               + LogSearchConstants.DESCENDING_ORDER);
           
-          /*queryGenerator.setSingleRangeFilter(
-              logTimeThroughRangeQuery,
-              LogSearchConstants.SEQUNCE_ID,"*", sequenceNumber);*/
+          
           queryGenerator.setSingleRangeFilter(
             logTimeThroughRangeQuery,
             LogSearchConstants.LOGTIME, startTime,
             lastLogsLogTime);
 
         } else {
-          sequenceNumber = "" + (Integer.parseInt(sequenceNumber) + 1);
 
-          logTimeThroughRangeQuery.remove("sort");
-          logTimeThroughRangeQuery.set("sort",
+          logTimeThroughRangeQuery.remove(LogSearchConstants.SORT);
+          logTimeThroughRangeQuery.set(LogSearchConstants.SORT,
             LogSearchConstants.LOGTIME + " "
               + LogSearchConstants.ASCENDING_ORDER);
           
-          /*queryGenerator.setSingleRangeFilter(
-              logTimeThroughRangeQuery,
-              LogSearchConstants.SEQUNCE_ID, sequenceNumber,"*");*/
+
           queryGenerator.setSingleRangeFilter(logTimeThroughRangeQuery,
             LogSearchConstants.LOGTIME, lastLogsLogTime, endTime);
         }
         queryGenerator.setSingleIncludeFilter(logTimeThroughRangeQuery,
-          LogSearchConstants.SOLR_LOG_MESSAGE, keyword);
+          LogSearchConstants.SOLR_KEY_LOG_MESSAGE, keyword);
+
 
+        queryResponse = serviceLogsSolrDao.process(
+            logTimeThroughRangeQuery);
+        if(queryResponse == null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
 
-        SolrDocumentList documentList = serviceLogsSolrDao.process(
-          logTimeThroughRangeQuery).getResults();
+        SolrDocumentList documentList = queryResponse.getResults();
+        if(documentList == null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
         SolrDocument solrDocument = new SolrDocument();
-        if (!documentList.isEmpty())
+        if (!documentList.isEmpty()){
           solrDocument = documentList.get(0);
+        }
 
-        
-        /*String keywordLogSequenceNumber = ""+ solrDocument.get(LogSearchConstants.SEQUNCE_ID);*/
         Date keywordLogDate = (Date) solrDocument.get(LogSearchConstants.LOGTIME);
+        if(keywordLogDate == null){
+          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
         String originalKeywordDate = dateUtil
           .convertDateWithMillisecondsToSolrDate(keywordLogDate);
         String keywordId = "" + solrDocument.get(LogSearchConstants.ID);
@@ -982,23 +1019,16 @@ public class LogsMgr extends MgrBase {
 
         if (!stringUtil.isEmpty(sortByType) && sortByType
           .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
-          keywordLogDate = dateUtil.addMilliSecondsToDate(keywordLogDate, 1);
+       //   keywordLogDate = dateUtil.addMilliSecondsToDate(keywordLogDate, 1);
           String keywordDateTime = dateUtil
             .convertDateWithMillisecondsToSolrDate(keywordLogDate);
           queryGenerator.setSingleRangeFilter(rangeLogQuery,
             LogSearchConstants.LOGTIME, startTime,
             keywordDateTime);
-          /*queryGenerator
-          .setSingleRangeFilter(rangeLogQuery,
-              LogSearchConstants.SEQUNCE_ID,"*", keywordLogSequenceNumber);*/
 
 
         } else {
-          /*queryGenerator
-          .setSingleRangeFilter(rangeLogQuery,
-              LogSearchConstants.SEQUNCE_ID, keywordLogSequenceNumber,
-              "*"); */
-          keywordLogDate = dateUtil.addMilliSecondsToDate(keywordLogDate, -1);
+     //     keywordLogDate = dateUtil.addMilliSecondsToDate(keywordLogDate, -1);
           String keywordDateTime = dateUtil
             .convertDateWithMillisecondsToSolrDate(keywordLogDate);
           queryGenerator.setSingleRangeFilter(rangeLogQuery,
@@ -1009,34 +1039,24 @@ public class LogsMgr extends MgrBase {
 
         long countNumberLogs = countQuery(rangeLogQuery) - 1;
         
-        /*// Delete Duplicate entries
-        SolrQuery duplicatesLogQuery = nextPageLogTimeQuery.getCopy();
-        duplicatesLogQuery.remove("start");
-        duplicatesLogQuery.remove("rows");
-        queryGenerator.setSingleIncludeFilter(duplicatesLogQuery,
-            LogSearchConstants.LOGTIME, "\"" + keywordLogTime
-                + "\"");
-
-        countNumberLogs = countNumberLogs
-            - countQuery(duplicatesLogQuery);*/
-
-        //Adding numbers on 
-
-
+        //Adding numbers on
         try {
           SolrQuery sameIdQuery = queryGenerator
-            .commonFilterQuery(searchCriteria);
+            .commonServiceFilterQuery(searchCriteria);
           queryGenerator.setSingleIncludeFilter(sameIdQuery,
             LogSearchConstants.LOGTIME, "\"" + originalKeywordDate + "\"");
-          sameIdQuery.set("fl", LogSearchConstants.ID);
+          queryGenerator.setFl(sameIdQuery, LogSearchConstants.ID);
           SolrDocumentList sameQueryDocList = serviceLogsSolrDao.process(sameIdQuery)
             .getResults();
           for (SolrDocument solrDocumenent : sameQueryDocList) {
-            String id = (String) solrDocumenent
-              .getFieldValue(LogSearchConstants.ID);
-            countNumberLogs++;
-            if (id.equals(keywordId))
-              break;
+            if (solrDocumenent != null) {
+              String id = (String) solrDocumenent
+                  .getFieldValue(LogSearchConstants.ID);
+              countNumberLogs++;
+              if (!stringUtil.isEmpty(id) && id.equals(keywordId)) {
+                break;
+              }
+            }
           }
         } catch (SolrException | SolrServerException | IOException e) {
           logger.error(e);
@@ -1055,92 +1075,23 @@ public class LogsMgr extends MgrBase {
       }
 
     }
-    throw restErrorUtil.createRESTException("keyword not found",
-      MessageEnums.ERROR_SYSTEM);
-  }
-
-  public String getPageByKeyword1(SearchCriteria searchCriteria)
-    throws SolrServerException {
-
-    SolrQuery query = queryGenerator.commonFilterQuery(searchCriteria);
-    String keyword = solrUtil.makeSearcableString((String) searchCriteria
-      .getParamValue("keyword"));
-    String uniqueId = (String) searchCriteria.getParamValue("token");
-    if (uniqueId != null && !uniqueId.equals(""))
-      cancelByDate.add(uniqueId);
-    Long numberPages = 0l;
-    int currentPageNumber = searchCriteria.getPage();
-    try {
-      numberPages = countQuery(query) / searchCriteria.getMaxRows();
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error(e);
-    }
-    if ((boolean) searchCriteria.getParamValue("keywordType").equals("0")) {
-      for (int i = currentPageNumber - 1; i >= 0
-        && !cancelRequest(uniqueId); i--) {
-        mapUniqueId.put(uniqueId, "" + i);
-        query.remove("rows");
-        query.remove("start");
-        query.setStart(i * searchCriteria.getMaxRows());
-        query.setRows(searchCriteria.getMaxRows());
-        VSolrLogList vSolrLogList = getLogAsPaginationProvided(query, serviceLogsSolrDao);
-        SolrDocumentList documentList = vSolrLogList.getList();
-        for (SolrDocument solrDoc : documentList) {
-          String log_message = solrUtil
-            .makeSearcableString((String) solrDoc
-              .getFieldValue(LogSearchConstants.SOLR_LOG_MESSAGE));
-          if (log_message != null
-            && log_message
-            .toLowerCase(Locale.ENGLISH)
-            .contains(
-              keyword.toLowerCase(Locale.ENGLISH))) {
-            cancelByDate.remove(uniqueId);
-            try {
-              return convertObjToString(vSolrLogList);
-            } catch (IOException e) {
-              logger.error(e);
-            }
-          }
-        }
-      }
-
-    } else {
-      for (int i = currentPageNumber + 1; i <= numberPages
-        && !cancelRequest(uniqueId); i++) {
-        mapUniqueId.put(uniqueId, "" + i);
-        query.remove("rows");
-        query.remove("start");
-        query.setStart(i * searchCriteria.getMaxRows());
-        query.setRows(searchCriteria.getMaxRows());
-        VSolrLogList vSolrLogList = getLogAsPaginationProvided(query, serviceLogsSolrDao);
-        SolrDocumentList solrDocumentList = vSolrLogList.getList();
-        for (SolrDocument solrDocument : solrDocumentList) {
-          String logMessage = solrUtil
-            .makeSearcableString((String) solrDocument
-              .getFieldValue(LogSearchConstants.SOLR_LOG_MESSAGE));
-          if (logMessage != null
-            && logMessage.toLowerCase(Locale.ENGLISH).contains(
-            keyword.toLowerCase(Locale.ENGLISH))) {
-            cancelByDate.remove(uniqueId);
-            try {
-              return convertObjToString(vSolrLogList);
-            } catch (SolrException | IOException e) {
-              logger.error(e);
-            }
-          }
-        }
-      }
-    }
-    throw restErrorUtil.createRESTException("keyword not found",
-      MessageEnums.ERROR_SYSTEM);
+    throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+        MessageEnums.ERROR_SYSTEM);
   }
 
   private String getPageByLogId(SearchCriteria searchCriteria) {
+    VSolrLogList vSolrLogList = new VSolrLogList();
     String endLogTime = (String) searchCriteria.getParamValue("to");
+    if(stringUtil.isEmpty(endLogTime)){
+      return convertObjToString(vSolrLogList);
+    }
     long startIndex = 0l;
 
     String logId = (String) searchCriteria.getParamValue("sourceLogId");
-    SolrQuery solrQuery = queryGenerator.commonFilterQuery(searchCriteria);
+    if(stringUtil.isEmpty(logId)){
+      return convertObjToString(vSolrLogList);
+    }
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
 
     String endTimeMinusOneMilli = "";
     String logTime = "";
@@ -1149,19 +1100,32 @@ public class LogsMgr extends MgrBase {
       SolrQuery logTimeByIdQuery = new SolrQuery();
       queryGenerator.setMainQuery(logTimeByIdQuery, null);
       queryGenerator.setSingleIncludeFilter(logTimeByIdQuery,
-        LogSearchConstants.ID, logId);
+          LogSearchConstants.ID, logId);
       queryGenerator.setRowCount(solrQuery, 1);
 
-      SolrDocumentList docList = serviceLogsSolrDao.process(
-        logTimeByIdQuery).getResults();
-      Date dateOfLogId = (Date) docList.get(0).get(
-        LogSearchConstants.LOGTIME);
+      QueryResponse queryResponse = serviceLogsSolrDao
+          .process(logTimeByIdQuery);
+
+      if(queryResponse == null){
+        return convertObjToString(new VSolrLogList()); 
+      }
+      
+      SolrDocumentList docList = queryResponse.getResults();
+      Date dateOfLogId = null;
+      if (docList != null && !docList.isEmpty()) {
+        SolrDocument dateLogIdDoc = docList.get(0);
+        if(dateLogIdDoc != null){
+          dateOfLogId = (Date) dateLogIdDoc.get(LogSearchConstants.LOGTIME);
+        }
+      }
+
+      if (dateOfLogId != null) {
+        logTime = dateUtil.convertDateWithMillisecondsToSolrDate(dateOfLogId);
+        Date endDate = dateUtil.addMilliSecondsToDate(dateOfLogId, 1);
+        endTimeMinusOneMilli = (String) dateUtil
+            .convertDateWithMillisecondsToSolrDate(endDate);
+      }
 
-      logTime = dateUtil
-        .convertDateWithMillisecondsToSolrDate(dateOfLogId);
-      Date endDate = dateUtil.addMilliSecondsToDate(dateOfLogId, 1);
-      endTimeMinusOneMilli = (String) dateUtil
-        .convertDateWithMillisecondsToSolrDate(endDate);
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error(e);
     }
@@ -1170,8 +1134,7 @@ public class LogsMgr extends MgrBase {
       solrQuery.remove(LogSearchConstants.ID);
       solrQuery.remove(LogSearchConstants.LOGTIME);
       queryGenerator.setSingleRangeFilter(solrQuery,
-        LogSearchConstants.LOGTIME, endTimeMinusOneMilli,
-        endLogTime);
+          LogSearchConstants.LOGTIME, endTimeMinusOneMilli, endLogTime);
       queryGenerator.setRowCount(solrQuery, 0);
       startIndex = countQuery(solrQuery);
     } catch (SolrException | SolrServerException | IOException e) {
@@ -1179,37 +1142,41 @@ public class LogsMgr extends MgrBase {
     }
 
     try {
-      SolrQuery sameIdQuery = queryGenerator
-        .commonFilterQuery(searchCriteria);
+      SolrQuery sameIdQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
       queryGenerator.setSingleIncludeFilter(sameIdQuery,
-        LogSearchConstants.LOGTIME, "\"" + logTime + "\"");
+          LogSearchConstants.LOGTIME, "\"" + logTime + "\"");
       sameIdQuery.set("fl", LogSearchConstants.ID);
-      SolrDocumentList docList = serviceLogsSolrDao.process(sameIdQuery)
-        .getResults();
+      
+      QueryResponse sameIdResponse = serviceLogsSolrDao.process(sameIdQuery);
+      SolrDocumentList docList = sameIdResponse.getResults();
+      
       for (SolrDocument solrDocumenent : docList) {
         String id = (String) solrDocumenent
-          .getFieldValue(LogSearchConstants.ID);
+            .getFieldValue(LogSearchConstants.ID);
         startIndex++;
-        if (id.equals(logId))
-          break;
+        if (!stringUtil.isEmpty(id)) {
+          if (id.equals(logId)) {
+            break;
+          }
+        }
       }
 
-      SolrQuery logIdQuery = queryGenerator
-        .commonFilterQuery(searchCriteria);
+      SolrQuery logIdQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
       logIdQuery.remove("rows");
       logIdQuery.remove("start");
       int start = (int) ((startIndex / searchCriteria.getMaxRows()) * searchCriteria
-        .getMaxRows());
+          .getMaxRows());
       logIdQuery.setStart(start);
       logIdQuery.setRows(searchCriteria.getMaxRows());
-      VSolrLogList vSolrLogList = getLogAsPaginationProvided(logIdQuery, serviceLogsSolrDao);
+      vSolrLogList = getLogAsPaginationProvided(logIdQuery,
+          serviceLogsSolrDao);
       return convertObjToString(vSolrLogList);
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error(e);
     }
 
     throw restErrorUtil.createRESTException("LogId not Found",
-      MessageEnums.ERROR_SYSTEM);
+        MessageEnums.ERROR_SYSTEM);
   }
 
   @SuppressWarnings("unchecked")
@@ -1221,17 +1188,29 @@ public class LogsMgr extends MgrBase {
       queryGenerator.setFacetRange(solrQuery, LogSearchConstants.LOGTIME,
         from, to, unit);
 
-      List<RangeFacet.Count> logLevelCounts;
+      List<RangeFacet.Count> logLevelCounts = null;
 
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+      if(response == null){
+        return logsCounts;
+      }
       @SuppressWarnings("rawtypes")
-      List<RangeFacet> rangeFacet = response.getFacetRanges();
+      List<RangeFacet> rangeFacetList = response.getFacetRanges();
+      if (rangeFacetList == null) {
+        return logsCounts;
 
+      }
+      
+      @SuppressWarnings("rawtypes")
+      RangeFacet rangeFacet=rangeFacetList.get(0);
       if (rangeFacet == null) {
-        return new ArrayList<VNameValue>();
-
+        return logsCounts;
+      }
+      logLevelCounts = rangeFacet.getCounts();
+      
+      if(logLevelCounts == null){
+        return logsCounts;
       }
-      logLevelCounts = rangeFacet.get(0).getCounts();
       for (RangeFacet.Count logCount : logLevelCounts) {
         VNameValue nameValue = new VNameValue();
         nameValue.setName(logCount.getValue());
@@ -1256,12 +1235,13 @@ public class LogsMgr extends MgrBase {
 
   @SuppressWarnings("unchecked")
   public String getHistogramData(SearchCriteria searchCriteria) {
+    String defaultValue = "0";
     VBarDataList dataList = new VBarDataList();
-    SolrQuery solrQuery = queryGenerator.commonFilterQuery(searchCriteria);
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
     solrQuery.set("event", "/getHistogramData");
-    String from = (String) searchCriteria.getParamValue("from");
-    String to = (String) searchCriteria.getParamValue("to");
-    String unit = (String) searchCriteria.getParamValue("unit");
+    String from = getFrom((String) searchCriteria.getParamValue("from"));
+    String to = getTo((String) searchCriteria.getParamValue("to"));
+    String unit = getUnit((String) searchCriteria.getParamValue("unit"));
 
     List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();
     List<String> logLevels = ConfigUtil.logLevels;
@@ -1274,17 +1254,18 @@ public class LogsMgr extends MgrBase {
 
     try {
       queryGenerator.setJSONFacet(solrQuery, jsonHistogramQuery);
-      queryGenerator.setRowCount(solrQuery, 0);
+      queryGenerator.setRowCount(solrQuery, Integer.parseInt(defaultValue));
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      if (response == null)
-        response = new QueryResponse();
-
+      if (response == null){
+        return convertObjToString(dataList);
+      }
       SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
         .getResponse().get("facets");
 
       if (jsonFacetResponse == null
-        || jsonFacetResponse.toString().equals("{count=0}"))
+        || jsonFacetResponse.toString().equals("{count=0}")){
         return convertObjToString(dataList);
+      }
 
       extractValuesFromBuckets(jsonFacetResponse, "x", "y", histogramData);
 
@@ -1305,7 +1286,7 @@ public class LogsMgr extends MgrBase {
               .getDataCount();
             for (VNameValue value : vNameValues2) {
               VNameValue value2 = new VNameValue();
-              value2.setValue("0");
+              value2.setValue(defaultValue);
               value2.setName(value.getName());
               vNameValues.add(value2);
             }
@@ -1326,8 +1307,8 @@ public class LogsMgr extends MgrBase {
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error(e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
 
     }
   }
@@ -1360,27 +1341,53 @@ public class LogsMgr extends MgrBase {
   }
 
   public boolean cancelRequest(String uniqueId) {
+    if (stringUtil.isEmpty(uniqueId)) {
+      logger.error("Unique id is Empty");
+      throw restErrorUtil.createRESTException("Unique id is Empty",
+        MessageEnums.DATA_NOT_FOUND);
+    }
     for (String date : cancelByDate) {
-      if (uniqueId.equalsIgnoreCase(date))
+      if (uniqueId.equalsIgnoreCase(date)){
         return false;
+      }
     }
     return true;
   }
 
   public Response exportToTextFile(SearchCriteria searchCriteria) {
-    SolrQuery solrQuery = queryGenerator.commonFilterQuery(searchCriteria);
+    String defaultFormat = "text";
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
     String from = (String) searchCriteria.getParamValue("from");
-    from = from.replace("T", " ");
-    from = from.replace(".", ",");
     String to = (String) searchCriteria.getParamValue("to");
-    to = to.replace("T", " ");
-    to = to.replace(".", ",");
-
     String utcOffset = (String) searchCriteria.getParamValue("utcOffset");
-    to = dateUtil.addOffsetToDate(to, Long.parseLong(utcOffset),
-      "yyyy-MM-dd HH:mm:ss,SSS");
-    from = dateUtil.addOffsetToDate(from, Long.parseLong(utcOffset),
-      "yyyy-MM-dd HH:mm:ss,SSS");
+    String format = (String) searchCriteria.getParamValue("format");
+    
+    format = defaultFormat.equalsIgnoreCase(format) && format != null ? ".txt"
+        : ".json";
+    
+    if(stringUtil.isEmpty(utcOffset)){
+      utcOffset = "0";
+    }
+    
+    if (!dateUtil.isDateValid(from) || !dateUtil.isDateValid(to)) {
+      logger.error("Invalid date format. Valid format is "
+          + LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z);
+      throw restErrorUtil.createRESTException("Invalid date format. Valid format is "
+          + LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z,
+          MessageEnums.INVALID_INPUT_DATA);
+      
+    } else {
+      from = from.replace("T", " ");
+      from = from.replace(".", ",");
+
+      to = to.replace("T", " ");
+      to = to.replace(".", ",");
+
+      to = dateUtil.addOffsetToDate(to, Long.parseLong(utcOffset),
+          "yyyy-MM-dd HH:mm:ss,SSS");
+      from = dateUtil.addOffsetToDate(from, Long.parseLong(utcOffset),
+          "yyyy-MM-dd HH:mm:ss,SSS");
+    }
 
     String fileName = dateUtil.getCurrentDateInString();
     if (searchCriteria.getParamValue("hostLogFile") != null
@@ -1388,79 +1395,88 @@ public class LogsMgr extends MgrBase {
       fileName = searchCriteria.getParamValue("hostLogFile") + "_"
         + searchCriteria.getParamValue("compLogFile");
     }
-    String format = (String) searchCriteria.getParamValue("format");
-    format = "text".equalsIgnoreCase(format) && format != null ? ".txt"
-      : ".json";
+    
     String textToSave = "";
     try {
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+      if (response == null) {
+        throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+            .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      }
       SolrDocumentList docList = response.getResults();
+      if (docList == null) {
+        throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+            .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      }
+
       VSummary vsummary = bizUtil.buildSummaryForLogFile(docList);
       vsummary.setFormat(format);
       vsummary.setFrom(from);
       vsummary.setTo(to);
 
-      try {
-        String include[] = ((String) searchCriteria
-          .getParamValue("iMessage"))
-          .split(LogSearchConstants.I_E_SEPRATOR);
-        String includeString = "";
-        for (String inc : include) {
-          includeString = includeString + ",\"" + inc + "\"";
-        }
-        includeString = includeString.replaceFirst(",", "");
-        if (!stringUtil.isEmpty(includeString)) {
-          vsummary.setIncludeString(includeString);
-        }
-      } catch (Exception e) {
-        // do nothing
+      String includeString = (String) searchCriteria.getParamValue("iMessage");
+      if (stringUtil.isEmpty(includeString)) {
+        includeString = "";
+      }
+
+      String include[] = includeString.split(LogSearchConstants.I_E_SEPRATOR);
+
+      includeString = "";
+      for (String inc : include) {
+        if (!stringUtil.isEmpty(inc)) {
+          includeString = includeString + ",\"" + inc + "\"";
+        }
+      }
+      includeString = includeString.replaceFirst(",", "");
+      if (!stringUtil.isEmpty(includeString)) {
+        vsummary.setIncludeString(includeString);
       }
 
-      String excludeString = "";
+      String excludeString = null;
       boolean isNormalExcluded = false;
-      try {
-        String exclude[] = ((String) searchCriteria
-          .getParamValue("eMessage"))
-          .split(LogSearchConstants.I_E_SEPRATOR);
-        for (String exc : exclude) {
-          excludeString = excludeString + ",\"" + exc + "\"";
-        }
-        excludeString = excludeString.replaceFirst(",", "");
-        if (!stringUtil.isEmpty(excludeString)) {
-          vsummary.setExcludeString(excludeString);
-          isNormalExcluded = true;
-        }
-      } catch (Exception ne) {
-        // do nothing
+
+      excludeString = (String) searchCriteria.getParamValue("eMessage");
+      if (stringUtil.isEmpty(excludeString)) {
+        excludeString = "";
+      }
+
+      String exclude[] = excludeString.split(LogSearchConstants.I_E_SEPRATOR);
+      excludeString = "";
+      for (String exc : exclude) {
+        if (!stringUtil.isEmpty(exc)) {
+          excludeString = excludeString + ",\"" + exc + "\"";
+        }
+      }
+
+      excludeString = excludeString.replaceFirst(",", "");
+      if (!stringUtil.isEmpty(excludeString)) {
+        vsummary.setExcludeString(excludeString);
+        isNormalExcluded = true;
+      }
+
+      String globalExcludeString = (String) searchCriteria
+          .getParamValue("gEMessage");
+      if (stringUtil.isEmpty(globalExcludeString)) {
+        globalExcludeString = "";
       }
-      try {
 
-        String globalExclude[] = ((String) searchCriteria
-          .getParamValue("gEMessage"))
+      String globalExclude[] = globalExcludeString
           .split(LogSearchConstants.I_E_SEPRATOR);
 
-        for (String exc : globalExclude) {
-          excludeString = excludeString + ",\"" + exc + "\"";
-        }
+      for (String exc : globalExclude) {
+        if (!stringUtil.isEmpty(exc)) {
+          excludeString = excludeString + ",\"" + exc + "\"";
+        }
+      }
 
-        if (!stringUtil.isEmpty(excludeString)) {
-          if (!isNormalExcluded)
-            excludeString = excludeString.replaceFirst(",", "");
-          vsummary.setExcludeString(excludeString);
+      if (!stringUtil.isEmpty(excludeString)) {
+        if (!isNormalExcluded) {
+          excludeString = excludeString.replaceFirst(",", "");
         }
-      } catch (Exception ne) {
-        // do nothing
+        vsummary.setExcludeString(excludeString);
       }
 
       for (SolrDocument solrDoc : docList) {
 
-        Date logTimeDateObj = (Date) solrDoc
-          .get(LogSearchConstants.LOGTIME);
-
+        Date logTimeDateObj = (Date) solrDoc.get(LogSearchConstants.LOGTIME);
+        if(logTimeDateObj != null){
         String logTime = dateUtil.convertSolrDateToNormalDateFormat(
-          logTimeDateObj.getTime(), Long.parseLong(utcOffset));
+            logTimeDateObj.getTime(), Long.parseLong(utcOffset));
         solrDoc.remove(LogSearchConstants.LOGTIME);
         solrDoc.addField(LogSearchConstants.LOGTIME, logTime);
+        }
       }
 
       if (format.toLowerCase(Locale.ENGLISH).equals(".txt")) {
@@ -1469,21 +1485,21 @@ public class LogsMgr extends MgrBase {
         textToSave = convertObjToString(docList);
       } else {
         throw restErrorUtil.createRESTException(
-          "unsoported format either should be json or text",
-          MessageEnums.ERROR_SYSTEM);
+            "unsupported format; should be either json or text",
+            MessageEnums.ERROR_SYSTEM);
       }
       return fileUtil.saveToFile(textToSave, fileName, vsummary);
 
     } catch (SolrException | SolrServerException | IOException
       | ParseException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
   public String getComponentListWithLevelCounts(SearchCriteria searchCriteria) {
-    SolrQuery solrQuery = queryGenerator.commonFilterQuery(searchCriteria);
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
     solrQuery.setParam("event", "/getComponentListWithLevelCounts");
 
     if (searchCriteria.getSortBy() == null) {
@@ -1512,33 +1528,39 @@ public class LogsMgr extends MgrBase {
       }
       List<VNode> datatList = new ArrayList<VNode>();
       for (PivotField singlePivotField : secondHirarchicalPivotFields) {
-        VNode comp = new VNode();
-        comp.setName("" + singlePivotField.getValue());
-        List<PivotField> levelList = singlePivotField.getPivot();
-        List<VNameValue> levelCountList = new ArrayList<VNameValue>();
-        comp.setLogLevelCount(levelCountList);
-        for (PivotField levelPivot : levelList) {
-          VNameValue level = new VNameValue();
-          level.setName(("" + levelPivot.getValue()).toUpperCase());
-          level.setValue("" + levelPivot.getCount());
-          levelCountList.add(level);
+        if (singlePivotField != null) {
+          VNode comp = new VNode();
+          comp.setName("" + singlePivotField.getValue());
+          List<PivotField> levelList = singlePivotField.getPivot();
+          List<VNameValue> levelCountList = new ArrayList<VNameValue>();
+          comp.setLogLevelCount(levelCountList);
+          for (PivotField levelPivot : levelList) {
+            VNameValue level = new VNameValue();
+            level.setName(("" + levelPivot.getValue()).toUpperCase());
+            level.setValue("" + levelPivot.getCount());
+            levelCountList.add(level);
+          }
+          datatList.add(comp);
         }
-        datatList.add(comp);
       }
       list.setvNodeList(datatList);
       return convertObjToString(list);
     } catch (SolrException | SolrServerException | IOException e) {
-      logger.error(e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      logger.error(e.getMessage() + " SolrQuery=" + solrQuery);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
   public String getExtremeDatesForBundelId(SearchCriteria searchCriteria) {
     SolrQuery solrQuery = new SolrQuery();
+    VNameValueList nameValueList = new VNameValueList();
     try {
       String bundelId = (String) searchCriteria
         .getParamValue(LogSearchConstants.BUNDLE_ID);
+      if(stringUtil.isEmpty(bundelId)){
+        bundelId = "";
+      }
 
       queryGenerator.setSingleIncludeFilter(solrQuery,
         LogSearchConstants.BUNDLE_ID, bundelId);
@@ -1546,11 +1568,18 @@ public class LogsMgr extends MgrBase {
       queryGenerator.setMainQuery(solrQuery, null);
       solrQuery.setSort(LogSearchConstants.LOGTIME, SolrQuery.ORDER.asc);
       queryGenerator.setRowCount(solrQuery, 1);
-      VNameValueList nameValueList = new VNameValueList();
+     
       List<VNameValue> vNameValues = new ArrayList<VNameValue>();
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+      
+      if(response == null){
+        return convertObjToString(nameValueList);
+      }
 
       SolrDocumentList solrDocList = response.getResults();
+      if(solrDocList == null){
+        return convertObjToString(nameValueList);
+      }
       for (SolrDocument solrDoc : solrDocList) {
 
         Date logTimeAsc = (Date) solrDoc
@@ -1575,37 +1604,42 @@ public class LogsMgr extends MgrBase {
 
       solrDocList = response.getResults();
       for (SolrDocument solrDoc : solrDocList) {
-        Date logTimeDesc = (Date) solrDoc
-          .getFieldValue(LogSearchConstants.LOGTIME);
-
-        if (logTimeDesc != null) {
-          VNameValue nameValue = new VNameValue();
-          nameValue.setName("To");
-          nameValue.setValue("" + logTimeDesc.getTime());
-          vNameValues.add(nameValue);
+        if (solrDoc != null) {
+          Date logTimeDesc = (Date) solrDoc
+              .getFieldValue(LogSearchConstants.LOGTIME);
+
+          if (logTimeDesc != null) {
+            VNameValue nameValue = new VNameValue();
+            nameValue.setName("To");
+            nameValue.setValue("" + logTimeDesc.getTime());
+            vNameValues.add(nameValue);
+          }
         }
       }
       nameValueList.setVNameValues(vNameValues);
-      return convertObjToString(nameValueList);
+      
 
     } catch (SolrServerException | SolrException | IOException e) {
-      logger.error(e);
-      try {
-        return convertObjToString(new VNameValueList());
-      } catch (IOException e1) {
-        throw restErrorUtil.createRESTException(e1.getMessage(),
-          MessageEnums.DATA_NOT_FOUND);
-      }
+      logger.error(e.getMessage() + " SolrQuery=" + solrQuery);
+      nameValueList=new VNameValueList();
     }
+    return convertObjToString(nameValueList);
   }
 
   protected VGroupList getSolrGroupList(SolrQuery query)
-    throws SolrServerException, IOException, SolrException {
+      throws SolrServerException, IOException, SolrException {
+    VGroupList collection = new VGroupList();
     QueryResponse response = serviceLogsSolrDao.process(query);
+    if (response == null) {
+      return collection;
+    }
     SolrDocumentList docList = response.getResults();
-    VGroupList collection = new VGroupList(docList);
-    collection.setStartIndex((int) docList.getStart());
-    collection.setTotalCount(docList.getNumFound());
+    if (docList != null) {
+      collection.setGroupDocuments(docList);
+      collection.setStartIndex((int) docList.getStart());
+      collection.setTotalCount(docList.getNumFound());
+    }
+
     return collection;
   }
 
@@ -1613,7 +1647,13 @@ public class LogsMgr extends MgrBase {
     SolrServerException, IOException {
     query.setRows(0);
     QueryResponse response = serviceLogsSolrDao.process(query);
+    if(response == null){
+      return 0l;
+    }
     SolrDocumentList docList = response.getResults();
+    if(docList == null){
+      return 0l;
+    }
     return docList.getNumFound();
   }
 
@@ -1621,21 +1661,20 @@ public class LogsMgr extends MgrBase {
     String fieldsNameStrArry[] = PropertiesUtil
       .getPropertyStringList("solr.servicelogs.fields");
     if (fieldsNameStrArry.length > 0) {
-      try {
-        List<String> uiFieldNames = new ArrayList<String>();
-        String temp = null;
-        for (String field : fieldsNameStrArry) {
-          temp = ConfigUtil.serviceLogsColumnMapping.get(field
+
+      List<String> uiFieldNames = new ArrayList<String>();
+      String temp = null;
+      for (String field : fieldsNameStrArry) {
+        temp = ConfigUtil.serviceLogsColumnMapping.get(field
             + LogSearchConstants.SOLR_SUFFIX);
-          if (temp == null)
-            uiFieldNames.add(field);
-          else
-            uiFieldNames.add(temp);
+        if (temp == null){
+          uiFieldNames.add(field);
+        }else{
+          uiFieldNames.add(temp);
         }
-        return convertObjToString(uiFieldNames);
-      } catch (IOException e) {
-        logger.error("converting object to json failed", e);
       }
+      return convertObjToString(uiFieldNames);
+
     }
     throw restErrorUtil.createRESTException(
       "No field name found in property file",
@@ -1648,14 +1687,14 @@ public class LogsMgr extends MgrBase {
     List<String> fieldNames = new ArrayList<String>();
     String suffix = PropertiesUtil.getProperty("solr.core.logs");
     String excludeArray[] = PropertiesUtil
-      .getPropertyStringList("servicelogs.exclude.columnlist");
+        .getPropertyStringList("servicelogs.exclude.columnlist");
 
     HashMap<String, String> uiFieldColumnMapping = new LinkedHashMap<String, String>();
     ConfigUtil.getSchemaFieldsName(suffix, excludeArray, fieldNames);
 
     for (String fieldName : fieldNames) {
       String uiField = ConfigUtil.serviceLogsColumnMapping.get(fieldName
-        + LogSearchConstants.SOLR_SUFFIX);
+          + LogSearchConstants.SOLR_SUFFIX);
       if (uiField != null) {
         uiFieldColumnMapping.put(fieldName, uiField);
       } else {
@@ -1663,28 +1702,19 @@ public class LogsMgr extends MgrBase {
       }
     }
 
-    try {
-      HashMap<String, String> uiFieldColumnMappingSorted = new LinkedHashMap<String, String>();
-      uiFieldColumnMappingSorted.put(LogSearchConstants.SOLR_LOG_MESSAGE,
-        "");
-
-      Iterator<Entry<String, String>> it = bizUtil
-        .sortHashMapByValuesD(uiFieldColumnMapping).entrySet()
-        .iterator();
-      while (it.hasNext()) {
-        @SuppressWarnings("rawtypes")
-        Map.Entry pair = (Map.Entry) it.next();
-        uiFieldColumnMappingSorted.put("" + pair.getKey(),
-          "" + pair.getValue());
-        it.remove();
-      }
-
-      return convertObjToString(uiFieldColumnMappingSorted);
-    } catch (IOException e) {
-      logger.error(e);
+    HashMap<String, String> uiFieldColumnMappingSorted = new LinkedHashMap<String, String>();
+    uiFieldColumnMappingSorted.put(LogSearchConstants.SOLR_LOG_MESSAGE, "");
+
+    Iterator<Entry<String, String>> it = bizUtil
+        .sortHashMapByValues(uiFieldColumnMapping).entrySet().iterator();
+    while (it.hasNext()) {
+      @SuppressWarnings("rawtypes")
+      Map.Entry pair = (Map.Entry) it.next();
+      uiFieldColumnMappingSorted.put("" + pair.getKey(), "" + pair.getValue());
     }
-    throw restErrorUtil.createRESTException(
-      "Cache is Empty for FieldsName", MessageEnums.DATA_NOT_FOUND);
+
+    return convertObjToString(uiFieldColumnMappingSorted);
+
   }
 
   @SuppressWarnings("unchecked")
@@ -1722,56 +1752,40 @@ public class LogsMgr extends MgrBase {
     }
   }
 
-  public String getCurrentPageOfKeywordSearch(String requestDate) {
-    if (stringUtil.isEmpty(requestDate)) {
-      logger.error("Unique id is Empty");
-      throw restErrorUtil.createRESTException("Unique id is Empty",
-        MessageEnums.DATA_NOT_FOUND);
-    }
-    String pageNumber = mapUniqueId.get(requestDate);
-    if (stringUtil.isEmpty(pageNumber)) {
-      logger.error("Not able to find Page Number");
-      throw restErrorUtil.createRESTException("Page Number not found",
-        MessageEnums.DATA_NOT_FOUND);
-    }
-    return pageNumber;
-  }
-
   public String getAnyGraphData(SearchCriteria searchCriteria) {
-    searchCriteria.addParam("feildTime", LogSearchConstants.LOGTIME);
+    searchCriteria.addParam("fieldTime", LogSearchConstants.LOGTIME);
     String suffix = PropertiesUtil.getProperty("solr.core.logs");
     searchCriteria.addParam("suffix", suffix);
-    SolrQuery solrQuery = queryGenerator.commonFilterQuery(searchCriteria);
-    String result = graphDataGnerator.getAnyGraphData(searchCriteria,
-      serviceLogsSolrDao, solrQuery);
-    if (result != null)
-      return result;
-    try {
-      return convertObjToString(new VBarDataList());
-    } catch (IOException e) {
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
+    VBarDataList result = graphDataGenerator.getAnyGraphData(searchCriteria,
+        serviceLogsSolrDao, solrQuery);
+    if (result == null) {
+      result = new VBarDataList();
     }
+    return convertObjToString(result);
+
   }
 
   public String getAfterBeforeLogs(SearchCriteria searchCriteria) {
+    VSolrLogList vSolrLogList = new VSolrLogList();
     SolrDocumentList docList = null;
     String id = (String) searchCriteria
       .getParamValue(LogSearchConstants.ID);
     if (stringUtil.isEmpty(id)) {
-      try {
-        return convertObjToString(new VSolrLogList());
-      } catch (IOException e) {
-        throw restErrorUtil.createRESTException(e.getMessage(),
-          MessageEnums.ERROR_SYSTEM);
-      }
+      return convertObjToString(vSolrLogList);
+
     }
     String maxRows = "";
 
     maxRows = (String) searchCriteria.getParamValue("numberRows");
-    if (stringUtil.isEmpty(maxRows))
-      maxRows = "10";
+    if (stringUtil.isEmpty(maxRows)){
+      maxRows = "10";
+    }
     String scrollType = (String) searchCriteria.getParamValue("scrollType");
+    if(stringUtil.isEmpty(scrollType)){
+      scrollType = "";
+    }
+    
     String logTime = null;
     String sequenceId = null;
     try {
@@ -1780,6 +1794,9 @@ public class LogsMgr extends MgrBase {
         queryGenerator.buildFilterQuery(LogSearchConstants.ID, id));
       queryGenerator.setRowCount(solrQuery, 1);
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+      if(response == null){
+        return convertObjToString(vSolrLogList);
+      }
       docList = response.getResults();
       if (docList != null && !docList.isEmpty()) {
         Date date = (Date) docList.get(0).getFieldValue(
@@ -1790,14 +1807,14 @@ public class LogsMgr extends MgrBase {
           LogSearchConstants.SEQUNCE_ID);
       }
       if (stringUtil.isEmpty(logTime)) {
-        return convertObjToString(new VSolrLogList());
+        return convertObjToString(vSolrLogList);
       }
     } catch (SolrServerException | SolrException | IOException e) {
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.DATA_NOT_FOUND);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
     if (LogSearchConstants.SCROLL_TYPE_BEFORE.equals(scrollType)) {
-      VSolrLogList vSolrLogList = whenScrollUp(searchCriteria, logTime,
+      vSolrLogList = whenScrollUp(searchCriteria, logTime,
         sequenceId, maxRows);
 
       SolrDocumentList solrDocList = new SolrDocumentList();
@@ -1805,55 +1822,42 @@ public class LogsMgr extends MgrBase {
         solrDocList.add(solrDoc);
       }
       vSolrLogList.setSolrDocuments(solrDocList);
-      try {
         return convertObjToString(vSolrLogList);
-      } catch (IOException e) {
-        // do nothing
-      }
+     
     } else if (LogSearchConstants.SCROLL_TYPE_AFTER.equals(scrollType)) {
       SolrDocumentList solrDocList = new SolrDocumentList();
-      VSolrLogList vSolrLogList = new VSolrLogList();
+      vSolrLogList = new VSolrLogList();
       for (SolrDocument solrDoc : whenScrollDown(searchCriteria, logTime,
-        sequenceId, maxRows).getList()) {
+          sequenceId, maxRows).getList()) {
         solrDocList.add(solrDoc);
       }
       vSolrLogList.setSolrDocuments(solrDocList);
-      try {
-        return convertObjToString(vSolrLogList);
-      } catch (IOException e) {
-        // do nothing
-      }
+      return convertObjToString(vSolrLogList);
 
     } else {
-      VSolrLogList vSolrLogList = new VSolrLogList();
+      vSolrLogList = new VSolrLogList();
       SolrDocumentList initial = new SolrDocumentList();
       SolrDocumentList before = whenScrollUp(searchCriteria, logTime,
         sequenceId, maxRows).getList();
       SolrDocumentList after = whenScrollDown(searchCriteria, logTime,
         sequenceId, maxRows).getList();
-      if (before == null || before.isEmpty())
-        before = new SolrDocumentList();
+      if (before == null || before.isEmpty()){
+        return convertObjToString(vSolrLogList);
+      }
       for (SolrDocument solrDoc : Lists.reverse(before)) {
         initial.add(solrDoc);
       }
       initial.add(docList.get(0));
-      if (after == null || after.isEmpty())
-        after = new SolrDocumentList();
+      if (after == null || after.isEmpty()){
+        return convertObjToString(vSolrLogList);
+      }
       for (SolrDocument solrDoc : after) 

<TRUNCATED>
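
A pattern worth calling out in the LogsMgr changes above: every call to serviceLogsSolrDao.process(...) is now followed by explicit null checks on the returned QueryResponse and on its SolrDocumentList before any document or field is read. The snippet below is a minimal sketch of that pattern as a reusable helper; it is not part of this patch, and the class and method names (SolrResponseGuard, documentsOrEmpty, firstOrNull) are invented for illustration only.

import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;

// Hypothetical helper, not part of this commit: centralizes the null checks
// that the patch repeats after each serviceLogsSolrDao.process(...) call.
public class SolrResponseGuard {

  // Returns the documents of a response, or an empty list when the response
  // or its result list is missing, so callers never dereference null.
  public static SolrDocumentList documentsOrEmpty(QueryResponse response) {
    if (response == null) {
      return new SolrDocumentList();
    }
    SolrDocumentList docList = response.getResults();
    return (docList != null) ? docList : new SolrDocumentList();
  }

  // Returns the first document of a response, or null when there is none.
  public static SolrDocument firstOrNull(QueryResponse response) {
    SolrDocumentList docList = documentsOrEmpty(response);
    return docList.isEmpty() ? null : docList.get(0);
  }
}

With a helper like this, the repeated "if (queryResponse == null) { throw ... }" and "if (docList == null) { throw ... }" blocks could collapse into a single call plus one emptiness check; the patch itself keeps the checks inline at each call site.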

[2/9] ambari git commit: AMBARI-16034. Incremental changes to LogSearch to bring it up to date in the trunk (Dharmesh Makwana via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js
index 96eea8d..1dd6bc7 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js
@@ -93,7 +93,7 @@ define(['require',
 			this.collection = new VLogList([], {
                 state: {
                     firstPage: 0,
-                    pageSize: 50
+                    pageSize: 25
                 }
             });
 			this.collection.url = Globals.baseURL + "dashboard/solr/logs_search";
@@ -108,7 +108,7 @@ define(['require',
 			this.listenTo(this.collection, "backgrid:refresh",function(){
 				$(".contextMenuBody [data-id='F']").show();
             	$(".contextMenuBody").hide();
-            	this.$("#loaderGraph").hide();
+            	//this.$("#loaderGraph").hide();
 				this.$(".loader").hide();
             	//this.ui.find.trigger("keyup");
 //            	if (this.quickHelp)
@@ -170,7 +170,7 @@ define(['require',
             },this);
 		},
 		fetchAllTogether : function(value){
-			this.$("#loaderGraph").show();
+			//this.$("#loaderGraph").show();
 			this.fetchTableData(value);
         	_.extend(this.graphParams,value);
         	//this.fetchGraphData(this.graphParams);
@@ -431,7 +431,17 @@ define(['require',
 		fetchTableData : function(params){
 			var that = this;
 			$.extend(this.collection.queryParams,params);
-			this.collection.getFirstPage({reset:true});
+			this.collection.getFirstPage({
+				reset:true,
+				beforeSend : function(){
+        			that.$("#loaderGraph").show();
+        			that.$(".loader").show();
+        		},
+        		complete : function(){
+					that.$("#loaderGraph").hide();
+					that.$(".loader").hide();
+				}
+			});
 		},
 		fetchTableCollection : function(queryParams, param){
 			var that = this;
@@ -440,12 +450,17 @@ define(['require',
 				reset:true,
 				beforeSend : function(){
         			that.$("#loaderGraph").show();
-        		}
+        			that.$(".loader").show();
+        		},
+        		complete : function(){
+					that.$("#loaderGraph").hide();
+					that.$(".loader").hide();
+				}
 			},param));
 		},
 		fetchGraphData : function(params){
 			var that = this;
-			that.$("#loaderGraph").show();
+			//that.$("#loaderGraph").show();
 			that.$(".loader").show();
 			this.graphModel.fetch({
 				dataType:"json",
@@ -456,7 +471,7 @@ define(['require',
 				error : function(){
 				},
 				complete : function(){
-					that.$("#loaderGraph").hide();
+					//that.$("#loaderGraph").hide();
 					that.$(".loader").hide();
 				}
 			});

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostListView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostListView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostListView.js
index 6c9b224..9c426f5 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostListView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostListView.js
@@ -279,8 +279,7 @@ define(['require',
 				
 			}
 			var data = this.getCheckedHierarchyData();
-			console.log(data);
-			this.vent.trigger("tree:search",{treeParams : JSON.stringify(data)});
+			this.vent.trigger("tree:search",{treeParams : JSON.stringify(_.pluck(data,"h"))});
 		},
 		getCheckedHierarchyData : function(){
 			var data=[];
@@ -305,7 +304,7 @@ define(['require',
 				this.$('.tree  input[type="checkbox"]').prop({"checked":false,"indeterminate":false});
 			var data = this.getCheckedHierarchyData();
 			this.params.treeParams = _.extend({},data);
-			this.vent.trigger("tree:search",{treeParams : JSON.stringify(data)});
+			this.vent.trigger("tree:search",{treeParams : JSON.stringify(_.pluck(data,"h"))});
 			
 		},
 		onSearchHostClick : function(e){

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dialog/SaveSearchFilterView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dialog/SaveSearchFilterView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dialog/SaveSearchFilterView.js
index 33cd400..2ef2f94 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dialog/SaveSearchFilterView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dialog/SaveSearchFilterView.js
@@ -80,7 +80,9 @@ define(['require',
                     that = this;
 
                 _.each(this.params, function(value, key) {
-                    if ((key != "from" && value != "" && key != "to" && key != "bundleId" && key != "start_time" && key != "end_time" && key != "q" && key != "unit" && key != "query" && key != "type" && key != "time" && key != "dateRangeLabel" && key != "advanceSearch")) {
+                    if ((key != "from" && value != "" && key != "to" && key != "bundleId" && key != "start_time" && 
+                    		key != "end_time" && key != "q" && key != "unit" && key != "query" && key != "type" && 
+                    		key != "time" && key != "dateRangeLabel" && key != "advanceSearch" && !_.isUndefined(Globals.paramsNameMapping[key]) )) {
                         tableSting += '<tr class="' + key + '"><td>' + Globals.paramsNameMapping[key].label + '</td><td>' + (that.createInnerSpan(key)) + '</td><tr>'
                     }
                 })

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
index 611a51e..2560103 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
@@ -49,6 +49,10 @@ define(['require',
             events: function() {
                 var events = {};
                 events["click [data-override]"] = 'onDataOverrideClick';
+                events["click [data-value]"] = 'onLogLevelHeaderClick';
+                events["click #filterContent input[type='checkbox']"] = 'onAnyCheckboxClick';
+                events["click .overrideRow a"] = 'onEditHost';
+                
                 return events;
             },
 
@@ -97,8 +101,7 @@ define(['require',
                 var that = this;
                 // this.setupSelect2Fields(this.levelCollection, "type", "type", "levelSelect2", 'Select Level');
                 
-                $.when(this.hostList.fetch({ reset: true }), this.componentsList.fetch({ reset: true }), this.model.fetch({})).done(function(c1, c2, m1) {
-                    
+                $.when(this.hostList.fetch({ reset: true }), this.componentsList.fetch({ reset: true }), this.model.fetch({})).then(function(c1, c2, m1) {
                     // if (!_.isUndefined(that.model.get('components'))) {
                     //     that.ui.componentSelect2.select2('val', that.model.get('components'));
                     // }
@@ -108,7 +111,7 @@ define(['require',
                     // if (!_.isUndefined(that.model.get('levels'))) {
                     //     that.ui.levelSelect2.select2('val', that.model.get('levels'));
                     // }
-                    that.ui.loader.hide();
+                    that.hideLoading();
                     that.trigger("toggle:okBtn",true);
 
                     //that.dataLevels = [];
@@ -118,14 +121,24 @@ define(['require',
                     //that.dataList = _.pluck(that.componentsList.models, 'attributes');
                     that.renderComponents();
                     that.populateValues();
+                },function(error){
+                	that.hideLoading();
+                	Utils.notifyError({
+                        content: "There was an issue on the server. Please try again later."
+                    });
+                	that.trigger("closeDialog");
                 });
             },
+            hideLoading : function(){
+            	this.ui.loader.hide();
+            },
             renderComponents : function(){
             	var that =this;
             	_.each(that.componentsList.models, function(model){
-                    var levels='<td align="left">'+model.get("type")+'<span class="pull-right"><small><i>Override</i></small> <input data-override type="checkbox" data-name='+model.get("type")+'></span></td>';
-                    levels += that.getLevelForComponent(model.get("type"),true);
-                    var html = '<tr data-component="'+model.get("type")+'">'+levels+'</tr>';
+                    var levels='<td align="left">'+model.get("type")+'</td>';
+                    var override = '<td class="text-left"><span class="pull-left"><!--small><i>Override</i></small--> <input data-override type="checkbox" data-name='+model.get("type")+'></span></td>';
+                    levels +=  override + that.getLevelForComponent(model.get("type"),false);
+                    var html = '<tr class="overrideSpacer"></tr><tr class="componentRow borderShow" data-component="'+model.get("type")+'">'+levels+'</tr><tr></tr>';
                     that.ui.filterContent.append(html);
                 });
             },
@@ -135,6 +148,7 @@ define(['require',
             		var components = this.model.get("filter");
             		_.each(components,function(value,key){
             			var obj = components[key];
+            			
             			if((_.isArray(obj.overrideLevels) && obj.overrideLevels.length) || 
             					(_.isArray(obj.hosts) && obj.hosts.length) || obj.expiryTime){
             				var $el = that.$("input[data-name='"+key+"']").filter("[data-override]");
@@ -150,6 +164,7 @@ define(['require',
             							var $checkbox = $override.find("input[data-id='"+obj.overrideLevels[z]+"']");
             							if(! $checkbox.is(":checked")){
             								$checkbox.prop("checked",true);
+            								// that.showHostSelect2(key);
             							}
             						}
             					}
@@ -160,15 +175,75 @@ define(['require',
             				var dateObj = Utils.dateUtil.getMomentObject(obj.expiryTime);
             				that.$("[data-date='"+key+"']").data('daterangepicker').setStartDate(dateObj);
             				that.$("[data-date='"+key+"']").val(dateObj.format("MM/DD/YYYY HH:mm"));
+            				that.showExpiry(key)
             			}
             			//setting hosts
             			if(_.isArray(obj.hosts)){
             				if(obj.hosts.length){
             					that.$("[data-host='"+key+"']").select2("val",obj.hosts);
+                                that.showHostSelect2(key);
             				}
             			}
+            			//setting default values
+            			if(obj.defaultLevels && _.isArray(obj.defaultLevels) && obj.defaultLevels.length){
+            				var $default = that.$("tr[data-component='"+key+"']");
+        					if($default.length){
+        						for(var z=0; z<obj.defaultLevels.length; z++){
+        							var $checkbox = $default.find("input[data-id='"+obj.defaultLevels[z]+"']");
+        							if(! $checkbox.is(":checked")){
+        								$checkbox.prop("checked",true);
+        							}
+        						}
+        					}
+            			}
             		});
             	}
+            	//set check all value
+            	_.each(this.levelCollection.models,function(model){
+            		that.setCheckAllValue(model.get("type"));
+            	});
+            	
+            },
+            onAnyCheckboxClick : function(e){
+            	var $el = $(e.currentTarget);
+            	this.setCheckAllValue($el.data("id"));
+            },
+            onEditHost : function(e){
+            	var $el = $(e.currentTarget);
+            	$el.hide();
+            	if($el.data("type") == "host"){
+            		this.showHostSelect2($el.data("component"));
+                }else{
+            		this.showExpiry($el.data("component"));
+                }
+            },
+            hideHostSelect2 : function(forComponent){
+            	this.ui[forComponent].siblings(".select2-container").hide();
+            	this.$("a[data-component='"+forComponent+"'][data-type='host']").show();
+                this.$('i.hostDown[data-component="'+forComponent+'"]').show();
+            },
+            showHostSelect2 : function(forComponent){
+            	this.ui[forComponent].siblings(".select2-container").show();
+            	this.$("a[data-component='"+forComponent+"'][data-type='host']").hide();
+                this.$('i.hostDown[data-component="'+forComponent+'"]').hide();
+            },
+            showExpiry : function(forComponent){
+            	this.$("[data-date='"+forComponent+"']").show();
+            	this.$("a[data-component='"+forComponent+"'][data-type='expiry']").hide();
+            },
+            hideExpiry : function(forComponent){
+            	this.$("[data-date='"+forComponent+"']").hide();
+            	this.$("a[data-component='"+forComponent+"'][data-type='expiry']").show();
+            },
+            setCheckAllValue : function(type){
+            	var that = this;
+            	if(! type)
+            		return
+            	if(that.$("[data-id='"+type+"']:checked").length == that.$("[data-id='"+type+"']").length){
+        			that.$("[data-value='"+type+"']").prop("checked",true);
+        		}else{
+        			that.$("[data-value='"+type+"']").prop("checked",false);
+        		}
             },
             getLevelForComponent : function(type,checked){
             	var html="";
@@ -181,19 +256,35 @@ define(['require',
             	var $el = $(e.currentTarget);
             	if(e.currentTarget.checked){
             		this.addOverrideRow($el.data("name"));
+                    this.$('tr[data-component="'+$el.data("name")+'"]').removeClass('borderShow ');
+                    this.$('tr[data-component="'+$el.data("name")+'"]').addClass('bgHighlight ');
             	}else{
             		this.removeOverrideRow($el.data("name"));
+                    this.$('tr[data-component="'+$el.data("name")+'"]').addClass('bgHighlight borderShow ');
+                    this.$('tr[data-component="'+$el.data("name")+'"]').removeClass('bgHighlight ');
+            	}
+            },
+            onLogLevelHeaderClick : function(e){
+            	var $el = $(e.currentTarget);
+            	if(e.currentTarget.checked){
+            		this.$("[data-id='"+$el.data("value")+"']").prop("checked",true);
+            	}else{
+            		this.$("[data-id='"+$el.data("value")+"']").prop("checked",false);
             	}
             },
             addOverrideRow : function(forComponent){
-            	var $el = this.ui.filterContent.find("tr[data-component='"+forComponent+"']");
+            	var $el = this.ui.filterContent.find("tr[data-component='"+forComponent+"']"),textForHost = "Click here to apply on a specific host",
+            	textForExpiry="Select Expiry Date";
             	if($el.length){
-            		var html = "<tr class='overrideRow "+forComponent+"'><td>&nbsp;</td>"+this.getLevelForComponent($el.data("component"),false)+"</tr>";
-            		html += "<tr class='overrideRow "+forComponent+"'><td>&nbsp;</td><td colspan='3'><input class='datepickerFilter' data-date='"+forComponent+"'></td>" +
-            				"<td colspan='3'><div ><input data-host='"+forComponent+"' type='hidden' /></div></td></tr>"
+            		var html = "<tr class='overrideRow bgHighlight "+forComponent+"'><td class='text-left'><i data-component='"+forComponent+"' class='fa fa-level-down hostDown' aria-hidden='true'></i><a href='javascript:void(0);' data-type='host' data-component='"+forComponent+"'>"+textForHost+"</a><input data-host='"+forComponent+"' type='hidden' /></td>" +
+            				"<td  class='text-left'><a href='javascript:void(0);' data-type='expiry' data-component='"+forComponent+"'>"+textForExpiry+"</a>" +
+            				"<input class='datepickerFilter' data-date='"+forComponent+"'></td>"+this.getLevelForComponent($el.data("component"),false)+"</tr>";
+            		//html += "<tr class='overrideRow "+forComponent+"'><td>&nbsp;</td><td>&nbsp;</td><td colspan='3'><input class='datepickerFilter' data-date='"+forComponent+"'></td>" +
+            			//	"<td colspan='3'><div ><input data-host='"+forComponent+"' type='hidden' /></div></td></tr>"
             		$el.after(html);
             		this.ui[forComponent] = this.$("[data-host='"+forComponent+"']");
             		this.setupSelect2Fields(this.hostList, "host", 'host', forComponent, 'Select Host', 'hostBoolean');
+            		this.hideHostSelect2(forComponent);
             		this.$("[data-date='"+forComponent+"']").daterangepicker({
             	        singleDatePicker: true,
             	        showDropdowns: true,
@@ -208,6 +299,7 @@ define(['require',
 //                        "timePickerIncrement": 1,
                         "timePicker24Hour": true,
             	    });
+            		this.hideExpiry(forComponent);
             	}
             },
             removeOverrideRow : function(foComponent){
@@ -252,7 +344,6 @@ define(['require',
             				expiryTime : (date && date.startDate) ? date.startDate.toJSON() : ""
             		};
             	});
-            	console.log(obj);
             	return (obj);
             },
             getOverideValues : function(ofComponent){

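For readers not following the Backbone view line by line: the override handling above ultimately reads and writes one object per component, keyed by component name, with defaultLevels, overrideLevels, hosts and expiryTime fields (those field names come from the code in this hunk). Below is a minimal, illustrative sketch of that shape and of how defaults and overrides would be resolved; the component name, the sample values and the helper function are assumptions for illustration, not part of the commit.

    // Shape of one entry in the logfeeder filter model (illustrative values only).
    var logfeederFilter = {
        logsearch_app: {
            defaultLevels:  ["FATAL", "ERROR", "WARN"],
            overrideLevels: ["FATAL", "ERROR", "WARN", "INFO"],
            hosts:          ["host1.example.com"],
            expiryTime:     "2016-04-30T12:00:00.000Z"
        }
    };

    // Hypothetical helper: override levels win when present, otherwise defaults apply.
    function effectiveLevels(filter, component) {
        var entry = filter[component] || {};
        if (Array.isArray(entry.overrideLevels) && entry.overrideLevels.length) {
            return entry.overrideLevels;
        }
        return entry.defaultLevels || [];
    }

    // effectiveLevels(logfeederFilter, "logsearch_app") -> ["FATAL", "ERROR", "WARN", "INFO"]
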
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
index 3abfc8d..e8cbba6 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
@@ -96,7 +96,7 @@ define(['require',
                 this.logFileCollection = new VLogList([], {
                     state: {
                         firstPage: 0,
-                        pageSize: 50
+                        pageSize: 25
                     }
                 });
                 this.logFileCollection.url = Globals.baseURL + "dashboard/solr/logs_search";
@@ -620,7 +620,7 @@ define(['require',
                     this.RLogFileTable.currentView.$el.find(".logMessage").highlight(selection.toString().trim(), true, e.currentTarget);
                     this.ui.contextMenu.show();
                     this.ui.contextMenu.css({
-                        'top': e.pageY - 88,
+                        'top': e.pageY - 140,
                         'left': e.pageX
                     });
                 } else {
@@ -636,7 +636,11 @@ define(['require',
                         this.ui.find.val(this.selectionText);
                         this.ui.find.trigger("keyup");
                         this.ui.find.focus();
-                    } else {
+                    }else if(type === "IA" || type === "EA"){
+    					this.vent.trigger("toggle:facet",{viewName:((type === "IA") ? "include" : "exclude") +"ServiceColumns",
+    						key:Globals.serviceLogsColumns["log_message"],value:"*"+this.selectionText+"*"});
+    				} 
+                    else {
                         //this.vent.trigger("add:include:exclude",{type:type,value:this.selectionText});
                         this.vent.trigger("toggle:facet", { viewName: ((type === "I") ? "include" : "exclude") + "ServiceColumns", key: Globals.serviceLogsColumns["log_message"], value: this.selectionText });
                     }
@@ -778,7 +782,8 @@ define(['require',
                         error: function(col, response, errorThrown) {
                             that.resetFindParams();
                             if (!!errorThrown.xhr.getAllResponseHeaders()) {
-                                Utils.notifyInfo({ content: "Keyword '" + val + "' not found in " + (keywordType == 1 ? "next" : "previous") + " page !" });
+                              //  Utils.notifyInfo({ content: "Keyword '" + val + "' not found in " + (keywordType == 1 ? "next" : "previous") + " page !" });
+                                that.ui.clearSearch.css({ 'right': 82 + 'px' });
                             }
                         },
                         complete: function() {

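The new "IA" and "EA" context-menu actions differ from the existing "I" and "E" ones only in that the selected text is wrapped in wildcards before the include/exclude facet event is fired. A hedged sketch of that dispatch as a standalone function; vent and logMessageColumn stand in for the view's Backbone.Wreqr aggregator and Globals.serviceLogsColumns["log_message"], and are assumptions here rather than real globals.

    // type is one of "I", "E", "IA", "EA"; selectionText is the highlighted log text.
    function triggerIncludeExclude(vent, logMessageColumn, type, selectionText) {
        var include = (type === "I" || type === "IA");
        var value = (type === "IA" || type === "EA")
            ? "*" + selectionText + "*"   // "*Include*"/"*Exclude*": match anywhere in the message
            : selectionText;              // plain Include/Exclude: the exact selection
        vent.trigger("toggle:facet", {
            viewName: (include ? "include" : "exclude") + "ServiceColumns",
            key: logMessageColumn,
            value: value
        });
    }
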
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js
index d2bcd3b..2918606 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js
@@ -302,7 +302,6 @@ define(['require',
 				
 			}
 			var data = this.getCheckedHierarchyData();
-			console.log(data);
 			this.vent.trigger("tree:search",{treeParams : JSON.stringify(data)});
 		},
 		onNewTabIconClick : function(e){

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js
index 58b6707..892bc57 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js
@@ -71,7 +71,7 @@ define(['backbone',
                 this.vent = new Backbone.Wreqr.EventAggregator();
                 //this.servicesData = {services:{ranger:{label:"Ranger",components:[{name:"ranger_admin"}],dealsWithServices:[{name:"hdfs"},{name:"kms"}],dealsWithComponents:[{name:"security_admin"},{name:"portal"}],},ambari:{label:"Ambari",dealsWithServices:[{name:"ranger"},{name:"hive"}]},hdfs:{label:"Hdfs",components:[{name:"hdfs_namenode"},{name:"hdfs_datanode"}],dealsWithServices:[],dealsWithComponents:[],}}};
                 var todayRange = Utils.dateUtil.getTodayRange();
-                this.params = _.pick(ViewUtils.getDefaultParamsForHierarchy(),"from","to","bundle_id");
+                this.params = _.pick(ViewUtils.getDefaultParamsForHierarchy(),"from","to","bundle_id","host_name","component_name","file_name");
                 this.initializeCollection();
                 this.bindEvents();
             },
@@ -107,6 +107,7 @@ define(['backbone',
             },
             bindEvents : function(){
             	this.listenTo(this.serviceLogsCollection,"reset",function(){
+            		this.renderBarGraph();
             		this.renderLogLevelTable();
             	},this);
             	this.listenTo(this.serviceLogsCollection, 'request', function() {
@@ -126,7 +127,8 @@ define(['backbone',
             	this.listenTo(this.vent,"logtime:filter",function(params){
             		//this.fetchServiceLogsData(params);
             		//this.vent.trigger("graph:data:update",params,this.graphPropModel.attributes);
-            		this.renderGraph(params);
+            		//this.renderGraph(params);
+            		this.fetchLevelCollection(params);
             		this.fetchTopUsers(params);
             	},this);
             },
@@ -141,7 +143,8 @@ define(['backbone',
                 	}
                 });
             	this.renderDateRange();
-            	this.renderGraph(this.params);
+            	//this.renderGraph(this.params);
+            	this.fetchLevelCollection(this.params);
             },
             fetchTopUsers : function(params){
     			var that = this;
@@ -218,9 +221,10 @@ define(['backbone',
             	}
             	this.ui.components.select2("val",selectedComponents);
             	var params = this.getParams();
-            	this.renderGraph(params);
+            	//this.renderGraph(params);
             	//this.fetchServiceLogsData(params);
             	//this.fetchTopUsers(params);
+            	this.fetchLevelCollection(params);
             },
             renderServices : function(data){
             	var that = this;
@@ -255,9 +259,10 @@ define(['backbone',
     				}
     			}).on("change",function(e){
     				var params = that.getParams();
-    				that.renderGraph(params);
+    				//that.renderGraph(params);
                 	//that.fetchServiceLogsData(params);
                 	//that.fetchTopUsers(params);
+    				that.fetchLevelCollection(params);
     			});
             },
             renderDateRange : function(){
@@ -315,6 +320,13 @@ define(['backbone',
             	}
             	return params;
             },
+            fetchLevelCollection : function(params){
+            	_.extend(this.serviceLogsCollection.queryParams, params,{"yAxis":"count",
+        			"xAxis":"level"});
+                this.serviceLogsCollection.fetch({
+                    reset: true
+                });
+            },
             renderGraph : function(params){
             	//var that=this,model = new Backbone.Model({"id":"grid_histo0","title":"test","showX":"showX","xAxis":"access","xTimeFormat":"","xNormalFormat":"","showY":"showY","yAxis":"count","yAxisFormat":"","showLegend":"showLegend","stackOrGroup":"Normal","params":{"from":"2016-03-08T18:30:01.000Z","to":"2016-03-09T18:29:59.999Z","unit":"+1HOUR","yAxis":"count","xAxis":"access"},"myData":{"type":2,"dataId":"grid_histo0"},"col":1,"row":1,"size_x":3,"size_y":2});
             	var that=this,model = new Backbone.Model({
@@ -341,6 +353,53 @@ define(['backbone',
     				}));
                 });
             },
+            renderBarGraph : function(){
+            	var data=[],that=this;
+            	this.serviceLogsCollection.each(function(model){
+            		var d = {
+            				key : "Levels",
+            				values : []
+            		}
+            		for(var z=0; z<model.get("dataCount").length; z++){
+            			var name = model.get("dataCount")[z].name;
+            			d.values.push({
+            				label : (""+name).toUpperCase(),
+            				value : parseInt(model.get("dataCount")[z].value,10),
+            				color : (((""+name).toUpperCase() === 'ERROR') ? ("#E81D1D") :
+                                ( (""+name).toUpperCase() === 'INFO') ? ("#2577B5") :
+                                ( (""+name).toUpperCase() === 'WARN') ? ("#FF8916") :
+                                ( (""+name).toUpperCase() === 'FATAL') ? ("#830A0A") :
+                                ( (""+name).toUpperCase() === 'DEBUG') ? ("#65E8FF") :
+                                ( (""+name).toUpperCase() === 'TRACE') ? ("#888888") : "")
+            			});
+            			
+            		}
+            		data.push(d);
+            	});
+            	nv.addGraph(function() {
+            		  var chart = nv.models.discreteBarChart()
+            		    .x(function(d) { return d.label })
+            		    .y(function(d) { return d.value })
+            		    .staggerLabels(true)
+            		    .width(700)
+            		    .showValues(false)
+            		    chart.tooltip.enabled();
+            		  chart.yAxis
+				      .tickFormat(d3.format('d'));
+            		  
+            		  chart.margin({
+                          right: 100,
+                          left: 120,
+                      });
+            		  d3.select(that.$("[data-id='serviceGraph'] svg")[0])
+            		    .datum(data)
+            		    .transition().duration(500)
+            		    .call(chart);
+            		  return chart;
+            	});
+            },
             renderLogLevelTable : function(){
             	var that = this;
             	this.ui.logLevelTable.empty();
@@ -439,7 +498,6 @@ define(['backbone',
     			$el.siblings().removeClass("active");
     			$el.toggleClass("active");
         		if(serviceSelected.length){
-        			console.log(this.servicesData.service);
 //        			var serviceObj = this.servicesData.service[serviceSelected.data("name")];
 //        			if(serviceObj){
         				var found = getDependentServices(serviceSelected.data("name"),service);

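renderBarGraph above maps log levels to bar colors with a chain of nested ternaries. An equivalent, arguably easier-to-scan way to build the same nvd3 discreteBarChart series is a lookup table; this is only a sketch that reuses the hex values and the dataCount shape ({name, value} pairs) from the hunk above, and is not part of the commit.

    var LEVEL_COLORS = {
        FATAL: "#830A0A",
        ERROR: "#E81D1D",
        WARN:  "#FF8916",
        INFO:  "#2577B5",
        DEBUG: "#65E8FF",
        TRACE: "#888888"
    };

    // dataCount: [{name: "error", value: "12"}, ...] as exposed by the collection models.
    function toDiscreteBarSeries(dataCount) {
        return [{
            key: "Levels",
            values: dataCount.map(function (entry) {
                var level = String(entry.name).toUpperCase();
                return {
                    label: level,
                    value: parseInt(entry.value, 10),
                    color: LEVEL_COLORS[level] || ""
                };
            })
        }];
    }

The resulting array can be handed to d3.select(...).datum(...) exactly as in the view; only the data preparation differs.
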
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style.css
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style.css b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style.css
index 7361df7..de5e45a 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style.css
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style.css
@@ -185,6 +185,12 @@ p.log-line:before {
 .btn-search {
 	padding: 2px 6px 3px;
 	line-height: 1.42857143;
+  margin:0 0;
+}
+
+.btn-searchCompare {
+  padding: 2px 6px 3px;
+  line-height: 1.42857143;
   margin:3px 0;
 }
 
@@ -1346,6 +1352,10 @@ img.quickLinkNav{
   right: -1px;
   top: -2px;
 }
+.hostNodes{
+  height: 450px;
+  overflow: auto;
+}
 .hostNodes li{
   cursor: pointer;
 }
@@ -1981,37 +1991,99 @@ input:-webkit-autofill:active{
 }
 
 
-/*
-.table-fixed thead {
-  width: 97%;
+
+.logFeederTable{
+  border-collapse: separate; border-spacing: 0;
+  position: fixed;
+}
+
+.logFeederTable{
+  width: 906px;
+  display : block;
 }
-.table-fixed tbody {
-  height: 230px;
+
+.logFeederTable tbody {
+  height: 365px;
   overflow-y: auto;
-  width: 100%;
+  width: 906px;
+  display : block;
 }
-.table-fixed thead > tr> th {
-  display: table-cell;
-  border-bottom:1px solid #CCC;
+
+.logFeederTable th {
+  width: 90px;
+  border-left:1px solid #CCC;
+  border-right:1px solid #CCC;
+  border-top:1px solid #CCC !important;
+  border-bottom:1px solid #CCC !important;
+  background-color: #FFF;
 }
-.table-fixed thead, .table-fixed tbody {
-  display: block;
+.logFeederTable th:first-of-type {
+  width: 312px;
+  border-right:1px solid #CCC;
+  border-top:1px solid #CCC !important;
+  border-bottom:1px solid #CCC !important;
 }
-.table-fixed thead > tr> th{
-  width:13%;
+.logFeederTable th:nth-child(2) {
+  width : 190px;
+  border-top:1px solid #CCC !important;
+  border-bottom:1px solid #CCC !important; 
 }
-.table-fixed thead > tr> th:first-of-type{
-  width:28%;
+.datepickerFilter {
+  width:100%;
 }
-.table-fixed thead > tr> th:last-of-type{
-  border-right-width:0;
-}*/
-.logFeederTabel th {
-	width : 12%;
+.logFeederTable td {
+  width :90px;
+  padding:0 10px;
 }
-.logFeederTabel th:first-of-type {
-	width : 28%;
+.logFeederTable td:first-of-type {
+  width: 269px;
+  padding:0 10px;
 }
-.datepickerFilter {
-	width:100%;
+.logFeederTable td:nth-child(2) {
+   width: 178px;
+   padding: 0 10px;
 }
+.logFeederTable tr.componentRow td{
+  border-top: solid 1px #CCC!important;
+}
+.logFeederTable tr.overrideRow td{
+  border-bottom: solid 1px #CCC!important; 
+}
+.logFeederTable tr.borderShow td{
+  border-bottom: solid 1px #CCC!important; 
+}
+
+.logFeederTable tr.componentRow td:first-child {border-left: solid 1px #CCC; }
+.logFeederTable tr.componentRow td:last-child { border-right: solid 1px #CCC;}
+
+
+.logFeederTable tr.overrideRow td:first-child { border-left: solid 1px #CCC;  }
+.logFeederTable tr.overrideRow td:last-child { border-right: solid 1px #CCC; }
+
+.logFeederTable tr.bgHighlight{
+  background-color: rgba(255, 238, 0, 0.19) !important;
+}
+
+.overrideSpacer{
+  height:13px !important;
+}
+.hostDown{
+  color: #333;
+  margin-right:5px;
+}
+input.filterInput{
+  height: 20px;
+}
+.logFeederTable a{
+  border-bottom:1px dotted;
+}
+.logFeederTable a:hover{
+  text-decoration: none;
+}
+.VS-search .search_input{
+  width: 2px !important;
+}
+.VS-search .search_input input{
+  margin-top: 0 !important;
+}
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style_v2.css
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style_v2.css b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style_v2.css
index 832c80a..a4264a1 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style_v2.css
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style_v2.css
@@ -1313,7 +1313,7 @@ ul.DTTT_dropdown.dropdown-menu > li:hover > a {
   min-height: 26px;
   display: block;
   height: 26px;
-  padding: 0 0 0 8px;
+  padding: 0 0 0 4px;
   overflow: hidden;
   position: relative;
   border: 1px solid #aaa;

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/common/Header_tmpl.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/common/Header_tmpl.html b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/common/Header_tmpl.html
index c77f92f..0bc60c3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/common/Header_tmpl.html
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/common/Header_tmpl.html
@@ -24,7 +24,12 @@
                         <img src="images/timezone.png" style=" height: 27px;  margin-top: 9px;">
                     </a>
                 </li>
-                <li class="dropdown" data-id="exclusionList" title="Global Exclusion">
+               <!--  <li class="dropdown" data-id="exclusionList" title="Global Exclusion">
+                    <a href="#" class="account excludeStatus" data-toggle="modal">
+                        <i class="fa fa-filter  pull-right"></i>
+                    </a>
+                </li> -->
+                 <li class="dropdown" data-id="createFilters" title="Global Exclusion">
                     <a href="#" class="account excludeStatus" data-toggle="modal">
                         <i class="fa fa-filter  pull-right"></i>
                     </a>
@@ -70,10 +75,10 @@
                                 <i class="fa fa-rocket"></i>
                                 <span>Take A Tour</span>
                             </a>
-                            <a class="hidden" href="javascript:void(0)" data-id="createFilters" title="Custom Filters">
+                           <!--  <a class="" href="javascript:void(0)" data-id="createFilters" title="Custom Filters">
                                 <i class="fa fa-filter"></i>
-                                <span>Create Filter</span>
-                            </a>
+                                <span>Logfeeder Filter</span>
+                            </a> -->
                             <a href="logout.html">
                                 <i class="fa fa-power-off"></i>
                                 <span>LogOut</span>

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/dashboard/MainLayoutView_tmpl.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/dashboard/MainLayoutView_tmpl.html b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/dashboard/MainLayoutView_tmpl.html
index cd0dda2..c79ed1d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/dashboard/MainLayoutView_tmpl.html
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/dashboard/MainLayoutView_tmpl.html
@@ -224,8 +224,8 @@
                 <span>Add Component</span>
             </div>
             <div class="col-md-3">
-                <button class="btn btn-info btn-search" data-id="CompareButton">Compare</button>
-                <button class="btn bg-success btn-search" data-id="CompareClearAll" style="margin-bottom: 0px;">clear all</button>
+                <button class="btn btn-info btn-searchCompare" data-id="CompareButton">Compare</button>
+                <button class="btn bg-success btn-searchCompare" data-id="CompareClearAll" style="margin-bottom: 0px;">clear all</button>
             </div>
         </div>
     </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/filter/CreateLogfeederFilter_tmpl.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/filter/CreateLogfeederFilter_tmpl.html b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/filter/CreateLogfeederFilter_tmpl.html
index 5139e57..fe2a5c1 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/filter/CreateLogfeederFilter_tmpl.html
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/filter/CreateLogfeederFilter_tmpl.html
@@ -23,17 +23,18 @@
                         </div> -->
                         <div class="row row-margin-bottom row-topMargin">
                             <div class="col-md-12">
-                                <table class="table table-bordered table-hover table-condensed text-center logFeederTabel">
+                                <table class="table table-condensed text-center logFeederTable">
                                        <thead>
-                                       <tr>
+                                        <tr>
                                            <th>Components</th>
-                                           <th class="FATAL">FATAL</th>
-                                           <th class="ERROR">ERROR</th>
-                                           <th class="WARN">WARN</th>
-                                           <th class="INFO">INFO</th>
-                                           <th class="DEBUG">DEBUG</th>
-                                           <th class="TRACE">TRACE</th>
-										</tr>
+                                           <th>Override</th>
+                                           <th class="FATAL text-center"><span class="pull-left"><input type="checkbox" data-value="FATAL"></span><span>FATAL</span></th>
+                                           <th class="ERROR  text-center"><span class="pull-left"><input type="checkbox" data-value="ERROR"></span><span>ERROR</span></th>
+                                           <th class="WARN  text-center"><span class="pull-left"><input type="checkbox" data-value="WARN"></span><span>WARN</span></th>
+                                           <th class="INFO  text-center"><span class="pull-left"><input type="checkbox" data-value="INFO"></span><span>INFO</span></th>
+                                           <th class="DEBUG  text-center"><span class="pull-left"><input type="checkbox" data-value="DEBUG"></span><span>DEBUG</span></th>
+                                           <th class="TRACE  text-center"><span class="pull-left"><input type="checkbox" data-value="TRACE"></span><span>TRACE</span></th>
+                                        </tr>
                                        </thead>
                                        <tbody id="filterContent"></tbody>
                                 </table>

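The reworked header row adds one check-all checkbox per log level (data-value="FATAL", "ERROR", and so on), which the view keeps in sync with the per-component level checkboxes (data-id="FATAL", ...). Here is a small jQuery sketch of that two-way sync, written outside the Backbone view purely for illustration; the delegated document-level handlers are an assumption, as the real view wires these through its events hash.

    // Header checkbox toggles every checkbox in its column.
    $(document).on("change", "th input[data-value]", function () {
        var level = $(this).data("value");
        $("input[data-id='" + level + "']").prop("checked", this.checked);
    });

    // Any per-component checkbox updates the header's "all checked" state.
    $(document).on("change", "input[data-id]", function () {
        var level = $(this).data("id");
        var all = $("input[data-id='" + level + "']");
        $("input[data-value='" + level + "']")
            .prop("checked", all.length === all.filter(":checked").length);
    });
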
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/tabs/LogFileView_tmpl.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/tabs/LogFileView_tmpl.html b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/tabs/LogFileView_tmpl.html
index 4869cd1..f86190c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/tabs/LogFileView_tmpl.html
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/tabs/LogFileView_tmpl.html
@@ -154,13 +154,15 @@
 			</div>
 		</div>
 	</div>
-	<div class="btn-group contextMenu" style="display:none;position:absolute;z-index:9999;">
+	<div class="btn-group contextMenu dropup" style="display:none;position:absolute;z-index:9999;">
 	    <button type="button" class="btn btn-info btn-circle btn-app-sm btn-context dropdown-toggle" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
 	        <i class="fa fa-info"></i>
 	    </button>
 	    <ul class="dropdown-menu">
 	        <li><a data-id="I" href="javascript:void(0);">Include</a></li>
 	        <li><a data-id="E" href="javascript:void(0);">Exclude</a></li>
+	        <li><a data-id="IA" href="javascript:void(0);">*Include*</a></li>
+	        <li><a data-id="EA" href="javascript:void(0);">*Exclude*</a></li>
 	        <li role="separator" class="divider"></li>
 	        <li><a data-id="F" href="javascript:void(0);">Find</a></li>
 	    </ul>

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/troubleshoot/TroubleShootLayoutView_tmpl.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/troubleshoot/TroubleShootLayoutView_tmpl.html b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/troubleshoot/TroubleShootLayoutView_tmpl.html
index a1b4e40..3faaaa6 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/troubleshoot/TroubleShootLayoutView_tmpl.html
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/troubleshoot/TroubleShootLayoutView_tmpl.html
@@ -76,7 +76,8 @@
 						</div>
 						<div class="box-content">
 							<div class="col-md-8" style="height: 250px;"
-								data-id="serviceGraph"></div>
+								data-id="serviceGraph"><svg></svg></div>
+
 							<div class="col-md-3" style="margin-top:1%">
 								<table class="table table-bordered table-hover custTable"
 									data-id="logLevelTable">

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/pom.xml b/ambari-logsearch/pom.xml
index 475de9f..9fa7c96 100644
--- a/ambari-logsearch/pom.xml
+++ b/ambari-logsearch/pom.xml
@@ -154,6 +154,7 @@
           <excludes>
             <exclude>README.md</exclude>
             <exclude>**/*.json</exclude>
+            <exclude>**/*.log</exclude>
           </excludes>
         </configuration>
         <executions>


[8/9] ambari git commit: AMBARI-16034. Incremental changes to LogSearch to bring it up to date in the trunk (Dharmesh Makwana via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/history/conf/solrconfig.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/history/conf/solrconfig.xml b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/history/conf/solrconfig.xml
index 6544dff..8244a08 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/history/conf/solrconfig.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/history/conf/solrconfig.xml
@@ -1,4 +1,4 @@
-<?xml version='1.0' encoding='UTF-8'?>
+<?xml version="1.0" encoding="UTF-8" ?>
 <!--
  Licensed to the Apache Software Foundation (ASF) under one or more
  contributor license agreements.  See the NOTICE file distributed with
@@ -15,34 +15,1859 @@
  See the License for the specific language governing permissions and
  limitations under the License.
 -->
+
+<!-- 
+     For more details about configurations options that may appear in
+     this file, see http://wiki.apache.org/solr/SolrConfigXml. 
+-->
 <config>
-	<luceneMatchVersion>4.10.2</luceneMatchVersion>
-
-	<updateRequestProcessorChain name="composite-id">
-		<processor class="solr.CloneFieldUpdateProcessorFactory">
-			<str name="source">username</str>
-			<str name="source">filtername</str>
-			<str name="dest">composite_filtername-username</str>
-		</processor>
-		<processor class="solr.ConcatFieldUpdateProcessorFactory">
-			<str name="fieldName">composite_filtername-username</str>
-			<str name="delimiter">-</str>
-		</processor>
-		<processor class="solr.LogUpdateProcessorFactory" />
-		<processor class="solr.RunUpdateProcessorFactory" />
-	</updateRequestProcessorChain>
-
-	<requestHandler name='standard' class='solr.StandardRequestHandler'
-		default='true' />
-	<requestHandler name="/update" class="solr.UpdateRequestHandler">
-		<lst name="defaults">
-			<str name="update.chain">composite-id</str>
-		</lst>
-	</requestHandler>
-	<requestHandler name='/admin/'
-		class='org.apache.solr.handler.admin.AdminHandlers' />
-
-	<admin>
-		<defaultQuery>*:*</defaultQuery>
-	</admin>
+  <!-- In all configuration below, a prefix of "solr." for class names
+       is an alias that causes solr to search appropriate packages,
+       including org.apache.solr.(search|update|request|core|analysis)
+
+       You may also specify a fully qualified Java classname if you
+       have your own custom plugins.
+    -->
+
+  <!-- Controls what version of Lucene various components of Solr
+       adhere to.  Generally, you want to use the latest version to
+       get all bug fixes and improvements. It is highly recommended
+       that you fully re-index after changing this setting as it can
+       affect both how text is indexed and queried.
+  -->
+  <luceneMatchVersion>5.0.0</luceneMatchVersion>
+
+  <!-- <lib/> directives can be used to instruct Solr to load any Jars
+       identified and use them to resolve any "plugins" specified in
+       your solrconfig.xml or schema.xml (ie: Analyzers, Request
+       Handlers, etc...).
+
+       All directories and paths are resolved relative to the
+       instanceDir.
+
+       Please note that <lib/> directives are processed in the order
+       that they appear in your solrconfig.xml file, and are "stacked" 
+       on top of each other when building a ClassLoader - so if you have 
+       plugin jars with dependencies on other jars, the "lower level" 
+       dependency jars should be loaded first.
+
+       If a "./lib" directory exists in your instanceDir, all files
+       found in it are included as if you had used the following
+       syntax...
+       
+              <lib dir="./lib" />
+    -->
+
+  <!-- A 'dir' option by itself adds any files found in the directory 
+       to the classpath, this is useful for including all jars in a
+       directory.
+
+       When a 'regex' is specified in addition to a 'dir', only the
+       files in that directory which completely match the regex
+       (anchored on both ends) will be included.
+
+       If a 'dir' option (with or without a regex) is used and nothing
+       is found that matches, a warning will be logged.
+
+       The examples below can be used to load some solr-contribs along 
+       with their external dependencies.
+    -->
+  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-dataimporthandler-.*\.jar" />
+
+  <lib dir="${solr.install.dir:../../../..}/contrib/extraction/lib" regex=".*\.jar" />
+  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-cell-\d.*\.jar" />
+
+  <lib dir="${solr.install.dir:../../../..}/contrib/clustering/lib/" regex=".*\.jar" />
+  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-clustering-\d.*\.jar" />
+
+  <lib dir="${solr.install.dir:../../../..}/contrib/langid/lib/" regex=".*\.jar" />
+  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-langid-\d.*\.jar" />
+
+  <lib dir="${solr.install.dir:../../../..}/contrib/velocity/lib" regex=".*\.jar" />
+  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-velocity-\d.*\.jar" />
+
+  <!-- an exact 'path' can be used instead of a 'dir' to specify a 
+       specific jar file.  This will cause a serious error to be logged 
+       if it can't be loaded.
+    -->
+  <!--
+     <lib path="../a-jar-that-does-not-exist.jar" /> 
+  -->
+  
+  <!-- Data Directory
+
+       Used to specify an alternate directory to hold all index data
+       other than the default ./data under the Solr home.  If
+       replication is in use, this should match the replication
+       configuration.
+    -->
+  <dataDir>${solr.data.dir:}</dataDir>
+
+
+  <!-- The DirectoryFactory to use for indexes.
+       
+       solr.StandardDirectoryFactory is filesystem
+       based and tries to pick the best implementation for the current
+       JVM and platform.  solr.NRTCachingDirectoryFactory, the default,
+       wraps solr.StandardDirectoryFactory and caches small files in memory
+       for better NRT performance.
+
+       One can force a particular implementation via solr.MMapDirectoryFactory,
+       solr.NIOFSDirectoryFactory, or solr.SimpleFSDirectoryFactory.
+
+       solr.RAMDirectoryFactory is memory based, not
+       persistent, and doesn't work with replication.
+    -->
+  <directoryFactory name="DirectoryFactory" 
+                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}">
+    
+         
+    <!-- These will be used if you are using the solr.HdfsDirectoryFactory,
+         otherwise they will be ignored. If you don't plan on using hdfs,
+         you can safely remove this section. -->      
+    <!-- The root directory that collection data should be written to. -->     
+    <str name="solr.hdfs.home">${solr.hdfs.home:}</str>
+    <!-- The hadoop configuration files to use for the hdfs client. -->    
+    <str name="solr.hdfs.confdir">${solr.hdfs.confdir:}</str>
+    <!-- Enable/Disable the hdfs cache. -->    
+    <str name="solr.hdfs.blockcache.enabled">${solr.hdfs.blockcache.enabled:true}</str>
+    <!-- Enable/Disable using one global cache for all SolrCores. 
+         The settings used will be from the first HdfsDirectoryFactory created. -->    
+    <str name="solr.hdfs.blockcache.global">${solr.hdfs.blockcache.global:true}</str>
+    
+  </directoryFactory> 
+
+  <!-- The CodecFactory for defining the format of the inverted index.
+       The default implementation is SchemaCodecFactory, which is the official Lucene
+       index format, but hooks into the schema to provide per-field customization of
+       the postings lists and per-document values in the fieldType element
+       (postingsFormat/docValuesFormat). Note that most of the alternative implementations
+       are experimental, so if you choose to customize the index format, it's a good
+       idea to convert back to the official format e.g. via IndexWriter.addIndexes(IndexReader)
+       before upgrading to a newer version to avoid unnecessary reindexing.
+  -->
+  <codecFactory class="solr.SchemaCodecFactory"/>
+
+  <!-- To enable dynamic schema REST APIs, use the following for <schemaFactory>: -->
+  
+       <schemaFactory class="ManagedIndexSchemaFactory">
+         <bool name="mutable">true</bool>
+         <str name="managedSchemaResourceName">managed-schema</str>
+       </schemaFactory>
+<!--       
+       When ManagedIndexSchemaFactory is specified, Solr will load the schema from
+       the resource named in 'managedSchemaResourceName', rather than from schema.xml.
+       Note that the managed schema resource CANNOT be named schema.xml.  If the managed
+       schema does not exist, Solr will create it after reading schema.xml, then rename
+       'schema.xml' to 'schema.xml.bak'. 
+       
+       Do NOT hand edit the managed schema - external modifications will be ignored and
+       overwritten as a result of schema modification REST API calls.
+
+       When ManagedIndexSchemaFactory is specified with mutable = true, schema
+       modification REST API calls will be allowed; otherwise, error responses will be
+       sent back for these requests. 
+
+  <schemaFactory class="ClassicIndexSchemaFactory"/>
+  -->
+
+  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+       Index Config - These settings control low-level behavior of indexing
+       Most example settings here show the default value, but are commented
+       out, to more easily see where customizations have been made.
+       
+       Note: This replaces <indexDefaults> and <mainIndex> from older versions
+       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
+  <indexConfig>
+    <!-- maxFieldLength was removed in 4.0. To get similar behavior, include a 
+         LimitTokenCountFilterFactory in your fieldType definition. E.g. 
+     <filter class="solr.LimitTokenCountFilterFactory" maxTokenCount="10000"/>
+    -->
+    <!-- Maximum time to wait for a write lock (ms) for an IndexWriter. Default: 1000 -->
+    <!-- <writeLockTimeout>1000</writeLockTimeout>  -->
+    <!-- LogSearch customization to avoid timeouts -->
+    <writeLockTimeout>10000</writeLockTimeout>
+
+    <!-- The maximum number of simultaneous threads that may be
+         indexing documents at once in IndexWriter; if more than this
+         many threads arrive they will wait for others to finish.
+         Default in Solr/Lucene is 8. -->
+    <!-- <maxIndexingThreads>8</maxIndexingThreads>  -->
+    <!-- LogSearch customization of increase performance -->
+    <maxIndexingThreads>50</maxIndexingThreads>
+
+    <!-- Expert: Enabling compound file will use less files for the index, 
+         using fewer file descriptors on the expense of performance decrease. 
+         Default in Lucene is "true". Default in Solr is "false" (since 3.6) -->
+    <!-- <useCompoundFile>false</useCompoundFile> -->
+
+    <!-- ramBufferSizeMB sets the amount of RAM that may be used by Lucene
+         indexing for buffering added documents and deletions before they are
+         flushed to the Directory.
+         maxBufferedDocs sets a limit on the number of documents buffered
+         before flushing.
+         If both ramBufferSizeMB and maxBufferedDocs is set, then
+         Lucene will flush based on whichever limit is hit first.
+         The default is 100 MB.  -->
+    <!-- <ramBufferSizeMB>100</ramBufferSizeMB> -->
+    <!-- <maxBufferedDocs>1000</maxBufferedDocs> -->
+
+    <!-- Expert: Merge Policy 
+         The Merge Policy in Lucene controls how merging of segments is done.
+         The default since Solr/Lucene 3.3 is TieredMergePolicy.
+         The default since Lucene 2.3 was the LogByteSizeMergePolicy,
+         Even older versions of Lucene used LogDocMergePolicy.
+      -->
+    <!--
+        <mergePolicy class="org.apache.lucene.index.TieredMergePolicy">
+          <int name="maxMergeAtOnce">10</int>
+          <int name="segmentsPerTier">10</int>
+        </mergePolicy>
+      -->
+       
+    <!-- Merge Factor
+         The merge factor controls how many segments will get merged at a time.
+         For TieredMergePolicy, mergeFactor is a convenience parameter which
+         will set both MaxMergeAtOnce and SegmentsPerTier at once.
+         For LogByteSizeMergePolicy, mergeFactor decides how many new segments
+         will be allowed before they are merged into one.
+         Default is 10 for both merge policies.
+      -->
+    <!-- 
+    <mergeFactor>10</mergeFactor>
+      -->
+    <!-- LogSearch customization. Set to 5 to trigger purging of deleted documents more often -->
+    <mergeFactor>5</mergeFactor>
+
+    <!-- Expert: Merge Scheduler
+         The Merge Scheduler in Lucene controls how merges are
+         performed.  The ConcurrentMergeScheduler (Lucene 2.3 default)
+         can perform merges in the background using separate threads.
+         The SerialMergeScheduler (Lucene 2.2 default) does not.
+     -->
+    <!-- 
+       <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
+       -->
+
+    <!-- LockFactory 
+
+         This option specifies which Lucene LockFactory implementation
+         to use.
+      
+         single = SingleInstanceLockFactory - suggested for a
+                  read-only index or when there is no possibility of
+                  another process trying to modify the index.
+         native = NativeFSLockFactory - uses OS native file locking.
+                  Do not use when multiple solr webapps in the same
+                  JVM are attempting to share a single index.
+         simple = SimpleFSLockFactory  - uses a plain file for locking
+
+         Defaults: 'native' is default for Solr3.6 and later, otherwise
+                   'simple' is the default
+
+         More details on the nuances of each LockFactory...
+         http://wiki.apache.org/lucene-java/AvailableLockFactories
+    -->
+    <lockType>${solr.lock.type:native}</lockType>
+
+    <!-- Unlock On Startup
+
+         If true, unlock any held write or commit locks on startup.
+         This defeats the locking mechanism that allows multiple
+         processes to safely access a lucene index, and should be used
+         with care. Default is "false".
+
+         This is not needed if lock type is 'single'
+     -->
+    <!--
+    <unlockOnStartup>false</unlockOnStartup>
+      -->
+
+    <!-- Commit Deletion Policy
+         Custom deletion policies can be specified here. The class must
+         implement org.apache.lucene.index.IndexDeletionPolicy.
+
+         The default Solr IndexDeletionPolicy implementation supports
+         deleting index commit points on number of commits, age of
+         commit point and optimized status.
+         
+         The latest commit point should always be preserved regardless
+         of the criteria.
+    -->
+    <!-- 
+    <deletionPolicy class="solr.SolrDeletionPolicy">
+    -->
+      <!-- The number of commit points to be kept -->
+      <!-- <str name="maxCommitsToKeep">1</str> -->
+      <!-- The number of optimized commit points to be kept -->
+      <!-- <str name="maxOptimizedCommitsToKeep">0</str> -->
+      <!--
+          Delete all commit points once they have reached the given age.
+          Supports DateMathParser syntax e.g.
+        -->
+      <!--
+         <str name="maxCommitAge">30MINUTES</str>
+         <str name="maxCommitAge">1DAY</str>
+      -->
+    <!-- 
+    </deletionPolicy>
+    -->
+
+    <!-- Lucene Infostream
+       
+         To aid in advanced debugging, Lucene provides an "InfoStream"
+         of detailed information when indexing.
+
+         Setting the value to true will instruct the underlying Lucene
+         IndexWriter to write its info stream to solr's log. By default,
+         this is enabled here, and controlled through log4j.properties.
+      -->
+     <infoStream>true</infoStream>
+  </indexConfig>
+
+
+  <!-- JMX
+       
+       This example enables JMX if and only if an existing MBeanServer
+       is found, use this if you want to configure JMX through JVM
+       parameters. Remove this to disable exposing Solr configuration
+       and statistics to JMX.
+
+       For more details see http://wiki.apache.org/solr/SolrJmx
+    -->
+  <jmx />
+  <!-- If you want to connect to a particular server, specify the
+       agentId 
+    -->
+  <!-- <jmx agentId="myAgent" /> -->
+  <!-- If you want to start a new MBeanServer, specify the serviceUrl -->
+  <!-- <jmx serviceUrl="service:jmx:rmi:///jndi/rmi://localhost:9999/solr"/>
+    -->
+
+  <!-- The default high-performance update handler -->
+  <updateHandler class="solr.DirectUpdateHandler2">
+
+    <!-- Enables a transaction log, used for real-time get, durability, and
+         and solr cloud replica recovery.  The log can grow as big as
+         uncommitted changes to the index, so use of a hard autoCommit
+         is recommended (see below).
+         "dir" - the target directory for transaction logs, defaults to the
+                solr data directory.  --> 
+    <updateLog>
+      <str name="dir">${solr.ulog.dir:}</str>
+    </updateLog>
+ 
+    <!-- AutoCommit
+
+         Perform a hard commit automatically under certain conditions.
+         Instead of enabling autoCommit, consider using "commitWithin"
+         when adding documents. 
+
+         http://wiki.apache.org/solr/UpdateXmlMessages
+
+         maxDocs - Maximum number of documents to add since the last
+                   commit before automatically triggering a new commit.
+
+         maxTime - Maximum amount of time in ms that is allowed to pass
+                   since a document was added before automatically
+                   triggering a new commit. 
+         openSearcher - if false, the commit causes recent index changes
+           to be flushed to stable storage, but does not cause a new
+           searcher to be opened to make those changes visible.
+
+         If the updateLog is enabled, then it's highly recommended to
+         have some sort of hard autoCommit to limit the log size.
+      -->
+     <autoCommit> 
+       <maxTime>${solr.autoCommit.maxTime:15000}</maxTime> 
+       <openSearcher>false</openSearcher> 
+     </autoCommit>
+
+    <!-- softAutoCommit is like autoCommit except it causes a
+         'soft' commit which only ensures that changes are visible
+         but does not ensure that data is synced to disk.  This is
+         faster and more near-realtime friendly than a hard commit.
+      -->
+
+     <autoSoftCommit> 
+       <maxTime>${solr.autoSoftCommit.maxTime:5000}</maxTime> 
+     </autoSoftCommit>
+
+    <!-- Update Related Event Listeners
+         
+         Various IndexWriter related events can trigger Listeners to
+         take actions.
+
+         postCommit - fired after every commit or optimize command
+         postOptimize - fired after every optimize command
+      -->
+    <!-- The RunExecutableListener executes an external command from a
+         hook such as postCommit or postOptimize.
+         
+         exe - the name of the executable to run
+         dir - dir to use as the current working directory. (default=".")
+         wait - the calling thread waits until the executable returns. 
+                (default="true")
+         args - the arguments to pass to the program.  (default is none)
+         env - environment variables to set.  (default is none)
+      -->
+    <!-- This example shows how RunExecutableListener could be used
+         with the script based replication...
+         http://wiki.apache.org/solr/CollectionDistribution
+      -->
+    <!--
+       <listener event="postCommit" class="solr.RunExecutableListener">
+         <str name="exe">solr/bin/snapshooter</str>
+         <str name="dir">.</str>
+         <bool name="wait">true</bool>
+         <arr name="args"> <str>arg1</str> <str>arg2</str> </arr>
+         <arr name="env"> <str>MYVAR=val1</str> </arr>
+       </listener>
+      -->
+
+  </updateHandler>
+  
+  <!-- IndexReaderFactory
+
+       Use the following format to specify a custom IndexReaderFactory,
+       which allows for alternate IndexReader implementations.
+
+       ** Experimental Feature **
+
+       Please note - Using a custom IndexReaderFactory may prevent
+       certain other features from working. The API to
+       IndexReaderFactory may change without warning or may even be
+       removed from future releases if the problems cannot be
+       resolved.
+
+
+       ** Features that may not work with custom IndexReaderFactory **
+
+       The ReplicationHandler assumes a disk-resident index. Using a
+       custom IndexReader implementation may cause incompatibility
+       with ReplicationHandler and may cause replication to not work
+       correctly. See SOLR-1366 for details.
+
+    -->
+  <!--
+  <indexReaderFactory name="IndexReaderFactory" class="package.class">
+    <str name="someArg">Some Value</str>
+  </indexReaderFactory >
+  -->
+
+  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+       Query section - these settings control query time things like caches
+       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
+  <query>
+    <!-- Max Boolean Clauses
+
+         Maximum number of clauses in each BooleanQuery,  an exception
+         is thrown if exceeded.
+
+         ** WARNING **
+         
+         This option actually modifies a global Lucene property that
+         will affect all SolrCores.  If multiple solrconfig.xml files
+         disagree on this property, the value at any given moment will
+         be based on the last SolrCore to be initialized.
+         
+      -->
+    <maxBooleanClauses>1024</maxBooleanClauses>
+
+
+    <!-- Solr Internal Query Caches
+
+         There are two implementations of cache available for Solr,
+         LRUCache, based on a synchronized LinkedHashMap, and
+         FastLRUCache, based on a ConcurrentHashMap.  
+
+         FastLRUCache has faster gets and slower puts in single
+         threaded operation and thus is generally faster than LRUCache
+         when the hit ratio of the cache is high (> 75%), and may be
+         faster under other scenarios on multi-cpu systems.
+    -->
+
+    <!-- Filter Cache
+
+         Cache used by SolrIndexSearcher for filters (DocSets),
+         unordered sets of *all* documents that match a query.  When a
+         new searcher is opened, its caches may be prepopulated or
+         "autowarmed" using data from caches in the old searcher.
+         autowarmCount is the number of items to prepopulate.  For
+         LRUCache, the autowarmed items will be the most recently
+         accessed items.
+
+         Parameters:
+           class - the SolrCache implementation LRUCache or
+               (LRUCache or FastLRUCache)
+           size - the maximum number of entries in the cache
+           initialSize - the initial capacity (number of entries) of
+               the cache.  (see java.util.HashMap)
+           autowarmCount - the number of entries to prepopulate from
+               and old cache.  
+      -->
+    <filterCache class="solr.FastLRUCache"
+                 size="512"
+                 initialSize="512"
+                 autowarmCount="0"/>
+
+    <!-- Query Result Cache
+         
+         Caches results of searches - ordered lists of document ids
+         (DocList) based on a query, a sort, and the range of documents requested.  
+      -->
+    <queryResultCache class="solr.LRUCache"
+                     size="512"
+                     initialSize="512"
+                     autowarmCount="0"/>
+   
+    <!-- Document Cache
+
+         Caches Lucene Document objects (the stored fields for each
+         document).  Since Lucene internal document ids are transient,
+         this cache will not be autowarmed.  
+      -->
+    <documentCache class="solr.LRUCache"
+                   size="512"
+                   initialSize="512"
+                   autowarmCount="0"/>
+    
+    <!-- custom cache currently used by block join --> 
+    <cache name="perSegFilter"
+      class="solr.search.LRUCache"
+      size="10"
+      initialSize="0"
+      autowarmCount="10"
+      regenerator="solr.NoOpRegenerator" />
+
+    <!-- Field Value Cache
+         
+         Cache used to hold field values that are quickly accessible
+         by document id.  The fieldValueCache is created by default
+         even if not configured here.
+      -->
+    <!--
+       <fieldValueCache class="solr.FastLRUCache"
+                        size="512"
+                        autowarmCount="128"
+                        showItems="32" />
+      -->
+
+    <!-- Custom Cache
+
+         Example of a generic cache.  These caches may be accessed by
+         name through SolrIndexSearcher.getCache(),cacheLookup(), and
+         cacheInsert().  The purpose is to enable easy caching of
+         user/application level data.  The regenerator argument should
+         be specified as an implementation of solr.CacheRegenerator 
+         if autowarming is desired.  
+      -->
+    <!--
+       <cache name="myUserCache"
+              class="solr.LRUCache"
+              size="4096"
+              initialSize="1024"
+              autowarmCount="1024"
+              regenerator="com.mycompany.MyRegenerator"
+              />
+      -->
+
+
+    <!-- Lazy Field Loading
+
+         If true, stored fields that are not requested will be loaded
+         lazily.  This can result in a significant speed improvement
+         if the usual case is to not load all stored fields,
+         especially if the skipped fields are large compressed text
+         fields.
+    -->
+    <enableLazyFieldLoading>true</enableLazyFieldLoading>
+
+   <!-- Use Filter For Sorted Query
+
+        A possible optimization that attempts to use a filter to
+        satisfy a search.  If the requested sort does not include
+        score, then the filterCache will be checked for a filter
+        matching the query. If found, the filter will be used as the
+        source of document ids, and then the sort will be applied to
+        that.
+
+        For most situations, this will not be useful unless you
+        frequently get the same search repeatedly with different sort
+        options, and none of them ever use "score"
+     -->
+   <!--
+      <useFilterForSortedQuery>true</useFilterForSortedQuery>
+     -->
+
+   <!-- Result Window Size
+
+        An optimization for use with the queryResultCache.  When a search
+        is requested, a superset of the requested number of document ids
+        are collected.  For example, if a search for a particular query
+        requests matching documents 10 through 19, and queryResultWindowSize is 50,
+        then documents 0 through 49 will be collected and cached.  Any further
+        requests in that range can be satisfied via the cache.  
+     -->
+   <queryResultWindowSize>20</queryResultWindowSize>
+
+   <!-- Maximum number of documents to cache for any entry in the
+        queryResultCache. 
+     -->
+   <queryResultMaxDocsCached>200</queryResultMaxDocsCached>
+
+   <!-- Query Related Event Listeners
+
+        Various IndexSearcher related events can trigger Listeners to
+        take actions.
+
+        newSearcher - fired whenever a new searcher is being prepared
+        and there is a current searcher handling requests (aka
+        registered).  It can be used to prime certain caches to
+        prevent long request times for certain requests.
+
+        firstSearcher - fired whenever a new searcher is being
+        prepared but there is no current registered searcher to handle
+        requests or to gain autowarming data from.
+
+        
+     -->
+    <!-- QuerySenderListener takes an array of NamedList and executes a
+         local query request for each NamedList in sequence. 
+      -->
+    <listener event="newSearcher" class="solr.QuerySenderListener">
+      <arr name="queries">
+        <!--
+           <lst><str name="q">solr</str><str name="sort">price asc</str></lst>
+           <lst><str name="q">rocks</str><str name="sort">weight asc</str></lst>
+          -->
+      </arr>
+    </listener>
+    <listener event="firstSearcher" class="solr.QuerySenderListener">
+      <arr name="queries">
+        <lst>
+          <str name="q">static firstSearcher warming in solrconfig.xml</str>
+        </lst>
+      </arr>
+    </listener>
+
+    <!-- Use Cold Searcher
+
+         If a search request comes in and there is no current
+         registered searcher, then immediately register the still
+         warming searcher and use it.  If "false" then all requests
+         will block until the first searcher is done warming.
+      -->
+    <useColdSearcher>true</useColdSearcher>
+
+    <!-- Max Warming Searchers
+         
+         Maximum number of searchers that may be warming in the
+         background concurrently.  An error is returned if this limit
+         is exceeded.
+
+         Recommended values are 1-2 for read-only slaves, higher for
+         masters w/o cache warming.
+      -->
+    <maxWarmingSearchers>2</maxWarmingSearchers>
+
+  </query>
+
+
+  <!-- Request Dispatcher
+
+       This section contains instructions for how the SolrDispatchFilter
+       should behave when processing requests for this SolrCore.
+
+       handleSelect is a legacy option that affects the behavior of requests
+       such as /select?qt=XXX
+
+       handleSelect="true" will cause the SolrDispatchFilter to process
+       the request and dispatch the query to a handler specified by the 
+       "qt" param, assuming "/select" isn't already registered.
+
+       handleSelect="false" will cause the SolrDispatchFilter to
+       ignore "/select" requests, resulting in a 404 unless a handler
+       is explicitly registered with the name "/select"
+
+       handleSelect="true" is not recommended for new users, but is the default
+       for backwards compatibility
+    -->
+  <requestDispatcher handleSelect="false" >
+    <!-- Request Parsing
+
+         These settings indicate how Solr Requests may be parsed, and
+         what restrictions may be placed on the ContentStreams from
+         those requests
+
+         enableRemoteStreaming - enables use of the stream.file
+         and stream.url parameters for specifying remote streams.
+
+         multipartUploadLimitInKB - specifies the max size (in KiB) of
+         Multipart File Uploads that Solr will allow in a Request.
+         
+         formdataUploadLimitInKB - specifies the max size (in KiB) of
+         form data (application/x-www-form-urlencoded) sent via
+         POST. You can use POST to pass request parameters not
+         fitting into the URL.
+         
+         addHttpRequestToContext - if set to true, it will instruct
+         the requestParsers to include the original HttpServletRequest
+         object in the context map of the SolrQueryRequest under the 
+         key "httpRequest". It will not be used by any of the existing
+         Solr components, but may be useful when developing custom 
+         plugins.
+         
+         *** WARNING ***
+         The settings below authorize Solr to fetch remote files. You
+         should make sure your system has some authentication in place
+         before using enableRemoteStreaming="true".
+
+      --> 
+    <requestParsers enableRemoteStreaming="true" 
+                    multipartUploadLimitInKB="2048000"
+                    formdataUploadLimitInKB="2048"
+                    addHttpRequestToContext="false"/>
+
+    <!-- HTTP Caching
+
+         Set HTTP caching related parameters (for proxy caches and clients).
+
+         The options below instruct Solr not to output any HTTP Caching
+         related headers
+      -->
+    <httpCaching never304="true" />
+    <!-- If you include a <cacheControl> directive, it will be used to
+         generate a Cache-Control header (as well as an Expires header
+         if the value contains "max-age=")
+         
+         By default, no Cache-Control header is generated.
+         
+         You can use the <cacheControl> option even if you have set
+         never304="true"
+      -->
+    <!--
+       <httpCaching never304="true" >
+         <cacheControl>max-age=30, public</cacheControl> 
+       </httpCaching>
+      -->
+    <!-- To enable Solr to respond with automatically generated HTTP
+         Caching headers, and to respond to Cache Validation requests
+         correctly, set the value of never304="false".
+         
+         This will cause Solr to generate Last-Modified and ETag
+         headers based on the properties of the Index.
+
+         The following options can also be specified to affect the
+         values of these headers...
+
+         lastModFrom - the default value is "openTime" which means the
+         Last-Modified value (and validation against If-Modified-Since
+         requests) will all be relative to when the current Searcher
+         was opened.  You can change it to lastModFrom="dirLastMod" if
+         you want the value to exactly correspond to when the physical
+         index was last modified.
+
+         etagSeed="..." is an option you can change to force the ETag
+         header (and validation against If-None-Match requests) to be
+         different even if the index has not changed (ie: when making
+         significant changes to your config file)
+
+         (lastModFrom and etagSeed are both ignored if you use
+         the never304="true" option)
+      -->
+    <!--
+       <httpCaching lastModFrom="openTime"
+                    etagSeed="Solr">
+         <cacheControl>max-age=30, public</cacheControl> 
+       </httpCaching>
+      -->
+  </requestDispatcher>
+
+  <!-- Request Handlers 
+
+       http://wiki.apache.org/solr/SolrRequestHandler
+
+       Incoming queries will be dispatched to a specific handler by name
+       based on the path specified in the request.
+
+       Legacy behavior: If the request path uses "/select" but no Request
+       Handler has that name, and if handleSelect="true" has been specified in
+       the requestDispatcher, then the Request Handler is dispatched based on
+       the qt parameter.  Handlers without a leading '/' are accessed
+       like so: http://host/app/[core/]select?qt=name  If no qt is
+       given, then the requestHandler that declares default="true" will be
+       used or the one named "standard".
+
+       If a Request Handler is declared with startup="lazy", then it will
+       not be initialized until the first request that uses it.
+
+    -->
+
+  <requestHandler name="/dataimport" class="solr.DataImportHandler">
+    <lst name="defaults">
+      <str name="config">solr-data-config.xml</str>
+    </lst>
+  </requestHandler>
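+
+  <!-- Illustrative sketch only: the solr-data-config.xml referenced above is
+       defined separately.  A minimal JDBC-based configuration (driver, URL,
+       table and column names below are placeholders) could look like:
+
+       <dataConfig>
+         <dataSource type="JdbcDataSource" driver="org.postgresql.Driver"
+                     url="jdbc:postgresql://localhost/mydb" user="user" password="pass"/>
+         <document>
+           <entity name="item" query="select id, name from item">
+             <field column="id" name="id"/>
+             <field column="name" name="name"/>
+           </entity>
+         </document>
+       </dataConfig>
+    -->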
+
+  <!-- SearchHandler
+
+       http://wiki.apache.org/solr/SearchHandler
+
+       For processing Search Queries, the primary Request Handler
+       provided with Solr is "SearchHandler".  It delegates to a sequence
+       of SearchComponents (see below) and supports distributed
+       queries across multiple shards.
+    -->
+  <requestHandler name="/select" class="solr.SearchHandler">
+    <!-- default values for query parameters can be specified, these
+         will be overridden by parameters in the request
+      -->
+     <lst name="defaults">
+       <str name="echoParams">explicit</str>
+       <int name="rows">10</int>
+       <str name="df">text</str>
+     </lst>
+    <!-- In addition to defaults, "appends" params can be specified
+         to identify values which should be appended to the list of
+         multi-val params from the query (or the existing "defaults").
+      -->
+    <!-- In this example, the param "fq=instock:true" would be appended to
+         any query time fq params the user may specify, as a mechanism for
+         partitioning the index, independent of any user selected filtering
+         that may also be desired (perhaps as a result of faceted searching).
+
+         NOTE: there is *absolutely* nothing a client can do to prevent these
+         "appends" values from being used, so don't use this mechanism
+         unless you are sure you always want it.
+      -->
+    <!--
+       <lst name="appends">
+         <str name="fq">inStock:true</str>
+       </lst>
+      -->
+    <!-- "invariants" are a way of letting the Solr maintainer lock down
+         the options available to Solr clients.  Any params values
+         specified here are used regardless of what values may be specified
+         in either the query, the "defaults", or the "appends" params.
+
+         In this example, the facet.field and facet.query params would
+         be fixed, limiting the facets clients can use.  Faceting is
+         not turned on by default - but if the client does specify
+         facet=true in the request, these are the only facets they
+         will be able to see counts for; regardless of what other
+         facet.field or facet.query params they may specify.
+
+         NOTE: there is *absolutely* nothing a client can do to prevent these
+         "invariants" values from being used, so don't use this mechanism
+         unless you are sure you always want it.
+      -->
+    <!--
+       <lst name="invariants">
+         <str name="facet.field">cat</str>
+         <str name="facet.field">manu_exact</str>
+         <str name="facet.query">price:[* TO 500]</str>
+         <str name="facet.query">price:[500 TO *]</str>
+       </lst>
+      -->
+    <!-- If the default list of SearchComponents is not desired, that
+         list can either be overridden completely, or components can be
+         prepended or appended to the default list.  (see below)
+      -->
+    <!--
+       <arr name="components">
+         <str>nameOfCustomComponent1</str>
+         <str>nameOfCustomComponent2</str>
+       </arr>
+      -->
+    </requestHandler>
+
+  <!-- A request handler that returns indented JSON by default -->
+  <requestHandler name="/query" class="solr.SearchHandler">
+     <lst name="defaults">
+       <str name="echoParams">explicit</str>
+       <str name="wt">json</str>
+       <str name="indent">true</str>
+       <str name="df">text</str>
+     </lst>
+  </requestHandler>
+
+
+  <!-- realtime get handler, guaranteed to return the latest stored fields of
+       any document, without the need to commit or open a new searcher.  The
+       current implementation relies on the updateLog feature being enabled.
+
+       ** WARNING **
+       Do NOT disable the realtime get handler at /get if you are using
+       SolrCloud otherwise any leader election will cause a full sync in ALL
+       replicas for the shard in question. Similarly, a replica recovery will
+       also always fetch the complete index from the leader because a partial
+       sync will not be possible in the absence of this handler.
+  -->
+  <requestHandler name="/get" class="solr.RealTimeGetHandler">
+     <lst name="defaults">
+       <str name="omitHeader">true</str>
+       <str name="wt">json</str>
+       <str name="indent">true</str>
+     </lst>
+  </requestHandler>
+
+
+  <!-- A Robust Example
+
+       This example SearchHandler declaration shows off usage of the
+       SearchHandler with many defaults declared
+
+       Note that the same Request Handler class (SearchHandler) can be
+       registered multiple times with different names (and different
+       init parameters).
+    -->
+  <requestHandler name="/browse" class="solr.SearchHandler">
+    <lst name="defaults">
+      <str name="echoParams">explicit</str>
+
+      <!-- VelocityResponseWriter settings -->
+      <str name="wt">velocity</str>
+      <str name="v.template">browse</str>
+      <str name="v.layout">layout</str>
+
+      <!-- Query settings -->
+      <str name="defType">edismax</str>
+      <str name="q.alt">*:*</str>
+      <str name="rows">10</str>
+      <str name="fl">*,score</str>
+
+      <!-- Faceting defaults -->
+      <str name="facet">on</str>
+      <str name="facet.mincount">1</str>
+    </lst>
+  </requestHandler>
+
+
+  <initParams path="/update/**,/query,/select,/tvrh,/elevate,/spell,/browse">
+    <lst name="defaults">
+      <str name="df">text</str>
+      <str name="update.chain">add-unknown-fields-to-the-schema</str>
+    </lst>
+  </initParams>
+
+  <!-- Update Request Handler.
+       
+       http://wiki.apache.org/solr/UpdateXmlMessages
+
+       The canonical Request Handler for Modifying the Index through
+       commands specified using XML, JSON, CSV, or JAVABIN
+
+       Note: Since Solr 1.1, request handlers require a valid content
+       type header if data is posted in the body. For example, curl now
+       requires: -H 'Content-type:text/xml; charset=utf-8'
+       
+       To override the request content type and force a specific 
+       Content-type, use the request parameter: 
+         ?update.contentType=text/csv
+       
+       This handler will pick a response format to match the input
+       if the 'wt' parameter is not explicit
+    -->
+  <requestHandler name="/update" class="solr.UpdateRequestHandler">
+    <!-- See below for information on defining 
+         updateRequestProcessorChains that can be used by name 
+         on each Update Request
+      -->
+    <!--
+       <lst name="defaults">
+         <str name="update.chain">dedupe</str>
+       </lst>
+       -->
+  </requestHandler>
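+
+  <!-- Illustrative sketch only: a minimal XML update posted to this handler
+       (host, core name, and field values are placeholders) could look like:
+
+       curl 'http://localhost:8983/solr/mycollection/update?commit=true'
+            -H 'Content-type:text/xml; charset=utf-8'
+            -d '<add><doc><field name="id">doc1</field></doc></add>'
+    -->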
+
+  <!-- Solr Cell Update Request Handler
+
+       http://wiki.apache.org/solr/ExtractingRequestHandler 
+
+    -->
+  <requestHandler name="/update/extract" 
+                  startup="lazy"
+                  class="solr.extraction.ExtractingRequestHandler" >
+    <lst name="defaults">
+      <str name="lowernames">true</str>
+      <str name="uprefix">ignored_</str>
+
+      <!-- capture link hrefs but ignore div attributes -->
+      <str name="captureAttr">true</str>
+      <str name="fmap.a">links</str>
+      <str name="fmap.div">ignored_</str>
+    </lst>
+  </requestHandler>
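+
+  <!-- Illustrative sketch only: a rich document can be sent to this handler for
+       extraction roughly like this (host, core name, id and file name are
+       placeholders):
+
+       curl 'http://localhost:8983/solr/mycollection/update/extract?literal.id=doc1&commit=true'
+            -F 'myfile=@example.pdf'
+    -->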
+
+
+  <!-- Field Analysis Request Handler
+
+       RequestHandler that provides much the same functionality as
+       analysis.jsp. Provides the ability to specify multiple field
+       types and field names in the same request and outputs
+       index-time and query-time analysis for each of them.
+
+       Request parameters are:
+       analysis.fieldname - field name whose analyzers are to be used
+
+       analysis.fieldtype - field type whose analyzers are to be used
+       analysis.fieldvalue - text for index-time analysis
+       q (or analysis.q) - text for query time analysis
+       analysis.showmatch (true|false) - When set to true and when
+           query analysis is performed, the produced tokens of the
+           field value analysis will be marked as "matched" for every
+           token that is produced by the query analysis
+   -->
+  <requestHandler name="/analysis/field" 
+                  startup="lazy"
+                  class="solr.FieldAnalysisRequestHandler" />
+
+
+  <!-- Document Analysis Handler
+
+       http://wiki.apache.org/solr/AnalysisRequestHandler
+
+       An analysis handler that provides a breakdown of the analysis
+       process of provided documents. This handler expects a (single)
+       content stream with the following format:
+
+       <docs>
+         <doc>
+           <field name="id">1</field>
+           <field name="name">The Name</field>
+           <field name="text">The Text Value</field>
+         </doc>
+         <doc>...</doc>
+         <doc>...</doc>
+         ...
+       </docs>
+
+    Note: Each document must contain a field which serves as the
+    unique key. This key is used in the returned response to associate
+    an analysis breakdown to the analyzed document.
+
+    Like the FieldAnalysisRequestHandler, this handler also supports
+    query analysis by sending either an "analysis.query" or "q"
+    request parameter that holds the query text to be analyzed. It
+    also supports the "analysis.showmatch" parameter which, when set to
+    true, causes all field tokens that match the query tokens to be
+    marked as a "match". 
+  -->
+  <requestHandler name="/analysis/document" 
+                  class="solr.DocumentAnalysisRequestHandler" 
+                  startup="lazy" />
+
+  <!-- Admin Handlers
+
+       Admin Handlers - This will register all the standard admin
+       RequestHandlers.  
+    -->
+  <requestHandler name="/admin/" 
+                  class="solr.admin.AdminHandlers" />
+  <!-- This single handler is equivalent to the following... -->
+  <!--
+     <requestHandler name="/admin/luke"       class="solr.admin.LukeRequestHandler" />
+     <requestHandler name="/admin/system"     class="solr.admin.SystemInfoHandler" />
+     <requestHandler name="/admin/plugins"    class="solr.admin.PluginInfoHandler" />
+     <requestHandler name="/admin/threads"    class="solr.admin.ThreadDumpHandler" />
+     <requestHandler name="/admin/properties" class="solr.admin.PropertiesRequestHandler" />
+     <requestHandler name="/admin/file"       class="solr.admin.ShowFileRequestHandler" >
+    -->
+  <!-- If you wish to hide files under ${solr.home}/conf, explicitly
+       register the ShowFileRequestHandler using the definition below. 
+       NOTE: The glob pattern ('*') is the only pattern supported at present, *.xml will
+             not exclude all files ending in '.xml'. Use it to exclude _all_ updates
+    -->
+  <!--
+     <requestHandler name="/admin/file" 
+                     class="solr.admin.ShowFileRequestHandler" >
+       <lst name="invariants">
+         <str name="hidden">synonyms.txt</str> 
+         <str name="hidden">anotherfile.txt</str> 
+         <str name="hidden">*</str> 
+       </lst>
+     </requestHandler>
+    -->
+
+  <!--
+    Enabling this request handler (which is NOT a default part of the admin handler) will allow the Solr UI to edit
+    all the config files. This is intended for secure/development use ONLY! Leaving it available and publicly
+    accessible is a security vulnerability and should be done with extreme caution!
+  -->
+  <!--
+  <requestHandler name="/admin/fileedit" class="solr.admin.EditFileRequestHandler" >
+    <lst name="invariants">
+         <str name="hidden">synonyms.txt</str>
+         <str name="hidden">anotherfile.txt</str>
+    </lst>
+  </requestHandler>
+  -->
+  <!-- ping/healthcheck -->
+  <requestHandler name="/admin/ping" class="solr.PingRequestHandler">
+    <lst name="invariants">
+      <str name="q">solrpingquery</str>
+    </lst>
+    <lst name="defaults">
+      <str name="echoParams">all</str>
+    </lst>
+    <!-- An optional feature of the PingRequestHandler is to configure the 
+         handler with a "healthcheckFile" which can be used to enable/disable 
+         the PingRequestHandler.
+         relative paths are resolved against the data dir 
+      -->
+    <!-- <str name="healthcheckFile">server-enabled.txt</str> -->
+  </requestHandler>
+
+  <!-- Echo the request contents back to the client -->
+  <requestHandler name="/debug/dump" class="solr.DumpRequestHandler" >
+    <lst name="defaults">
+     <str name="echoParams">explicit</str> 
+     <str name="echoHandler">true</str>
+    </lst>
+  </requestHandler>
+  
+  <!-- Solr Replication
+
+       The SolrReplicationHandler supports replicating indexes from a
+       "master" used for indexing and "slaves" used for queries.
+
+       http://wiki.apache.org/solr/SolrReplication 
+
+       It is also necessary for SolrCloud to function (in Cloud mode, the
+       replication handler is used to bulk transfer segments when nodes 
+       are added or need to recover).
+
+       https://wiki.apache.org/solr/SolrCloud/
+    -->
+  <requestHandler name="/replication" class="solr.ReplicationHandler" > 
+    <!--
+       To enable simple master/slave replication, uncomment one of the 
+       sections below, depending on whether this solr instance should be
+       the "master" or a "slave".  If this instance is a "slave" you will 
+       also need to fill in the masterUrl to point to a real machine.
+    -->
+    <!--
+       <lst name="master">
+         <str name="replicateAfter">commit</str>
+         <str name="replicateAfter">startup</str>
+         <str name="confFiles">schema.xml,stopwords.txt</str>
+       </lst>
+    -->
+    <!--
+       <lst name="slave">
+         <str name="masterUrl">http://your-master-hostname:8983/solr</str>
+         <str name="pollInterval">00:00:60</str>
+       </lst>
+    -->
+  </requestHandler>
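+
+  <!-- Illustrative note: with the handler registered above, replication status
+       can be inspected with a request such as /replication?command=details
+       (or command=indexversion for just the current index version).
+    -->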
+
+  <!-- Search Components
+
+       Search components are registered to SolrCore and used by 
+       instances of SearchHandler (which can access them by name)
+       
+       By default, the following components are available:
+       
+       <searchComponent name="query"     class="solr.QueryComponent" />
+       <searchComponent name="facet"     class="solr.FacetComponent" />
+       <searchComponent name="mlt"       class="solr.MoreLikeThisComponent" />
+       <searchComponent name="highlight" class="solr.HighlightComponent" />
+       <searchComponent name="stats"     class="solr.StatsComponent" />
+       <searchComponent name="debug"     class="solr.DebugComponent" />
+   
+       Default configuration in a requestHandler would look like:
+
+       <arr name="components">
+         <str>query</str>
+         <str>facet</str>
+         <str>mlt</str>
+         <str>highlight</str>
+         <str>stats</str>
+         <str>debug</str>
+       </arr>
+
+       If you register a searchComponent to one of the standard names, 
+       that will be used instead of the default.
+
+       To insert components before or after the 'standard' components, use:
+    
+       <arr name="first-components">
+         <str>myFirstComponentName</str>
+       </arr>
+    
+       <arr name="last-components">
+         <str>myLastComponentName</str>
+       </arr>
+
+       NOTE: The component registered with the name "debug" will
+       always be executed after the "last-components" 
+       
+     -->
+  
+   <!-- Spell Check
+
+        The spell check component can return a list of alternative spelling
+        suggestions.  
+
+        http://wiki.apache.org/solr/SpellCheckComponent
+     -->
+  <searchComponent name="spellcheck" class="solr.SpellCheckComponent">
+
+    <str name="queryAnalyzerFieldType">key_lower_case</str>
+
+    <!-- Multiple "Spell Checkers" can be declared and used by this
+         component
+      -->
+
+    <!-- a spellchecker built from a field of the main index -->
+    <lst name="spellchecker">
+      <str name="name">default</str>
+      <str name="field">text</str>
+      <str name="classname">solr.DirectSolrSpellChecker</str>
+      <!-- the spellcheck distance measure used, the default is the internal levenshtein -->
+      <str name="distanceMeasure">internal</str>
+      <!-- minimum accuracy needed to be considered a valid spellcheck suggestion -->
+      <float name="accuracy">0.5</float>
+      <!-- the maximum #edits we consider when enumerating terms: can be 1 or 2 -->
+      <int name="maxEdits">2</int>
+      <!-- the minimum shared prefix when enumerating terms -->
+      <int name="minPrefix">1</int>
+      <!-- maximum number of inspections per result. -->
+      <int name="maxInspections">5</int>
+      <!-- minimum length of a query term to be considered for correction -->
+      <int name="minQueryLength">4</int>
+      <!-- maximum threshold of documents a query term can appear in to be considered for correction -->
+      <float name="maxQueryFrequency">0.01</float>
+      <!-- uncomment this to require suggestions to occur in 1% of the documents
+      	<float name="thresholdTokenFrequency">.01</float>
+      -->
+    </lst>
+    
+    <!-- a spellchecker that can break or combine words.  See "/spell" handler below for usage -->
+    <lst name="spellchecker">
+      <str name="name">wordbreak</str>
+      <str name="classname">solr.WordBreakSolrSpellChecker</str>      
+      <str name="field">name</str>
+      <str name="combineWords">true</str>
+      <str name="breakWords">true</str>
+      <int name="maxChanges">10</int>
+    </lst>
+
+    <!-- a spellchecker that uses a different distance measure -->
+    <!--
+       <lst name="spellchecker">
+         <str name="name">jarowinkler</str>
+         <str name="field">spell</str>
+         <str name="classname">solr.DirectSolrSpellChecker</str>
+         <str name="distanceMeasure">
+           org.apache.lucene.search.spell.JaroWinklerDistance
+         </str>
+       </lst>
+     -->
+
+    <!-- a spellchecker that uses an alternate comparator 
+
+         comparatorClass can be one of:
+          1. score (default)
+          2. freq (Frequency first, then score)
+          3. A fully qualified class name
+      -->
+    <!--
+       <lst name="spellchecker">
+         <str name="name">freq</str>
+         <str name="field">lowerfilt</str>
+         <str name="classname">solr.DirectSolrSpellChecker</str>
+         <str name="comparatorClass">freq</str>
+      -->
+
+    <!-- A spellchecker that reads the list of words from a file -->
+    <!--
+       <lst name="spellchecker">
+         <str name="classname">solr.FileBasedSpellChecker</str>
+         <str name="name">file</str>
+         <str name="sourceLocation">spellings.txt</str>
+         <str name="characterEncoding">UTF-8</str>
+         <str name="spellcheckIndexDir">spellcheckerFile</str>
+       </lst>
+      -->
+  </searchComponent>
+  
+  <!-- A request handler for demonstrating the spellcheck component.  
+
+       NOTE: This is purely an example.  The whole purpose of the
+       SpellCheckComponent is to hook it into the request handler that
+       handles your normal user queries so that a separate request is
+       not needed to get suggestions.
+
+       IN OTHER WORDS, THERE IS A REALLY GOOD CHANCE THE SETUP BELOW IS
+       NOT WHAT YOU WANT FOR YOUR PRODUCTION SYSTEM!
+       
+       See http://wiki.apache.org/solr/SpellCheckComponent for details
+       on the request parameters.
+    -->
+  <requestHandler name="/spell" class="solr.SearchHandler" startup="lazy">
+    <lst name="defaults">
+      <str name="df">text</str>
+      <!-- Solr will use suggestions from both the 'default' spellchecker
+           and from the 'wordbreak' spellchecker and combine them.
+           collations (re-written queries) can include a combination of
+           corrections from both spellcheckers -->
+      <str name="spellcheck.dictionary">default</str>
+      <str name="spellcheck.dictionary">wordbreak</str>
+      <str name="spellcheck">on</str>
+      <str name="spellcheck.extendedResults">true</str>       
+      <str name="spellcheck.count">10</str>
+      <str name="spellcheck.alternativeTermCount">5</str>
+      <str name="spellcheck.maxResultsForSuggest">5</str>       
+      <str name="spellcheck.collate">true</str>
+      <str name="spellcheck.collateExtendedResults">true</str>  
+      <str name="spellcheck.maxCollationTries">10</str>
+      <str name="spellcheck.maxCollations">5</str>         
+    </lst>
+    <arr name="last-components">
+      <str>spellcheck</str>
+    </arr>
+  </requestHandler>
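+
+  <!-- Illustrative sketch only: since the defaults above already turn spellcheck
+       on and enable collation, a request (the misspelled term is a placeholder)
+       can be as simple as:
+
+       /spell?q=hadop&wt=json
+    -->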
+
+  <searchComponent name="suggest" class="solr.SuggestComponent">
+  	<lst name="suggester">
+      <str name="name">mySuggester</str>
+      <str name="lookupImpl">FuzzyLookupFactory</str>      <!-- org.apache.solr.spelling.suggest.fst -->
+      <str name="dictionaryImpl">DocumentDictionaryFactory</str>     <!-- org.apache.solr.spelling.suggest.HighFrequencyDictionaryFactory --> 
+      <str name="field">cat</str>
+      <str name="weightField">price</str>
+      <str name="suggestAnalyzerFieldType">string</str>
+    </lst>
+  </searchComponent>
+
+  <requestHandler name="/suggest" class="solr.SearchHandler" startup="lazy">
+    <lst name="defaults">
+      <str name="suggest">true</str>
+      <str name="suggest.count">10</str>
+    </lst>
+    <arr name="components">
+      <str>suggest</str>
+    </arr>
+  </requestHandler>
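+
+  <!-- Illustrative sketch only: the defaults above do not name a dictionary, so a
+       request typically selects the suggester defined in the component, e.g.
+
+       /suggest?suggest.dictionary=mySuggester&suggest.q=mem&suggest.build=true
+
+       (suggest.build=true only needs to be sent once to build the suggester's
+       dictionary; the prefix "mem" is a placeholder.)
+    -->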
+  <!-- Term Vector Component
+
+       http://wiki.apache.org/solr/TermVectorComponent
+    -->
+  <searchComponent name="tvComponent" class="solr.TermVectorComponent"/>
+
+  <!-- A request handler for demonstrating the term vector component
+
+       This is purely an example.
+
+       In reality you will likely want to add the component to your 
+       already specified request handlers. 
+    -->
+  <requestHandler name="/tvrh" class="solr.SearchHandler" startup="lazy">
+    <lst name="defaults">
+      <str name="df">text</str>
+      <bool name="tv">true</bool>
+    </lst>
+    <arr name="last-components">
+      <str>tvComponent</str>
+    </arr>
+  </requestHandler>
+
+  <!-- Clustering Component
+
+       You'll need to set the solr.clustering.enabled system property
+       when running solr to run with clustering enabled:
+
+            java -Dsolr.clustering.enabled=true -jar start.jar
+
+       http://wiki.apache.org/solr/ClusteringComponent
+       http://carrot2.github.io/solr-integration-strategies/
+    -->
+  <searchComponent name="clustering"
+                   enable="${solr.clustering.enabled:false}"
+                   class="solr.clustering.ClusteringComponent" >
+    <lst name="engine">
+      <str name="name">lingo</str>
+
+      <!-- Class name of a clustering algorithm compatible with the Carrot2 framework.
+
+           Currently available open source algorithms are:
+           * org.carrot2.clustering.lingo.LingoClusteringAlgorithm
+           * org.carrot2.clustering.stc.STCClusteringAlgorithm
+           * org.carrot2.clustering.kmeans.BisectingKMeansClusteringAlgorithm
+
+           See http://project.carrot2.org/algorithms.html for more information.
+
+           A commercial algorithm Lingo3G (needs to be installed separately) is defined as:
+           * com.carrotsearch.lingo3g.Lingo3GClusteringAlgorithm
+        -->
+      <str name="carrot.algorithm">org.carrot2.clustering.lingo.LingoClusteringAlgorithm</str>
+
+      <!-- Override location of the clustering algorithm's resources 
+           (attribute definitions and lexical resources).
+
+           A directory from which to load algorithm-specific stop words,
+           stop labels and attribute definition XMLs. 
+
+           For an overview of Carrot2 lexical resources, see:
+           http://download.carrot2.org/head/manual/#chapter.lexical-resources
+
+           For an overview of Lingo3G lexical resources, see:
+           http://download.carrotsearch.com/lingo3g/manual/#chapter.lexical-resources
+       -->
+      <str name="carrot.resourcesDir">clustering/carrot2</str>
+    </lst>
+
+    <!-- An example definition for the STC clustering algorithm. -->
+    <lst name="engine">
+      <str name="name">stc</str>
+      <str name="carrot.algorithm">org.carrot2.clustering.stc.STCClusteringAlgorithm</str>
+    </lst>
+
+    <!-- An example definition for the bisecting kmeans clustering algorithm. -->
+    <lst name="engine">
+      <str name="name">kmeans</str>
+      <str name="carrot.algorithm">org.carrot2.clustering.kmeans.BisectingKMeansClusteringAlgorithm</str>
+    </lst>
+  </searchComponent>
+
+  <!-- A request handler for demonstrating the clustering component
+
+       This is purely an example.
+
+       In reality you will likely want to add the component to your 
+       already specified request handlers. 
+    -->
+  <requestHandler name="/clustering"
+                  startup="lazy"
+                  enable="${solr.clustering.enabled:false}"
+                  class="solr.SearchHandler">
+    <lst name="defaults">
+      <bool name="clustering">true</bool>
+      <bool name="clustering.results">true</bool>
+      <!-- Field name with the logical "title" of each document (optional) -->
+      <str name="carrot.title">name</str>
+      <!-- Field name with the logical "URL" of each document (optional) -->
+      <str name="carrot.url">id</str>
+      <!-- Field name with the logical "content" of each document (optional) -->
+      <str name="carrot.snippet">features</str>
+      <!-- Apply the highlighter to the title/content and use this for clustering. -->
+      <bool name="carrot.produceSummary">true</bool>
+      <!-- the maximum number of labels per cluster -->
+      <!--<int name="carrot.numDescriptions">5</int>-->
+      <!-- produce sub clusters -->
+      <bool name="carrot.outputSubClusters">false</bool>
+
+      <!-- Configure the remaining request handler parameters. -->
+      <str name="defType">edismax</str>
+      <str name="qf">
+        text^0.5 features^1.0 name^1.2 sku^1.5 id^10.0 manu^1.1 cat^1.4
+      </str>
+      <str name="q.alt">*:*</str>
+      <str name="rows">10</str>
+      <str name="fl">*,score</str>
+    </lst>
+    <arr name="last-components">
+      <str>clustering</str>
+    </arr>
+  </requestHandler>
+  
+  <!-- Terms Component
+
+       http://wiki.apache.org/solr/TermsComponent
+
+       A component to return terms and document frequency of those
+       terms
+    -->
+  <searchComponent name="terms" class="solr.TermsComponent"/>
+
+  <!-- A request handler for demonstrating the terms component -->
+  <requestHandler name="/terms" class="solr.SearchHandler" startup="lazy">
+     <lst name="defaults">
+      <bool name="terms">true</bool>
+      <bool name="distrib">false</bool>
+    </lst>     
+    <arr name="components">
+      <str>terms</str>
+    </arr>
+  </requestHandler>
+
+
+  <!-- Query Elevation Component
+
+       http://wiki.apache.org/solr/QueryElevationComponent
+
+       a search component that enables you to configure the top
+       results for a given query regardless of the normal lucene
+       scoring.
+    -->
+  <searchComponent name="elevator" class="solr.QueryElevationComponent" >
+    <!-- pick a fieldType to analyze queries -->
+    <str name="queryFieldType">string</str>
+    <str name="config-file">elevate.xml</str>
+  </searchComponent>
+
+  <!-- A request handler for demonstrating the elevator component -->
+  <requestHandler name="/elevate" class="solr.SearchHandler" startup="lazy">
+    <lst name="defaults">
+      <str name="echoParams">explicit</str>
+      <str name="df">text</str>
+    </lst>
+    <arr name="last-components">
+      <str>elevator</str>
+    </arr>
+  </requestHandler>
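+
+  <!-- Illustrative sketch only: the elevate.xml referenced above is defined
+       separately.  Its usual shape (query text and document ids are placeholders)
+       is:
+
+       <elevate>
+         <query text="example query">
+           <doc id="doc1"/>
+           <doc id="doc2" exclude="true"/>
+         </query>
+       </elevate>
+    -->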
+
+  <!-- Highlighting Component
+
+       http://wiki.apache.org/solr/HighlightingParameters
+    -->
+  <searchComponent class="solr.HighlightComponent" name="highlight">
+    <highlighting>
+      <!-- Configure the standard fragmenter -->
+      <!-- This could most likely be commented out in the "default" case -->
+      <fragmenter name="gap" 
+                  default="true"
+                  class="solr.highlight.GapFragmenter">
+        <lst name="defaults">
+          <int name="hl.fragsize">100</int>
+        </lst>
+      </fragmenter>
+
+      <!-- A regular-expression-based fragmenter 
+           (for sentence extraction) 
+        -->
+      <fragmenter name="regex" 
+                  class="solr.highlight.RegexFragmenter">
+        <lst name="defaults">
+          <!-- slightly smaller fragsizes work better because of slop -->
+          <int name="hl.fragsize">70</int>
+          <!-- allow 50% slop on fragment sizes -->
+          <float name="hl.regex.slop">0.5</float>
+          <!-- a basic sentence pattern -->
+          <str name="hl.regex.pattern">[-\w ,/\n\&quot;&apos;]{20,200}</str>
+        </lst>
+      </fragmenter>
+
+      <!-- Configure the standard formatter -->
+      <formatter name="html" 
+                 default="true"
+                 class="solr.highlight.HtmlFormatter">
+        <lst name="defaults">
+          <str name="hl.simple.pre"><![CDATA[<em>]]></str>
+          <str name="hl.simple.post"><![CDATA[</em>]]></str>
+        </lst>
+      </formatter>
+
+      <!-- Configure the standard encoder -->
+      <encoder name="html" 
+               class="solr.highlight.HtmlEncoder" />
+
+      <!-- Configure the standard fragListBuilder -->
+      <fragListBuilder name="simple" 
+                       class="solr.highlight.SimpleFragListBuilder"/>
+      
+      <!-- Configure the single fragListBuilder -->
+      <fragListBuilder name="single" 
+                       class="solr.highlight.SingleFragListBuilder"/>
+      
+      <!-- Configure the weighted fragListBuilder -->
+      <fragListBuilder name="weighted" 
+                       default="true"
+                       class="solr.highlight.WeightedFragListBuilder"/>
+      
+      <!-- default tag FragmentsBuilder -->
+      <fragmentsBuilder name="default" 
+                        default="true"
+                        class="solr.highlight.ScoreOrderFragmentsBuilder">
+        <!-- 
+        <lst name="defaults">
+          <str name="hl.multiValuedSeparatorChar">/</str>
+        </lst>
+        -->
+      </fragmentsBuilder>
+
+      <!-- multi-colored tag FragmentsBuilder -->
+      <fragmentsBuilder name="colored" 
+                        class="solr.highlight.ScoreOrderFragmentsBuilder">
+        <lst name="defaults">
+          <str name="hl.tag.pre"><![CDATA[
+               <b style="background:yellow">,<b style="background:lawngreen">,
+               <b style="background:aquamarine">,<b style="background:magenta">,
+               <b style="background:palegreen">,<b style="background:coral">,
+               <b style="background:wheat">,<b style="background:khaki">,
+               <b style="background:lime">,<b style="background:deepskyblue">]]></str>
+          <str name="hl.tag.post"><![CDATA[</b>]]></str>
+        </lst>
+      </fragmentsBuilder>
+      
+      <boundaryScanner name="default" 
+                       default="true"
+                       class="solr.highlight.SimpleBoundaryScanner">
+        <lst name="defaults">
+          <str name="hl.bs.maxScan">10</str>
+          <str name="hl.bs.chars">.,!? &#9;&#10;&#13;</str>
+        </lst>
+      </boundaryScanner>
+      
+      <boundaryScanner name="breakIterator" 
+                       class="solr.highlight.BreakIteratorBoundaryScanner">
+        <lst name="defaults">
+          <!-- type should be one of CHARACTER, WORD(default), LINE and SENTENCE -->
+          <str name="hl.bs.type">WORD</str>
+          <!-- language and country are used when constructing Locale object.  -->
+          <!-- And the Locale object will be used when getting instance of BreakIterator -->
+          <str name="hl.bs.language">en</str>
+          <str name="hl.bs.country">US</str>
+        </lst>
+      </boundaryScanner>
+    </highlighting>
+  </searchComponent>
+
+  <!-- Update Processors
+
+       Chains of Update Processor Factories for dealing with Update
+       Requests can be declared, and then used by name in Update
+       Request Processors
+
+       http://wiki.apache.org/solr/UpdateRequestProcessor
+
+    --> 
+
+  <!-- Add unknown fields to the schema 
+  
+       An example field type guessing update processor that will
+       attempt to parse string-typed field values as Booleans, Longs,
+       Doubles, or Dates, and then add schema fields with the guessed
+       field types.  
+       
+       This requires that the schema is both managed and mutable, by
+       declaring schemaFactory as ManagedIndexSchemaFactory, with
+       mutable specified as true. 
+       
+       See http://wiki.apache.org/solr/GuessingFieldTypes
+    -->
+  <updateRequestProcessorChain name="add-unknown-fields-to-the-schema">
+
+    <processor class="solr.RemoveBlankFieldUpdateProcessorFactory"/>
+    <processor class="solr.ParseBooleanFieldUpdateProcessorFactory"/>
+    <processor class="solr.ParseLongFieldUpdateProcessorFactory"/>
+    <processor class="solr.ParseDoubleFieldUpdateProcessorFactory"/>
+    <processor class="solr.ParseDateFieldUpdateProcessorFactory">
+      <arr name="format">
+        <str>yyyy-MM-dd'T'HH:mm:ss.SSSZ</str>
+        <str>yyyy-MM-dd'T'HH:mm:ss,SSSZ</str>
+        <str>yyyy-MM-dd'T'HH:mm:ss.SSS</str>
+        <str>yyyy-MM-dd'T'HH:mm:ss,SSS</str>
+        <str>yyyy-MM-dd'T'HH:mm:ssZ</str>
+        <str>yyyy-MM-dd'T'HH:mm:ss</str>
+        <str>yyyy-MM-dd'T'HH:mmZ</str>
+        <str>yyyy-MM-dd'T'HH:mm</str>
+        <str>yyyy-MM-dd HH:mm:ss.SSSZ</str>
+        <str>yyyy-MM-dd HH:mm:ss,SSSZ</str>
+        <str>yyyy-MM-dd HH:mm:ss.SSS</str>
+        <str>yyyy-MM-dd HH:mm:ss,SSS</str>
+        <str>yyyy-MM-dd HH:mm:ssZ</str>
+        <str>yyyy-MM-dd HH:mm:ss</str>
+        <str>yyyy-MM-dd HH:mmZ</str>
+        <str>yyyy-MM-dd HH:mm</str>
+        <str>yyyy-MM-dd</str>
+      </arr>
+    </processor>
+    <processor class="solr.AddSchemaFieldsUpdateProcessorFactory">
+      <str name="defaultFieldType">key_lower_case</str>
+      <lst name="typeMapping">
+        <str name="valueClass">java.lang.Boolean</str>
+        <str name="fieldType">booleans</str>
+      </lst>
+      <lst name="typeMapping">
+        <str name="valueClass">java.util.Date</str>
+        <str name="fieldType">tdates</str>
+      </lst>
+      <lst name="typeMapping">
+        <str name="valueClass">java.lang.Long</str>
+        <str name="valueClass">java.lang.Integer</str>
+        <str name="fieldType">tlongs</str>
+      </lst>
+      <lst name="typeMapping">
+        <str name="valueClass">java.lang.Number</str>
+        <str name="fieldType">tdoubles</str>
+      </lst>
+    </processor>
+
+    <processor class="solr.LogUpdateProcessorFactory"/>
+    <processor class="solr.RunUpdateProcessorFactory"/>
+  </updateRequestProcessorChain>
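+
+  <!-- Illustrative note: with this chain wired in through the initParams
+       update.chain setting above, indexing a document that carries an unseen
+       field such as
+
+       <field name="response_time">42</field>
+
+       would first parse the value as a Long and then add a "response_time" field
+       of type "tlongs" to the managed schema (per the typeMapping entries above);
+       the field name is a placeholder.
+    -->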
+
+
+  <!-- Deduplication
+
+       An example dedup update processor that creates the "id" field
+       on the fly based on the hash code of some other fields.  This
+       example has overwriteDupes set to false since we are using the
+       id field as the signatureField and Solr will maintain
+       uniqueness based on that anyway.  
+       
+    -->
+  <!--
+     <updateRequestProcessorChain name="dedupe">
+       <processor class="solr.processor.SignatureUpdateProcessorFactory">
+         <bool name="enabled">true</bool>
+         <str name="signatureField">id</str>
+         <bool name="overwriteDupes">false</bool>
+         <str name="fields">name,features,cat</str>
+         <str name="signatureClass">solr.processor.Lookup3Signature</str>
+       </processor>
+       <processor class="solr.LogUpdateProcessorFactory" />
+       <processor class="solr.RunUpdateProcessorFactory" />
+     </updateRequestProcessorChain>
+    -->
+  
+  <!-- Language identification
+
+       This example update chain identifies the language of the incoming
+       documents using the langid contrib. The detected language is
+       written to field language_s. No field name mapping is done.
+       The fields used for detection are text, title, subject and description,
+       making this example suitable for detecting languages from full-text
+       rich documents injected via ExtractingRequestHandler.
+       See more about langId at http://wiki.apache.org/solr/LanguageDetection
+    -->
+    <!--
+     <updateRequestProcessorChain name="langid">
+       <processor class="org.apache.solr.update.processor.TikaLanguageIdentifierUpdateProcessorFactory">
+         <str name="langid.fl">text,title,subject,description</str>
+         <str name="langid.langField">language_s</str>
+         <str name="langid.fallback">en</str>
+       </processor>
+       <processor class="solr.LogUpdateProcessorFactory" />
+       <processor class="solr.RunUpdateProcessorFactory" />
+     </updateRequestProcessorChain>
+    -->
+
+  <!-- Script update processor
+
+    This example hooks in an update processor implemented using JavaScript.
+
+    See more about the script update processor at http://wiki.apache.org/solr/ScriptUpdateProcessor
+  -->
+  <!--
+    <updateRequestProcessorChain name="script">
+      <processor class="solr.StatelessScriptUpdateProcessorFactory">
+        <str name="script">update-script.js</str>
+        <lst name="params">
+          <str name="config_param">example config parameter</str>
+        </lst>
+      </processor>
+      <processor class="solr.RunUpdateProcessorFactory" />
+    </updateRequestProcessorChain>
+  -->
+ 
+  <!-- Response Writers
+
+       http://wiki.apache.org/solr/QueryResponseWriter
+
+       Request responses will be written using the writer specified by
+       the 'wt' request parameter matching the name of a registered
+       writer.
+
+       The "default" writer is the default and will be used if 'wt' is
+       not specified in the request.
+    -->
+  <!-- The following response writers are implicitly configured unless
+       overridden...
+    -->
+  <!--
+     <queryResponseWriter name="xml" 
+                          default="true"
+                          class="solr.XMLResponseWriter" />
+     <queryResponseWriter name="json" class="solr.JSONResponseWriter"/>
+     <queryResponseWriter name="python" class="solr.PythonResponseWriter"/>
+     <queryResponseWriter name="ruby" class="solr.RubyResponseWriter"/>
+     <queryResponseWriter name="php" class="solr.PHPResponseWriter"/>
+     <queryResponseWriter name="phps" class="solr.PHPSerializedResponseWriter"/>
+     <queryResponseWriter name="csv" class="solr.CSVResponseWriter"/>
+     <queryResponseWriter name="schema.xml" class="solr.SchemaXmlResponseWriter"/>
+    -->
+
+  <queryResponseWriter name="json" class="solr.JSONResponseWriter">
+     <!-- For the purposes of the tutorial, JSON responses are written as
+      plain text so that they are easy to read in *any* browser.
+      If you expect a MIME type of "application/json" just remove this override.
+     -->
+    <str name="content-type">text/plain; charset=UTF-8</str>
+  </queryResponseWriter>
+  
+  <!--
+     Custom response writers can be declared as needed...
+    -->
+  <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy">
+    <str name="template.base.dir">${velocity.template.base.dir:}</str>
+  </queryResponseWriter>
+
+  <!-- XSLT response writer transforms the XML output by any xslt file found
+       in Solr's conf/xslt directory.  Changes to xslt files are checked for
+       every xsltCacheLifetimeSeconds.  
+    -->
+  <queryResponseWriter name="xslt" class="solr.XSLTResponseWriter">
+    <int name="xsltCacheLifetimeSeconds">5</int>
+  </queryResponseWriter>
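+
+  <!-- Illustrative sketch only: with a stylesheet placed in conf/xslt (the file
+       name is a placeholder), a transformed response can be requested with:
+
+       /select?q=*:*&wt=xslt&tr=example.xsl
+    -->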
+
+  <!-- Query Parsers
+
+       http://wiki.apache.org/solr/SolrQuerySyntax
+
+       Multiple QParserPlugins can be registered by name, and then
+       used in either the "defType" param for the QueryComponent (used
+       by SearchHandler) or in LocalParams
+    -->
+  <!-- example of registering a query parser -->
+  <!--
+     <queryParser name="myparser" class="com.mycompany.MyQParserPlugin"/>
+    -->
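+
+  <!-- Illustrative note: a parser registered as above could then be selected per
+       request either with defType=myparser or inline via LocalParams, e.g.
+       q={!myparser}some query
+    -->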
+
+  <!-- Function Parsers
+
+       http://wiki.apache.org/solr/FunctionQuery
+
+       Multiple ValueSourceParsers can be registered by name, and then
+       used as function names when using the "func" QParser.
+    -->
+  <!-- example of registering a custom function parser  -->
+  <!--
+     <valueSourceParser name="myfunc" 
+                        class="com.mycompany.MyValueSourceParser" />
+    -->
+    
+  
+  <!-- Document Transformers
+       http://wiki.apache.org/solr/DocTransformers
+    -->
+  <!--
+     Could be something like:
+     <transformer name="db" class="com.mycompany.LoadFromDatabaseTransformer" >
+       <int name="connection">jdbc://....</int>
+     </transformer>
+     
+     To add a constant value to all docs, use:
+     <transformer name="mytrans2" class="org.apache.solr.response.transform.ValueAugmenterFactory" >
+       <int name="value">5</int>
+     </transformer>
+     
+     If you want the user to still be able to change it with _value:something_ use this:
+     <transformer name="mytrans3" class="org.apache.solr.response.transform.ValueAugmenterFactory" >
+       <double name="defaultValue">5</double>
+     </transformer>
+
+      If you are using the QueryElevationComponent, you may wish to mark documents that get boosted.  The
+      EditorialMarkerFactory will do exactly that:
+     <transformer name="qecBooster" class="org.apache.solr.response.transform.EditorialMarkerFactory" />
+    -->
+    
+
+  <!-- Legacy config for the admin interface -->
+  <admin>
+    <defaultQuery>*:*</defaultQuery>
+  </admin>
+
 </config>

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
index 7398098..917956f 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
@@ -52,6 +52,7 @@ public class LogSearchConstants {
   public static final String SEQUNCE_ID = "seq_num";
   public static final String SOLR_COMPONENT = "type";
   public static final String SOLR_LOG_MESSAGE = "log_message";
+  public static final String SOLR_KEY_LOG_MESSAGE = "key_log_message";
   public static final String SOLR_HOST = "host";
   public static final String SOLR_LEVEL = "level";
   public static final String SOLR_THREAD_NAME = "thread_name";
@@ -102,5 +103,29 @@ public class LogSearchConstants {
 
   // logfeeder 
   public static final String LOGFEEDER_FILTER_NAME = "log_feeder_config";
+  public static final String LIST_SEPARATOR = ",";
+  
+  public static final String SORT = "sort";
+  public static final String FL = "fl";
+  
+  //Facet Constant
+  public static final String FACET_FIELD = "facet.field";
+  public static final String FACET_MINCOUNT = "facet.mincount";
+  public static final String FACET_JSON_FIELD = "json.facet";
+  public static final String FACET_PIVOT = "facet.pivot";
+  public static final String FACET_PIVOT_MINCOUNT = "facet.pivot.mincount";
+  public static final String FACET_DATE = "facet.date";
+  public static final String FACET_DATE_START = "facet.date.start";
+  public static final String FACET_DATE_END = "facet.date.end";
+  public static final String FACET_DATE_GAP = "facet.date.gap";
+  public static final String FACET_RANGE = "facet.range";
+  public static final String FACET_RANGE_START = "facet.range.start";
+  public static final String FACET_RANGE_END = "facet.range.end";
+  public static final String FACET_RANGE_GAP = "facet.range.gap";
+  public static final String FACET_GROUP = "group";
+  public static final String FACET_GROUP_MAIN = "group.main";
+  public static final String FACET_GROUP_FIELD = "group.field"; 
+  public static final String FACET_LIMIT = "facet.limit";
+  
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java
index 2525e61..94b7159 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java
@@ -23,7 +23,6 @@ import java.util.Date;
 import java.util.GregorianCalendar;
 import java.util.TimerTask;
 
-import org.apache.ambari.logsearch.util.ConfigUtil;
 import org.apache.log4j.Logger;
 
 public class ManageStartEndTime extends TimerTask {
@@ -39,10 +38,11 @@ public class ManageStartEndTime extends TimerTask {
 
   @Override
   public void run() {
-    if (startDate == null)
+    if (startDate == null){
       intailizeStartEndTime();
-    else
+    } else {
       adjustStartEndTime();
+    }
   }
 
   private void adjustStartEndTime() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java
index 77ee854..59e698f 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java
@@ -23,27 +23,28 @@ import org.apache.ambari.logsearch.view.VMessage;
 public enum MessageEnums {
 
   // Common Errors
-  DATA_NOT_FOUND("fs.error.data_not_found", "Data not found"), OPER_NOT_ALLOWED_FOR_STATE(
-    "fs.error.oper_not_allowed_for_state",
+  DATA_NOT_FOUND("logsearch.error.data_not_found", "Data not found"), OPER_NOT_ALLOWED_FOR_STATE(
+    "logsearch.error.oper_not_allowed_for_state",
     "Operation not allowed in current state"), OPER_NOT_ALLOWED_FOR_ENTITY(
-    "fs.error.oper_not_allowed_for_state",
+    "logsearch.error.oper_not_allowed_for_state",
     "Operation not allowed for entity"), OPER_NO_PERMISSION(
-    "fs.error.oper_no_permission",
+    "logsearch.error.oper_no_permission",
     "User doesn't have permission to perform this operation"), DATA_NOT_UPDATABLE(
-    "fs.error.data_not_updatable", "Data not updatable"), ERROR_CREATING_OBJECT(
-    "fs.error.create_object", "Error creating object"), ERROR_DUPLICATE_OBJECT(
-    "fs.error.duplicate_object", "Error creating duplicate object"), ERROR_SYSTEM(
-    "fs.error.system", "System Error. Please try later."),
+    "logsearch.error.data_not_updatable", "Data not updatable"), ERROR_CREATING_OBJECT(
+    "logsearch.error.create_object", "Error creating object"), ERROR_DUPLICATE_OBJECT(
+    "logsearch.error.duplicate_object", "Error creating duplicate object"), ERROR_SYSTEM(
+    "logsearch.error.system", "System Error. Please try later."),
+    SOLR_ERROR("logsearch.solr.error","Something went wrong, For more details check the logs or configuration."),
 
   // Common Validations
-  INVALID_PASSWORD("fs.validation.invalid_password", "Invalid password"), INVALID_INPUT_DATA(
-    "fs.validation.invalid_input_data", "Invalid input data"), NO_INPUT_DATA(
-    "fs.validation.no_input_data", "Input data is not provided"), INPUT_DATA_OUT_OF_BOUND(
-    "fs.validation.data_out_of_bound", "Input data if out of bound"), NO_NAME(
-    "fs.validation.no_name", "Name is not provided"), NO_OR_INVALID_COUNTRY_ID(
-    "fs.validation.no_country_id", "Valid Country Id was not provided"), NO_OR_INVALID_CITY_ID(
-    "fs.validation.no_city_id", "Valid City Id was not provided"), NO_OR_INVALID_STATE_ID(
-    "fs.validation.no_state_id", "Valid State Id was not provided");
+  INVALID_PASSWORD("logsearch.validation.invalid_password", "Invalid password"), INVALID_INPUT_DATA(
+    "logsearch.validation.invalid_input_data", "Invalid input data"), NO_INPUT_DATA(
+    "logsearch.validation.no_input_data", "Input data is not provided"), INPUT_DATA_OUT_OF_BOUND(
+    "logsearch.validation.data_out_of_bound", "Input data if out of bound"), NO_NAME(
+    "logsearch.validation.no_name", "Name is not provided"), NO_OR_INVALID_COUNTRY_ID(
+    "logsearch.validation.no_country_id", "Valid Country Id was not provided"), NO_OR_INVALID_CITY_ID(
+    "logsearch.validation.no_city_id", "Valid City Id was not provided"), NO_OR_INVALID_STATE_ID(
+    "logsearch.validation.no_state_id", "Valid State Id was not provided");
 
   String rbKey;
   String messageDesc;

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
index a5c9770..d0facbc 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
@@ -181,15 +181,14 @@ public class SearchCriteria {
         .getParameter("eMessage")));
     this.addParam(LogSearchConstants.BUNDLE_ID, request.getParameter(LogSearchConstants.BUNDLE_ID));
     this.addParam("host_name", request.getParameter("host_name"));
-    this.addParam("components_name", request.getParameter("components_name"));
+    this.addParam("component_name", request.getParameter("component_name"));
+    this.addParam("file_name", request.getParameter("file_name"));
     this.addParam("startDate", request.getParameter("start_time"));
     this.addParam("endDate", request.getParameter("end_time"));
     this.addParam("excludeQuery", StringEscapeUtils.unescapeXml(
       request.getParameter("excludeQuery")));
     this.addParam("includeQuery", StringEscapeUtils.unescapeXml(
       request.getParameter("includeQuery")));
-    this.addParam("includeQuery", StringEscapeUtils.unescapeXml(
-      request.getParameter("includeQuery")));
   }
 
   /**
@@ -221,9 +220,9 @@ public class SearchCriteria {
    */
   public void addParam(String name, Object value) {
     String solrValue = PropertiesUtil.getProperty(name);
-    if (solrValue == null || solrValue.isEmpty())
+    if (solrValue == null || solrValue.isEmpty()){
       paramList.put(name, value);
-    else {
+    }else {
       try {
         String propertyFieldMappings[] = solrValue.split(",");
         HashMap<String, String> propertyFieldValue = new HashMap<String, String>();
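
A small illustrative sketch (not part of the commit) of the request-parameter handling above: parameters such as includeQuery arrive XML-escaped and are unescaped once before being stored on the criteria. The servlet wiring and the commons-lang import are assumptions for the example; the original's imports are not shown in this hunk.

    import javax.servlet.http.HttpServletRequest;
    import org.apache.commons.lang.StringEscapeUtils;

    public class CriteriaParamSketch {
      public static String readIncludeQuery(HttpServletRequest request) {
        // e.g. "&quot;Connection refused&quot;" becomes "\"Connection refused\""
        return StringEscapeUtils.unescapeXml(request.getParameter("includeQuery"));
      }
    }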

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/UserSessionInfo.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/UserSessionInfo.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/UserSessionInfo.java
index e8db862..78fea31 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/UserSessionInfo.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/UserSessionInfo.java
@@ -26,7 +26,7 @@ public class UserSessionInfo implements Serializable {
 
   private static final long serialVersionUID = 1L;
 
-  User user;
+  private User user;
 
   public User getUser() {
     return user;

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
index 990ad00..42d836c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
@@ -21,6 +21,7 @@ package org.apache.ambari.logsearch.dao;
 
 import javax.annotation.PostConstruct;
 
+import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.log4j.Logger;
 import org.springframework.stereotype.Component;
@@ -29,6 +30,10 @@ import org.springframework.stereotype.Component;
 public class AuditSolrDao extends SolrDaoBase {
 
   static private Logger logger = Logger.getLogger(AuditSolrDao.class);
+  
+  public AuditSolrDao() {
+    super(LOG_TYPE.AUDIT);
+  }
 
   @PostConstruct
   public void postConstructor() {


[3/9] ambari git commit: AMBARI-16034. Incremental changes to LogSearch to bring it up to date in the trunk (Dharmesh Makwana via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
index 348ca4a..8dd0fb9 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
@@ -37,7 +37,6 @@ import org.apache.ambari.logsearch.view.VBarGraphData;
 import org.apache.ambari.logsearch.view.VHost;
 import org.apache.ambari.logsearch.view.VNameValue;
 import org.apache.ambari.logsearch.view.VSummary;
-import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
@@ -51,108 +50,141 @@ public class BizUtil {
 
   @Autowired
   RESTErrorUtil restErrorUtil;
+  
+  @Autowired
+  StringUtil stringUtil;
 
   public String convertObjectToNormalText(SolrDocumentList docList) {
     String textToSave = "";
     HashMap<String, String> blankFieldsMap = new HashMap<String, String>();
+    if(docList == null){
+      return "no data";
+    }
     if (docList.isEmpty()) {
       return "no data";
     }
     SolrDocument docForBlankCaculation = docList.get(0);
+    if(docForBlankCaculation == null){
+      return "no data";
+    }
     Collection<String> fieldsForBlankCaculation = docForBlankCaculation
       .getFieldNames();
 
     int maxLengthOfField = 0;
-    for (String field : fieldsForBlankCaculation) {
-      if (field.length() > maxLengthOfField)
+    if(fieldsForBlankCaculation == null){
+      return "no data";
+    }
+    for (String field : fieldsForBlankCaculation) {  
+      if (!stringUtil.isEmpty(field) && field.length() > maxLengthOfField){
         maxLengthOfField = field.length();
+      }
     }
 
     for (String field : fieldsForBlankCaculation) {
+      if(!stringUtil.isEmpty(field)){
       blankFieldsMap
         .put(field,
           addBlanksToString(
             maxLengthOfField - field.length(), field));
-    }
+      }
+    }   
 
     for (SolrDocument doc : docList) {
+      if (doc != null) {
+        StringBuffer textTowrite = new StringBuffer();
 
-      StringBuffer textTowrite = new StringBuffer();
-
-      if (doc.getFieldValue(LogSearchConstants.LOGTIME) != null) {
-        textTowrite.append(doc
-          .getFieldValue(LogSearchConstants.LOGTIME).toString()
-          + " ");
-      }
-      if (doc.getFieldValue(LogSearchConstants.SOLR_LEVEL) != null) {
-        textTowrite.append(
-          doc.getFieldValue(LogSearchConstants.SOLR_LEVEL)
-            .toString()).append(" ");
-      }
-      if (doc.getFieldValue(LogSearchConstants.SOLR_THREAD_NAME) != null) {
-        textTowrite.append(
-          doc.getFieldValue(LogSearchConstants.SOLR_THREAD_NAME)
-            .toString().trim()).append(" ");
-      }
-      if (doc.getFieldValue(LogSearchConstants.SOLR_LOGGER_NAME) != null) {
-        textTowrite.append(
-          doc.getFieldValue(LogSearchConstants.SOLR_LOGGER_NAME)
-            .toString().trim()).append(" ");
-      }
-      if (doc.getFieldValue(LogSearchConstants.SOLR_FILE) != null
-        && doc.getFieldValue(LogSearchConstants.SOLR_LINE_NUMBER) != null) {
-        textTowrite
-          .append(doc.getFieldValue(LogSearchConstants.SOLR_FILE)
-            .toString())
-          .append(":")
-          .append(doc.getFieldValue(
-            LogSearchConstants.SOLR_LINE_NUMBER).toString())
-          .append(" ");
-      }
-      if (doc.getFieldValue(LogSearchConstants.SOLR_LOG_MESSAGE) != null) {
-        textTowrite.append("- ").append(
-          doc.getFieldValue(LogSearchConstants.SOLR_LOG_MESSAGE)
-            .toString());
-      }
-      textTowrite.append("\n");
-      if (textTowrite != null)
+        if (doc.getFieldValue(LogSearchConstants.LOGTIME) != null) {
+          textTowrite.append(doc.getFieldValue(LogSearchConstants.LOGTIME)
+              .toString() + " ");
+        }
+        if (doc.getFieldValue(LogSearchConstants.SOLR_LEVEL) != null) {
+          textTowrite.append(
+              doc.getFieldValue(LogSearchConstants.SOLR_LEVEL).toString())
+              .append(" ");
+        }
+        if (doc.getFieldValue(LogSearchConstants.SOLR_THREAD_NAME) != null) {
+          textTowrite.append(
+              doc.getFieldValue(LogSearchConstants.SOLR_THREAD_NAME).toString()
+                  .trim()).append(" ");
+        }
+        if (doc.getFieldValue(LogSearchConstants.SOLR_LOGGER_NAME) != null) {
+          textTowrite.append(
+              doc.getFieldValue(LogSearchConstants.SOLR_LOGGER_NAME).toString()
+                  .trim()).append(" ");
+        }
+        if (doc.getFieldValue(LogSearchConstants.SOLR_FILE) != null
+            && doc.getFieldValue(LogSearchConstants.SOLR_LINE_NUMBER) != null) {
+          textTowrite
+              .append(
+                  doc.getFieldValue(LogSearchConstants.SOLR_FILE).toString())
+              .append(":")
+              .append(
+                  doc.getFieldValue(LogSearchConstants.SOLR_LINE_NUMBER)
+                      .toString()).append(" ");
+        }
+        if (doc.getFieldValue(LogSearchConstants.SOLR_LOG_MESSAGE) != null) {
+          textTowrite.append("- ")
+              .append(
+                  doc.getFieldValue(LogSearchConstants.SOLR_LOG_MESSAGE)
+                      .toString());
+        }
+        textTowrite.append("\n");
         textToSave += textTowrite.toString();
+      }
     }
     return textToSave;
   }
 
   public VSummary buildSummaryForLogFile(SolrDocumentList docList) {
     VSummary vsummary = new VSummary();
+    if(docList == null || docList.isEmpty()){
+      return vsummary;
+    }
     int numLogs = 0;
     List<VHost> vHosts = new ArrayList<VHost>();
     vsummary.setHosts(vHosts);
     String levels = "";
     for (SolrDocument doc : docList) {
-      // adding Host and Component appropriately
-      String hostname = (String) doc.getFieldValue("host");
-      String comp = (String) doc.getFieldValue("type");
-      String level = (String) doc.getFieldValue("level");
-      boolean newHost = true;
-      for (VHost host : vHosts) {
-        if (host.getName().equals(hostname)) {
-          newHost = false;
-          host.getComponents().add(comp);
-          break;
+      if (doc != null) {
+        // adding Host and Component appropriately
+        String hostname = (String) doc.getFieldValue("host");
+        String comp = (String) doc.getFieldValue("type");
+        String level = (String) doc.getFieldValue("level");
+        if (stringUtil.isEmpty(level)) {
+          level = "";
         }
+        boolean newHost = true;
+        for (VHost host : vHosts) {
+          if (host != null && host.getName().equals(hostname)) {
+            newHost = false;
+            if (stringUtil.isEmpty(comp)) {
+              Set<String> compList = host.getComponents();
+              if (compList != null) {
+                compList.add(comp);
+              }
+            }
+            break;
+          }
+        }
+        if (newHost) {
+          VHost vHost = new VHost();
+          if (!stringUtil.isEmpty(hostname)) {
+            vHost.setName(hostname);
+          }
+          Set<String> component = new LinkedHashSet<String>();
+          if (stringUtil.isEmpty(comp)) {
+            component.add(comp);
+          }
+          vHost.setComponents(component);
+          vHosts.add(vHost);
+        }
+        // getting levels
+        if (!levels.contains(level)) {
+          levels = levels + ", " + level;
+        }
+        numLogs += 1;
       }
-      if (newHost) {
-        VHost vHost = new VHost();
-        vHost.setName(hostname);
-        Set<String> component = new LinkedHashSet<String>();
-        component.add(comp);
-        vHost.setComponents(component);
-        vHosts.add(vHost);
-      }
-      // getting levels
-      if (!levels.contains(level))
-        levels = levels + ", " + level;
-      numLogs += 1;
-    }
+  }
     levels = levels.replaceFirst(", ", "");
     vsummary.setLevels(levels);
     vsummary.setNumberLogs("" + numLogs);
@@ -160,16 +192,19 @@ public class BizUtil {
   }
 
   public String addBlanksToString(int count, String field) {
-    String temp = field;
-    for (int i = 0; i < count; i++) {
-      temp = temp + " ";
+    if (stringUtil.isEmpty(field)) {
+      return field;
+    }
+    if (count > 0) {
+      return String.format("%-" + count + "s", field);
     }
-    return temp;
+    return field;
+
   }
 
   @SuppressWarnings({"unchecked", "rawtypes"})
   public VBarDataList buildSummaryForTopCounts(
-    SimpleOrderedMap<Object> jsonFacetResponse) {
+    SimpleOrderedMap<Object> jsonFacetResponse,String innerJsonKey,String outerJsonKey) {
 
     VBarDataList vBarDataList = new VBarDataList();
 
@@ -178,7 +213,7 @@ public class BizUtil {
       logger.info("Solr document list in null");
       return vBarDataList;
     }
-    List<Object> userList = jsonFacetResponse.getAll("Users");
+    List<Object> userList = jsonFacetResponse.getAll(outerJsonKey);
     if (userList.isEmpty()) {
       return vBarDataList;
     }
@@ -190,93 +225,51 @@ public class BizUtil {
     }
     List<SimpleOrderedMap> userUsageList = (List<SimpleOrderedMap>) userMap
       .get("buckets");
-    for (SimpleOrderedMap usageMap : userUsageList) {
-      VBarGraphData vBarGraphData = new VBarGraphData();
-      String userName = (String) usageMap.get("val");
-      vBarGraphData.setName(userName);
-      SimpleOrderedMap repoMap = (SimpleOrderedMap) usageMap.get("Repo");
-      List<VNameValue> componetCountList = new ArrayList<VNameValue>();
-      List<SimpleOrderedMap> repoUsageList = (List<SimpleOrderedMap>) repoMap
-        .get("buckets");
-      for (SimpleOrderedMap repoUsageMap : repoUsageList) {
-        VNameValue componetCount = new VNameValue();
-        if (repoUsageMap.get("val") != null)
-          componetCount.setName(repoUsageMap.get("val").toString());
-        String eventCount = "";
-        if (repoUsageMap.get("eventCount") != null)
-          eventCount = repoUsageMap.get("eventCount").toString();
-        eventCount = eventCount.replace(".0", "");
-        eventCount = eventCount.replace(".00", "");
-
-        componetCount.setValue(eventCount);
-        componetCountList.add(componetCount);
-      }
-      vBarGraphData.setDataCounts(componetCountList);
-      dataList.add(vBarGraphData);
-
-    }
-    vBarDataList.setGraphData(dataList);
-    logger.info("getting graph data");
-
-    return vBarDataList;
-  }
-
-  @SuppressWarnings({"unchecked", "rawtypes"})
-  public VBarDataList buildSummaryForResourceCounts(
-    SimpleOrderedMap<Object> jsonFacetResponse) {
-
-    VBarDataList vBarDataList = new VBarDataList();
-
-    Collection<VBarGraphData> dataList = new ArrayList<VBarGraphData>();
-    if (jsonFacetResponse == null) {
-      logger.info("Solr document list in null");
+    if(userUsageList == null){
       return vBarDataList;
     }
-    List<Object> userList = jsonFacetResponse.getAll("x");
-    if (userList.isEmpty()) {
-      return vBarDataList;
-    }
-    SimpleOrderedMap<Map<String, Object>> userMap = (SimpleOrderedMap<Map<String, Object>>) userList
-      .get(0);
-    if (userMap == null) {
-      logger.info("No top user details found");
-      return vBarDataList;
-    }
-    List<SimpleOrderedMap> userUsageList = (List<SimpleOrderedMap>) userMap
-      .get("buckets");
     for (SimpleOrderedMap usageMap : userUsageList) {
-      VBarGraphData vBarGraphData = new VBarGraphData();
-      String userName = (String) usageMap.get("val");
-      vBarGraphData.setName(userName);
-      SimpleOrderedMap repoMap = (SimpleOrderedMap) usageMap.get("y");
-      List<VNameValue> componetCountList = new ArrayList<VNameValue>();
-      List<SimpleOrderedMap> repoUsageList = (List<SimpleOrderedMap>) repoMap
-        .get("buckets");
-      for (SimpleOrderedMap repoUsageMap : repoUsageList) {
-        VNameValue componetCount = new VNameValue();
-        if (repoUsageMap.get("val") != null)
-          componetCount.setName(repoUsageMap.get("val").toString());
-        String eventCount = "";
-        if (repoUsageMap.get("eventCount") != null)
-          eventCount = repoUsageMap.get("eventCount").toString();
-        eventCount = eventCount.replace(".0", "");
-        eventCount = eventCount.replace(".00", "");
-
-        componetCount.setValue(eventCount);
-        componetCountList.add(componetCount);
-      }
-      vBarGraphData.setDataCounts(componetCountList);
-      dataList.add(vBarGraphData);
+      if (usageMap != null) {
+        VBarGraphData vBarGraphData = new VBarGraphData();
+        String userName = (String) usageMap.get("val");
+        if (!stringUtil.isEmpty(userName)) {
+          vBarGraphData.setName(userName);
+        }
+        SimpleOrderedMap repoMap = (SimpleOrderedMap) usageMap.get(innerJsonKey);
+        List<VNameValue> componetCountList = new ArrayList<VNameValue>();
+        List<SimpleOrderedMap> repoUsageList = (List<SimpleOrderedMap>) repoMap
+            .get("buckets");
+        if (repoMap != null) {
+          for (SimpleOrderedMap repoUsageMap : repoUsageList) {
+            VNameValue componetCount = new VNameValue();
+            if (repoUsageMap.get("val") != null) {
+              componetCount.setName(repoUsageMap.get("val").toString());
+            }
+            String eventCount = "";
+            if (repoUsageMap.get("eventCount") != null) {
+              eventCount = repoUsageMap.get("eventCount").toString();
+            }
+            eventCount = eventCount.replace(".0", "");
+            eventCount = eventCount.replace(".00", "");
 
-    }
+            componetCount.setValue(eventCount);
+            componetCountList.add(componetCount);
+          }
+          vBarGraphData.setDataCounts(componetCountList);
+          dataList.add(vBarGraphData);
+        }
+      }}
     vBarDataList.setGraphData(dataList);
     logger.info("getting graph data");
 
     return vBarDataList;
   }
-
-  public HashMap<String, String> sortHashMapByValuesD(
+  
+  public HashMap<String, String> sortHashMapByValues(
     HashMap<String, String> passedMap) {
+    if(passedMap == null ){
+      return passedMap;
+    }
     HashMap<String, String> sortedMap = new LinkedHashMap<String, String>();
     List<String> mapValues = new ArrayList<String>(passedMap.values());
     HashMap<String, String> invertedKeyValue = new HashMap<String, String>();
@@ -291,7 +284,6 @@ public class BizUtil {
       @SuppressWarnings("rawtypes")
       Map.Entry pair = (Map.Entry) it.next();
       invertedKeyValue.put("" + pair.getValue(), "" + pair.getKey());
-      it.remove();
     }
 
     for (String valueOfKey : mapValues) {
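
The rewritten addBlanksToString above delegates padding to the java.util.Formatter width flag; a standalone sketch (not part of the commit) of that idiom:

    public class PaddingSketch {
      public static void main(String[] args) {
        // "%-10s" left-justifies the value and right-pads it with spaces to 10 characters.
        String padded = String.format("%-" + 10 + "s", "host");
        System.out.println("[" + padded + "]");   // prints [host      ]
      }
    }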

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
index 036d5d1..36dcc96 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
@@ -55,14 +55,16 @@ public class ConfigUtil {
     if (columnMappingArray != null && columnMappingArray.length > 0) {
       for (String columnMapping : columnMappingArray) {
         String mapping[] = columnMapping.split(":");
-        String solrField = mapping[0];
-        String uiField = mapping[1];
-        String modifiedUIField = getModifiedUIField(uiField);
-        columnMappingMap.put(
-          solrField + LogSearchConstants.SOLR_SUFFIX,
-          modifiedUIField);
-        columnMappingMap.put(modifiedUIField
-          + LogSearchConstants.UI_SUFFIX, solrField);
+        if (mapping.length > 1) {
+          String solrField = mapping[0];
+          String uiField = mapping[1];
+
+          String modifiedUIField = getModifiedUIField(uiField);
+          columnMappingMap.put(solrField + LogSearchConstants.SOLR_SUFFIX,
+              modifiedUIField);
+          columnMappingMap.put(modifiedUIField + LogSearchConstants.UI_SUFFIX,
+              solrField);
+        }
       }
     }
   }
@@ -71,9 +73,9 @@ public class ConfigUtil {
     String modifiedUIField = "";
     String temp = serviceLogsColumnMapping.get(uiField
       + LogSearchConstants.UI_SUFFIX);
-    if (temp == null)
+    if (temp == null){
       return uiField;
-    else {
+    }else {
       String lastChar = uiField.substring(uiField.length() - 1,
         uiField.length());
       int k = 1;
@@ -118,6 +120,10 @@ public class ConfigUtil {
     try {
       JSONObject jsonObject = new JSONObject(responseString);
       JSONArray jsonArrayList = jsonObject.getJSONArray("fields");
+      
+      if(jsonArrayList == null){
+        return;
+      }
 
       for (int i = 0; i < jsonArrayList.length(); i++) {
         JSONObject explrObject = jsonArrayList.getJSONObject(i);
@@ -167,8 +173,9 @@ public class ConfigUtil {
   private static boolean isExclude(String name, String excludeArray[]) {
     if (excludeArray != null && excludeArray.length > 0) {
       for (String exclude : excludeArray) {
-        if (name.equals(exclude))
+        if (name.equals(exclude)){
           return true;
+        }
       }
     }
     return false;
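
The added mapping.length check above skips malformed entries that lack a ':' separator; a minimal sketch (not part of the commit, example field names made up) of the same split-and-guard pattern:

    import java.util.HashMap;
    import java.util.Map;

    public class ColumnMappingSketch {
      public static Map<String, String> parse(String columnMappings) {
        Map<String, String> solrToUi = new HashMap<String, String>();
        for (String columnMapping : columnMappings.split(",")) {
          String[] mapping = columnMapping.split(":");
          if (mapping.length > 1) {              // entries like "broken" are skipped
            solrToUi.put(mapping[0], mapping[1]);
          }
        }
        return solrToUi;
      }

      public static void main(String[] args) {
        // prints {logtime=Log Time, level=Level} (order may vary)
        System.out.println(parse("logtime:Log Time,level:Level,broken"));
      }
    }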

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java
index 77dd536..7425aaa 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java
@@ -46,29 +46,34 @@ public class DateUtil {
 
   }
 
-  public String addOffsetToDate(String date, Long utcOffset,
-                                String dateFormate) {
-    if (date == null || date.equals("")) {
+  public String addOffsetToDate(String date, Long utcOffset, String dateFormat) {
+    if (stringUtil.isEmpty(date)) {
+      logger.debug("input date is empty or null.");
       return null;
     }
     if (utcOffset == null) {
+      logger
+          .debug("Utc offset is null, Return input date without adding offset.");
+      return date;
+    }
+    if (stringUtil.isEmpty(dateFormat)) {
+      logger
+          .debug("dateFormat is null or empty, Return input date without adding offset.");
       return date;
     }
     String retDate = "";
-
     try {
       String modifiedDate = date;
       if (date.contains(".")) {
         modifiedDate = date.replace(".", ",");
       }
-      SimpleDateFormat formatter = new SimpleDateFormat(dateFormate, Locale.ENGLISH);
-      Date startDate = (Date) formatter.parse(modifiedDate);
-      long toWithOffset = getTimeWithOffset(startDate, utcOffset,
-        dateFormate);
+      SimpleDateFormat formatter = new SimpleDateFormat(dateFormat,
+          Locale.ENGLISH);
+      Date startDate = formatter.parse(modifiedDate);
+      long toWithOffset = getTimeWithOffset(startDate, utcOffset, dateFormat);
       Calendar calendar = Calendar.getInstance();
       calendar.setTimeInMillis(toWithOffset);
       retDate = formatter.format(calendar.getTime());
-
     } catch (Exception e) {
       logger.error(e);
     }
@@ -90,7 +95,6 @@ public class DateUtil {
       GregorianCalendar utc = new GregorianCalendar(gmtTimeZone);
       utc.setTimeInMillis(epoh);
       utc.add(Calendar.MILLISECOND, -offset);
-
       return utc.getTime();
     } catch (Exception ex) {
       return null;
@@ -124,6 +128,14 @@ public class DateUtil {
 
     return time;
   }
+  
+  public Date getTodayFromDate() {
+    Calendar c = new GregorianCalendar();
+    c.set(Calendar.HOUR_OF_DAY, 0); 
+    c.set(Calendar.MINUTE, 0);
+    c.set(Calendar.SECOND, 0);
+    return c.getTime();
+  }
 
   public Date addHoursToDate(Date date, int hours) {
     GregorianCalendar greorianCalendar = new GregorianCalendar();
@@ -168,6 +180,9 @@ public class DateUtil {
   }
 
   public String convertDateWithMillisecondsToSolrDate(Date date) {
+    if (date == null) {
+      return "";
+    }
     SimpleDateFormat formatter = new SimpleDateFormat(
       LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z, Locale.ENGLISH);
     TimeZone timeZone = TimeZone.getTimeZone("GMT");
@@ -190,12 +205,10 @@ public class DateUtil {
   }
 
   public Date convertStringToDate(String dateString) {
-
     SimpleDateFormat formatter = new SimpleDateFormat(
       LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z, Locale.ENGLISH);
     TimeZone timeZone = TimeZone.getTimeZone("GMT");
     formatter.setTimeZone(timeZone);
-
     try {
       return formatter.parse(dateString);
     } catch (ParseException e) {
@@ -203,4 +216,21 @@ public class DateUtil {
     }
     return null;
   }
+  
+  public boolean isDateValid(String value) {
+    if(stringUtil.isEmpty(value)){
+      return false;
+    }
+    Date date = null;
+    try {
+        SimpleDateFormat sdf = new SimpleDateFormat(LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z);
+        date = sdf.parse(value);
+        if (!value.equals(sdf.format(date))) {
+            date = null;
+        }
+    } catch (Exception ex) {
+      //do nothing
+    }
+    return date != null;
+  }
 }
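
The new isDateValid relies on a parse-then-reformat round trip, so lenient parses (month 13, day 32, trailing garbage) are rejected. A standalone sketch of the idea, assuming a Solr-style timestamp pattern; the real pattern is whatever LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z defines.

    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class StrictDateSketch {
      // Assumed pattern for illustration only.
      private static final String PATTERN = "yyyy-MM-dd'T'HH:mm:ss.SSS";

      public static boolean isValid(String value) {
        if (value == null || value.trim().isEmpty()) {
          return false;
        }
        try {
          SimpleDateFormat sdf = new SimpleDateFormat(PATTERN);
          Date parsed = sdf.parse(value);
          // "2016-13-01T00:00:00.000" parses leniently to 2017-01-01 and fails this comparison.
          return value.equals(sdf.format(parsed));
        } catch (Exception ex) {
          return false;
        }
      }
    }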

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
index 7981cb1..ab52b06 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
@@ -29,7 +29,6 @@ import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 
 import org.apache.ambari.logsearch.common.MessageEnums;
-import org.apache.ambari.logsearch.manager.UserConfigMgr;
 import org.apache.ambari.logsearch.view.VHost;
 import org.apache.ambari.logsearch.view.VSummary;
 import org.apache.log4j.Logger;
@@ -58,7 +57,7 @@ public class FileUtil {
         + vsummary.getTo() + "\n";
 
       List<VHost> hosts = vsummary.getHosts();
-      String blankCharacterForHost = "        ";
+      String blankCharacterForHost = String.format("%-8s", "");
       int numberHost = 0;
       for (VHost host : hosts) {
         numberHost += 1;
@@ -66,24 +65,27 @@ public class FileUtil {
         String c = "";
         Set<String> comp = host.getComponents();
         boolean zonetar = true;
-        for (String component : comp) {
-          if (zonetar) {
-            c = component;
-            zonetar = false;
-          } else {
-            c = c + ", " + component;
+        if (comp != null) {
+          for (String component : comp) {
+            if (zonetar) {
+              c = component;
+              zonetar = false;
+            } else {
+              c = c + ", " + component;
+            }
           }
         }
-        if (numberHost > 9)
-          blankCharacterForHost = "       ";
-        else if (numberHost > 99)
-          blankCharacterForHost = "      ";
-        else if (numberHost > 999)
-          blankCharacterForHost = "     ";
-        else if (numberHost > 9999)
-          blankCharacterForHost = "    ";
-        else if (numberHost > 99999)
-          blankCharacterForHost = "   ";
+        if (numberHost > 9){
+          blankCharacterForHost = String.format("%-7s", blankCharacterForHost);
+        }else if (numberHost > 99){
+          blankCharacterForHost = String.format("%-6s", blankCharacterForHost);
+        }else if (numberHost > 999){
+          blankCharacterForHost = String.format("%-5s", blankCharacterForHost);
+        }else if (numberHost > 9999){
+          blankCharacterForHost = String.format("%-4s", blankCharacterForHost);
+        }else if (numberHost > 99999){
+          blankCharacterForHost = String.format("%-3s", blankCharacterForHost);
+        }
         if (numberHost == 1) {
           mainExportedFile = mainExportedFile + "Host"
             + blankCharacterForHost + "   : " + h + " [" + c
@@ -95,9 +97,9 @@ public class FileUtil {
         }
 
       }
-      mainExportedFile = mainExportedFile + "Levels         : "
+      mainExportedFile = mainExportedFile + "Levels"+String.format("%-9s", blankCharacterForHost)+": "
         + vsummary.getLevels() + "\n";
-      mainExportedFile = mainExportedFile + "Format         : "
+      mainExportedFile = mainExportedFile + "Format"+String.format("%-9s", blankCharacterForHost)+": "
         + vsummary.getFormat() + "\n";
       mainExportedFile = mainExportedFile + "\n";
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java
index 417a0b1..8535039 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java
@@ -131,6 +131,9 @@ public class JSONUtil {
 
   @SuppressWarnings("unchecked")
   public List<HashMap<String, Object>> jsonToMapObjectList(String jsonStr) {
+    if (stringUtil.isEmpty(jsonStr)) {
+      return null;
+    }
     ObjectMapper mapper = new ObjectMapper();
     try {
       Object tempObject = mapper.readValue(jsonStr,
@@ -211,7 +214,7 @@ public class JSONUtil {
    * @param outputFile
    * @param beautify
    */
-  public void writeJSONInFile(String jsonStr, File outputFile,
+  public synchronized void writeJSONInFile(String jsonStr, File outputFile,
                               boolean beautify) {
     FileWriter fileWriter = null;
     if (outputFile == null) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/QueryBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/QueryBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/QueryBase.java
index 4f65cf6..d1e0a86 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/QueryBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/QueryBase.java
@@ -19,22 +19,27 @@
 
 package org.apache.ambari.logsearch.util;
 
+import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.solr.client.solrj.SolrQuery;
+import org.springframework.beans.factory.annotation.Autowired;
 
 public class QueryBase {
+  
+  @Autowired
+  StringUtil stringUtil;
 
   //Solr Facet Methods
   public void setFacetField(SolrQuery solrQuery, String facetField) {
     solrQuery.setFacet(true);
     setRowCount(solrQuery, 0);
-    solrQuery.set("facet.field", facetField);
+    solrQuery.set(LogSearchConstants.FACET_FIELD, facetField);
     setFacetLimit(solrQuery, -1);
   }
 
   public void setJSONFacet(SolrQuery solrQuery, String jsonQuery) {
     solrQuery.setFacet(true);
     setRowCount(solrQuery, 0);
-    solrQuery.set("json.facet", jsonQuery);
+    solrQuery.set(LogSearchConstants.FACET_JSON_FIELD, jsonQuery);
     setFacetLimit(solrQuery, -1);
   }
 
@@ -47,8 +52,8 @@ public class QueryBase {
                             String... hirarchy) {
     solrQuery.setFacet(true);
     setRowCount(solrQuery, 0);
-    solrQuery.set("facet.pivot", hirarchy);
-    solrQuery.set("facet.pivot.mincount", mincount);
+    solrQuery.set(LogSearchConstants.FACET_PIVOT, hirarchy);
+    solrQuery.set(LogSearchConstants.FACET_PIVOT_MINCOUNT, mincount);
     setFacetLimit(solrQuery, -1);
   }
 
@@ -56,11 +61,11 @@ public class QueryBase {
                            String from, String to, String unit) {
     solrQuery.setFacet(true);
     setRowCount(solrQuery, 0);
-    solrQuery.set("facet.date", facetField);
-    solrQuery.set("facet.date.start", from);
-    solrQuery.set("facet.date.end", to);
-    solrQuery.set("facet.date.gap", unit);
-    solrQuery.set("facet.mincount", 0);
+    solrQuery.set(LogSearchConstants.FACET_DATE, facetField);
+    solrQuery.set(LogSearchConstants.FACET_DATE_START, from);
+    solrQuery.set(LogSearchConstants.FACET_DATE_END, to);
+    solrQuery.set(LogSearchConstants.FACET_DATE_GAP, unit);
+    solrQuery.set(LogSearchConstants.FACET_MINCOUNT, 0);
     setFacetLimit(solrQuery, -1);
   }
 
@@ -68,11 +73,11 @@ public class QueryBase {
                             String from, String to, String unit) {
     solrQuery.setFacet(true);
     setRowCount(solrQuery, 0);
-    solrQuery.set("facet.range", facetField);
-    solrQuery.set("facet.range.start", from);
-    solrQuery.set("facet.range.end", to);
-    solrQuery.set("facet.range.gap", unit);
-    solrQuery.set("facet.mincount", 0);
+    solrQuery.set(LogSearchConstants.FACET_RANGE, facetField);
+    solrQuery.set(LogSearchConstants.FACET_RANGE_START, from);
+    solrQuery.set(LogSearchConstants.FACET_RANGE_END, to);
+    solrQuery.set(LogSearchConstants.FACET_RANGE_GAP, unit);
+    solrQuery.set(LogSearchConstants.FACET_MINCOUNT, 0);
     setFacetLimit(solrQuery, -1);
   }
 
@@ -82,26 +87,28 @@ public class QueryBase {
 
   //Solr Group Mehtods
   public void setGroupField(SolrQuery solrQuery, String groupField, int rows) {
-    solrQuery.set("group", true);
-    solrQuery.set("group.field", groupField);
-    solrQuery.set("group.main", true);
+    solrQuery.set(LogSearchConstants.FACET_GROUP, true);
+    solrQuery.set(LogSearchConstants.FACET_GROUP_FIELD, groupField);
+    solrQuery.set(LogSearchConstants.FACET_GROUP_MAIN, true);
     setRowCount(solrQuery, rows);
   }
 
   //Main Query
   public void setMainQuery(SolrQuery solrQuery, String query) {
     String defalultQuery = "*:*";
-    if (query == null || query.isEmpty())
+    if (stringUtil.isEmpty(query)){
       solrQuery.setQuery(defalultQuery);
-    else
+    }else{
       solrQuery.setQuery(query);
+    }
   }
 
   public void setStart(SolrQuery solrQuery, int start) {
-    if (start > 0) {
+    int defaultStart = 0;
+    if (start > defaultStart) {
       solrQuery.setStart(start);
     } else {
-      solrQuery.setStart(0);
+      solrQuery.setStart(defaultStart);
     }
   }
 
@@ -111,17 +118,21 @@ public class QueryBase {
       solrQuery.setRows(rows);
     } else {
       solrQuery.setRows(0);
-      solrQuery.remove("sort");
+      solrQuery.remove(LogSearchConstants.SORT);
     }
   }
 
   //Solr Facet Methods
-  public void setFacetField(SolrQuery solrQuery, String facetField, int minCount) {
+  public void setFacetFieldWithMincount(SolrQuery solrQuery, String facetField, int minCount) {
     solrQuery.setFacet(true);
     setRowCount(solrQuery, 0);
-    solrQuery.set("facet.field", facetField);
-    solrQuery.set("facet.mincount", minCount);
+    solrQuery.set(LogSearchConstants.FACET_FIELD, facetField);
+    solrQuery.set(LogSearchConstants.FACET_MINCOUNT, minCount);
     setFacetLimit(solrQuery, -1);
   }
+  
+  public void setFl(SolrQuery solrQuery,String field){
+    solrQuery.set(LogSearchConstants.FL, field);
+  }
 
 }
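
A minimal sketch (not part of the commit; the field name and date-math bounds are made up) of the request that setFacetRange builds once the literals are replaced by the LogSearchConstants names:

    import org.apache.solr.client.solrj.SolrQuery;

    public class FacetRangeSketch {
      public static SolrQuery hourlyHistogram() {
        SolrQuery query = new SolrQuery("*:*");
        query.setRows(0);
        query.setFacet(true);
        query.set("facet.range", "logtime");              // FACET_RANGE
        query.set("facet.range.start", "NOW/DAY-7DAYS");  // FACET_RANGE_START
        query.set("facet.range.end", "NOW");              // FACET_RANGE_END
        query.set("facet.range.gap", "+1HOUR");           // FACET_RANGE_GAP
        query.set("facet.mincount", 0);                   // FACET_MINCOUNT
        return query;
      }
    }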

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java
index a3cb855..e64f742 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java
@@ -34,9 +34,7 @@ import org.springframework.stereotype.Component;
 @Component
 public class RESTErrorUtil {
 
-  static final Logger logger = Logger.getLogger("org.apache.ambari.logsearch");
-
-  public static final String TRUE = "true";
+  private static Logger logger = Logger.getLogger(RESTErrorUtil.class);
 
   public WebApplicationException createRESTException(VResponse response) {
     return createRESTException(response, HttpServletResponse.SC_BAD_REQUEST);

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
index 6fa513d..21b09d3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
@@ -22,12 +22,17 @@ package org.apache.ambari.logsearch.util;
 import java.util.Collection;
 import java.util.Locale;
 
+import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.log4j.Logger;
+import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
 @Component
 public class SolrUtil {
   static final Logger logger = Logger.getLogger("org.apache.ambari.logsearch");
+  
+  @Autowired
+  StringUtil stringUtil;
 
   public String setField(String fieldName, String value) {
     if (value == null || value.trim().length() == 0) {
@@ -79,107 +84,199 @@ public class SolrUtil {
   }
 
   /**
-   * @param fuzzyStr
+   * @param wildCard
    * @param string
    * @param searchList
    * @return
    */
-  public String orList(String fieldName, String[] valueList, String fuzzyStr) {
+  public String orList(String fieldName, String[] valueList, String wildCard) {
     if (valueList == null || valueList.length == 0) {
       return "";
     }
-    String expr = "";
+    
+    if(stringUtil.isEmpty(wildCard)){
+      wildCard = "";
+    }
+    
+    StringBuilder expr = new StringBuilder();
     int count = -1;
     for (String value : valueList) {
       count++;
       if (count > 0) {
-        expr += " OR ";
+        expr.append(" OR ");
       }
-      expr += fieldName + ":*" + value + "*";
+      
+      expr.append( fieldName + ":"+ wildCard + value + wildCard);
 
     }
     if (valueList.length == 0) {
-      return expr;
+      return expr.toString();
     } else {
       return "(" + expr + ")";
     }
 
   }
-  
+
   /**
-   * @param fuzzyStr
+   * @param wildCard
    * @param string
    * @param searchList
    * @return
    */
-  public String orList(String fieldName, String[] valueList) {
+  public String andList(String fieldName, String[] valueList, String wildCard) {
     if (valueList == null || valueList.length == 0) {
       return "";
     }
-    String expr = "";
+    
+    if(stringUtil.isEmpty(wildCard)){
+      wildCard = "";
+    }
+    
+    StringBuilder expr = new StringBuilder();
     int count = -1;
     for (String value : valueList) {
       count++;
       if (count > 0) {
-        expr += " OR ";
+        expr.append(" AND ");
       }
-      expr += fieldName + ":" + value;
+      
+      expr.append( fieldName + ":"+ wildCard + value + wildCard);
 
     }
     if (valueList.length == 0) {
-      return expr;
+      return expr.toString();
     } else {
       return "(" + expr + ")";
     }
 
   }
-  
-  
 
   /**
-   * @param fuzzyStr
-   * @param string
-   * @param searchList
+   * Copied from Solr ClientUtils.escapeQueryChars and removed escaping *
+   * 
+   * @param s
    * @return
    */
-  public String andList(String fieldName, String[] valueList, String fuzzyStr) {
-    if (valueList == null || valueList.length == 0) {
-      return "";
+  public String escapeQueryChars(String s) {
+    StringBuilder sb = new StringBuilder();
+    int prev = 0;
+    if (s != null) {
+      for (int i = 0; i < s.length(); i++) {
+        char c = s.charAt(i);
+        int ic = (int)c;
+        if( ic == 10 ) {
+          if( prev != 13) {
+            //Let's insert \r
+            sb.append('\\');
+            sb.append((char)13);
+          }
+        }
+        // Note: Remove || c == '*'
+        // These characters are part of the query syntax and must be escaped
+        if (c == '\\' || c == '+' || c == '-' || c == '!' || c == '('
+            || c == ')' || c == ':' || c == '^' || c == '[' || c == ']'
+            || c == '\"' || c == '{' || c == '}' || c == '~' || c == '?'
+            || c == '|' || c == '&' || c == ';' || c == '/'
+            || Character.isWhitespace(c)) {
+          sb.append('\\');
+        }
+        sb.append(c);
+      }
     }
-    String expr = "";
-    int count = -1;
-    for (String value : valueList) {
-      count++;
-      if (count > 0) {
-        expr += " AND ";
+    return sb.toString();
+  }
+
+  public String escapeForWhiteSpaceTokenizer(String search) {
+    if (search == null) {
+      return null;
+    }
+    String newString = search.trim();
+    String newSearch = escapeQueryChars(newString);
+    boolean isSingleWord = true;
+    for (int i = 0; i < search.length(); i++) {
+      if (Character.isWhitespace(search.charAt(i))) {
+        isSingleWord = false;
       }
-      expr += fieldName + ":*" + value + "*";
     }
-    if (valueList.length == 0) {
-      return expr;
-    } else {
-      return "(" + expr + ")";
+    if (!isSingleWord) {
+      newSearch = "\"" + newSearch + "\"";
     }
 
+    return newSearch;
+  }
+
+  public String escapeForStandardTokenizer(String search) {
+    if (search == null) {
+      return null;
+    }
+    String newString = search.trim();
+    String newSearch = escapeQueryChars(newString);
+    boolean isSingleWord = true;
+    for (int i = 0; i < search.length(); i++) {
+      if (Character.isWhitespace(search.charAt(i))) {
+        isSingleWord = false;
+      }
+    }
+    if (!isSingleWord) {
+      newSearch = "\"" + newSearch + "\"";
+    }
+
+    return newSearch;
+  }
+
+  public String escapeForKeyTokenizer(String search) {
+    if (search.startsWith("*") && search.endsWith("*")
+        && !stringUtil.isEmpty(search)) {
+      // Remove the * from both the sides
+      if (search.length() > 1) {
+        search = search.substring(1, search.length() - 1);
+      }else{
+        //search string have only * 
+        search="";
+      }
+    }
+    // Escape the string
+    search = escapeQueryChars(search);
+
+    // Add the *
+    return "*" + search + "*";
+  }
+
+  /**
+   * This is a special case scenario to handle log_message for wild card
+   * scenarios
+   * 
+   * @param search
+   * @return
+   */
+  public String escapeForLogMessage(String field, String search) {
+    if (search.startsWith("*") && search.endsWith("*")) {
+      field = LogSearchConstants.SOLR_KEY_LOG_MESSAGE;
+      search = escapeForKeyTokenizer(search);
+    } else {
+      // Use whitespace index
+      field = LogSearchConstants.SOLR_LOG_MESSAGE;
+      search = escapeForWhiteSpaceTokenizer(search);
+    }
+    return field + ":" + search;
   }
 
   public String makeSolrSearchString(String search) {
     String newString = search.trim();
     String newSearch = newString.replaceAll(
-        "(?=[]\\[+&|!(){}^~*=$@%?:.\\\\])", "\\\\");
+        "(?=[]\\[+&|!(){},:\"^~/=$@%?:.\\\\])", "\\\\");
     newSearch = newSearch.replace("\n", "*");
     newSearch = newSearch.replace("\t", "*");
     newSearch = newSearch.replace("\r", "*");
-    newSearch = newSearch.replace(" ", "\\ ");
     newSearch = newSearch.replace("**", "*");
     newSearch = newSearch.replace("***", "*");
     return "*" + newSearch + "*";
   }
-  
+
   public String makeSolrSearchStringWithoutAsterisk(String search) {
     String newString = search.trim();
     String newSearch = newString.replaceAll(
-        "(?=[]\\[+&|!(){}^\"~=$@%?:.\\\\])", "\\\\");
+        "(?=[]\\[+&|!(){}^\"~=/$@%?:.\\\\])", "\\\\");
     newSearch = newSearch.replace("\n", "*");
     newSearch = newSearch.replace("\t", "*");
     newSearch = newSearch.replace("\r", "*");
@@ -190,13 +287,13 @@ public class SolrUtil {
   }
 
   public String makeSearcableString(String search) {
-    if(search == null || search.isEmpty())
+    if (search == null || search.isEmpty()){
       return "";
+    }
     String newSearch = search.replaceAll("[\\t\\n\\r]", " ");
-    newSearch = newSearch.replaceAll(
-        "(?=[]\\[+&|!(){}^~*=$/@%?:.\\\\-])", "\\\\");
+    newSearch = newSearch.replaceAll("(?=[]\\[+&|!(){}^~*=$/@%?:.\\\\-])",
+        "\\\\");
 
     return newSearch.replace(" ", "\\ ");
   }
-
 }
\ No newline at end of file
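
The escapeQueryChars copy above deliberately leaves '*' unescaped so user-typed wildcards keep working. A small sketch (not part of the commit) of the observable behaviour; constructing SolrUtil directly is fine here only because neither method touches the autowired StringUtil.

    import org.apache.ambari.logsearch.util.SolrUtil;

    public class EscapeSketch {
      public static void main(String[] args) {
        SolrUtil solrUtil = new SolrUtil();
        // '-' is escaped while '*' is preserved as a wildcard: out\-of\-memory*
        System.out.println(solrUtil.escapeQueryChars("out-of-memory*"));
        // Multi-word input is escaped and quoted for the whitespace tokenizer: "read\ timeout"
        System.out.println(solrUtil.escapeForWhiteSpaceTokenizer("read timeout"));
      }
    }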

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/StringUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/StringUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/StringUtil.java
index 9a21e6a..de83e7e 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/StringUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/StringUtil.java
@@ -20,16 +20,12 @@
 package org.apache.ambari.logsearch.util;
 
 import org.apache.log4j.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
 @Component
 public class StringUtil {
   
-  static Logger logger = Logger.getLogger(StringUtil.class);
-  
-  @Autowired
-  RESTErrorUtil restErrorUtil;
+  private static  Logger logger = Logger.getLogger(StringUtil.class);
   
   public boolean isEmpty(String str) {
     return str == null || str.trim().length() == 0;

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/XMLPropertiesUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/XMLPropertiesUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/XMLPropertiesUtil.java
index ea041dc..ff80e73 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/XMLPropertiesUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/XMLPropertiesUtil.java
@@ -41,45 +41,44 @@ public class XMLPropertiesUtil extends DefaultPropertiesPersister {
 
   @Override
   public void loadFromXml(Properties properties, InputStream inputStream)
-    throws IOException {
+      throws IOException {
     try {
       DocumentBuilderFactory xmlDocumentBuilderFactory = DocumentBuilderFactory
-        .newInstance();
+          .newInstance();
       xmlDocumentBuilderFactory.setIgnoringComments(true);
       xmlDocumentBuilderFactory.setNamespaceAware(true);
       DocumentBuilder xmlDocumentBuilder = xmlDocumentBuilderFactory
-        .newDocumentBuilder();
+          .newDocumentBuilder();
       Document xmlDocument = xmlDocumentBuilder.parse(inputStream);
-      xmlDocument.getDocumentElement().normalize();
-
-      NodeList nList = xmlDocument.getElementsByTagName("property");
-
-      for (int temp = 0; temp < nList.getLength(); temp++) {
-
-        Node nNode = nList.item(temp);
-
-        if (nNode.getNodeType() == Node.ELEMENT_NODE) {
-
-          Element eElement = (Element) nNode;
-
-          String propertyName = "";
-          String propertyValue = "";
-          if (eElement.getElementsByTagName("name").item(0) != null) {
-            propertyName = eElement.getElementsByTagName("name")
-              .item(0).getTextContent().trim();
-          }
-          if (eElement.getElementsByTagName("value").item(0) != null) {
-            propertyValue = eElement.getElementsByTagName("value")
-              .item(0).getTextContent().trim();
+      if (xmlDocument != null) {
+        xmlDocument.getDocumentElement().normalize();
+        NodeList nList = xmlDocument.getElementsByTagName("property");
+        if (nList != null) {
+          for (int temp = 0; temp < nList.getLength(); temp++) {
+            Node nNode = nList.item(temp);
+            if (nNode.getNodeType() == Node.ELEMENT_NODE) {
+              Element eElement = (Element) nNode;
+              String propertyName = "";
+              String propertyValue = "";
+              if (eElement.getElementsByTagName("name") != null
+                  && eElement.getElementsByTagName("name").item(0) != null) {
+                propertyName = eElement.getElementsByTagName("name").item(0)
+                    .getTextContent().trim();
+              }
+              if (eElement.getElementsByTagName("value") != null
+                  && eElement.getElementsByTagName("value").item(0) != null) {
+                propertyValue = eElement.getElementsByTagName("value").item(0)
+                    .getTextContent().trim();
+              }
+              if (propertyName != null && !propertyName.isEmpty()) {
+                properties.put(propertyName, propertyValue);
+              }
+            }
           }
-
-          properties.put(propertyName, propertyValue);
-
         }
-        // logger.info("ranger site properties loaded successfully.");
       }
     } catch (Exception e) {
-      logger.error("Error loading : ", e);
+      logger.error("Error loading xml properties ", e);
     }
   }
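
A usage sketch (not part of the commit) of the Hadoop-style <property> XML that loadFromXml expects; the property name and value below are invented for the example:

    import java.io.ByteArrayInputStream;
    import java.util.Properties;
    import org.apache.ambari.logsearch.util.XMLPropertiesUtil;

    public class XmlPropsSketch {
      public static void main(String[] args) throws Exception {
        String xml = "<configuration>"
            + "<property><name>example.solr.url</name><value>http://localhost:8886/solr</value></property>"
            + "</configuration>";
        Properties props = new Properties();
        new XMLPropertiesUtil().loadFromXml(props, new ByteArrayInputStream(xml.getBytes("UTF-8")));
        System.out.println(props.getProperty("example.solr.url"));   // http://localhost:8886/solr
      }
    }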
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VBarDataList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VBarDataList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VBarDataList.java
index 9e88bd5..b13946c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VBarDataList.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VBarDataList.java
@@ -29,7 +29,11 @@ import javax.xml.bind.annotation.XmlRootElement;
 @XmlRootElement
 @XmlAccessorType(XmlAccessType.FIELD)
 public class VBarDataList {
-  Collection<VBarGraphData> graphData = new ArrayList<VBarGraphData>();
+  protected  Collection<VBarGraphData> graphData;
+
+  public VBarDataList() {
+    graphData = new ArrayList<VBarGraphData>();
+  }
 
   public Collection<VBarGraphData> getGraphData() {
     return graphData;

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VCountList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VCountList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VCountList.java
index ed04db7..f105478 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VCountList.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VCountList.java
@@ -37,10 +37,11 @@ import org.codehaus.jackson.map.annotate.JsonSerialize;
 public class VCountList extends VList {
   private static final long serialVersionUID = 1L;
 
-  List<VCount> vCounts = new ArrayList<VCount>();
+  protected List<VCount> vCounts;
 
   public VCountList() {
     super();
+    vCounts = new ArrayList<VCount>();
   }
 
   public VCountList(List<VCount> logList) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGroupList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGroupList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGroupList.java
index 25f44fc..5e6c3fa 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGroupList.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGroupList.java
@@ -35,10 +35,11 @@ import org.codehaus.jackson.map.annotate.JsonSerialize;
 public class VGroupList extends VList {
   private static final long serialVersionUID = 1L;
 
-  SolrDocumentList groupList = new SolrDocumentList();
+  protected SolrDocumentList groupList;
 
   public VGroupList() {
     super();
+    groupList = new SolrDocumentList();
   }
 
   public VGroupList(SolrDocumentList logList) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VHost.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VHost.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VHost.java
index b157adc..da2fbdd 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VHost.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VHost.java
@@ -22,8 +22,8 @@ package org.apache.ambari.logsearch.view;
 import java.util.Set;
 
 public class VHost {
-  String name;
-  Set<String> components;
+  protected String name;
+  protected Set<String> components;
 
   public String getName() {
     return name;

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValue.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValue.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValue.java
index d8bdbfb..adbd6e0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValue.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValue.java
@@ -80,10 +80,11 @@ public class VNameValue implements java.io.Serializable {
    *            Value to set member attribute <b>value</b>
    */
   public void setValue(String value) {
-    if(value.contains(".") && (value.contains("e") || value.contains("E")))
+    if(value.contains(".") && (value.contains("e") || value.contains("E"))){
       this.value=getExponentialValueReplaced(value);
-    else
+    }else{
       this.value = value;
+    }
   }
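
The change above only adds braces; the intent of setValue is that values arriving in scientific notation (for example "1.02E8") are rendered in plain decimal form by the class's own getExponentialValueReplaced helper. The snippet below shows one common way to do that kind of conversion, purely as an illustration and not as the helper's actual implementation.

import java.math.BigDecimal;

public class ExponentialValueSketch {
  public static void main(String[] args) {
    // BigDecimal parses scientific notation and can print it without the exponent.
    String raw = "1.02E8";
    String plain = new BigDecimal(raw).toPlainString();
    System.out.println(raw + " -> " + plain);  // prints: 1.02E8 -> 102000000
  }
}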
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValueList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValueList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValueList.java
index dc5c86c..e95249d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValueList.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValueList.java
@@ -30,7 +30,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 @XmlAccessorType(XmlAccessType.FIELD)
 public class VNameValueList extends VList {
   private static final long serialVersionUID = 1L;
-  List<VNameValue> vNameValues = new ArrayList<VNameValue>();
+  protected List<VNameValue> vNameValues = new ArrayList<VNameValue>();
 
   public VNameValueList() {
     super();

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNodeList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNodeList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNodeList.java
index 0cd10a2..78f32ce 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNodeList.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNodeList.java
@@ -30,7 +30,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 @XmlAccessorType(XmlAccessType.FIELD)
 public class VNodeList extends VList {
   private static final long serialVersionUID = 1L;
-  List<VNode> vNodeList = new ArrayList<VNode>();
+  protected List<VNode> vNodeList = new ArrayList<VNode>();
 
   public List<VNode> getvNodeList() {
     return vNodeList;

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSolrLogList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSolrLogList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSolrLogList.java
index 193b522..55cc089 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSolrLogList.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSolrLogList.java
@@ -35,10 +35,11 @@ import org.codehaus.jackson.map.annotate.JsonSerialize;
 public class VSolrLogList extends VList {
   private static final long serialVersionUID = 1L;
 
-  SolrDocumentList logList = new SolrDocumentList();
+  protected SolrDocumentList logList;
 
   public VSolrLogList() {
     super();
+    logList = new SolrDocumentList();
   }
 
   public VSolrLogList(SolrDocumentList logList) {
@@ -52,8 +53,9 @@ public class VSolrLogList extends VList {
 
   @Override
   public int getListSize() {
-    if (logList != null)
+    if (logList != null){
       return logList.size();
+    }
     return 0;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummary.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummary.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummary.java
index 92b40b8..9aa696c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummary.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummary.java
@@ -22,14 +22,14 @@ import java.util.List;
 
 public class VSummary {
 
-  List<VHost> hosts;
-  String levels;
-  String format;
-  String numberLogs;
-  String from;
-  String to;
-  String includeString;
-  String excludeString;
+  protected List<VHost> hosts;
+  protected String levels;
+  protected String format;
+  protected String numberLogs;
+  protected String from;
+  protected String to;
+  protected String includeString;
+  protected String excludeString;
   
   public VSummary(){
     includeString = "-";

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCount.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCount.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCount.java
index ad1ff5c..b8606d0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCount.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCount.java
@@ -23,13 +23,13 @@ import java.util.List;
 
 public class VSummaryCount {
 
-  String level;
+  protected String level;
 
-  List<String> cricticalMsg;
+  protected List<String> cricticalMsg;
 
-  List<String> compName;
+  protected List<String> compName;
 
-  List<Long> countMsg;
+  protected List<Long> countMsg;
 
   public String getLevel() {
     return level;

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCountList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCountList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCountList.java
index 3d8e262..fcab0b7 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCountList.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCountList.java
@@ -24,7 +24,7 @@ import java.util.Collection;
 
 public class VSummaryCountList {
 
-  Collection<VSummaryCount> countList = new ArrayList<VSummaryCount>();
+  protected Collection<VSummaryCount> countList = new ArrayList<VSummaryCount>();
 
   public Collection<VSummaryCount> getCountList() {
     return countList;

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VUserConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VUserConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VUserConfig.java
index 075df65..55ec1c0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VUserConfig.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VUserConfig.java
@@ -29,11 +29,11 @@ import javax.xml.bind.annotation.XmlRootElement;
 @XmlRootElement
 @XmlAccessorType(XmlAccessType.FIELD)
 public class VUserConfig {
-  String id;
-  String userName;
 
-  String filterName;
-  String values;
+  protected String id;
+  protected String userName;
+  protected String filterName;
+  protected String values;
   
   List<String> shareNameList;
   String rowType;

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VUserConfigList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VUserConfigList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VUserConfigList.java
index 3fc9d0d..f6d1662 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VUserConfigList.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VUserConfigList.java
@@ -34,8 +34,12 @@ import org.codehaus.jackson.map.annotate.JsonSerialize;
 @XmlRootElement
 @XmlAccessorType(XmlAccessType.FIELD)
 public class VUserConfigList extends VList {
-  String name;
-  Collection<VUserConfig> userConfigList;
+  /**
+   * 
+   */
+  private static final long serialVersionUID = 1L;
+  protected String name;
+  protected Collection<VUserConfig> userConfigList;
 
   public String getName() {
     return name;

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchLogoutSuccessHandler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchLogoutSuccessHandler.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchLogoutSuccessHandler.java
index c90f96d..9fe82ba 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchLogoutSuccessHandler.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchLogoutSuccessHandler.java
@@ -35,7 +35,7 @@ public class LogsearchLogoutSuccessHandler extends SimpleUrlLogoutSuccessHandler
     @Override
     public void onLogoutSuccess(HttpServletRequest request, HttpServletResponse response, Authentication authentication)
       throws IOException, ServletException {
-  logger.debug("AtlasLogoutSuccessHandler ::: onLogoutSuccess");
+  logger.debug("LogsearchLogoutSuccessHandler ::: onLogoutSuccess");
 //  super.onLogoutSuccess(request, response, authentication);
   response.sendRedirect("/login.jsp");
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/listeners/SpringEventListener.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/listeners/SpringEventListener.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/listeners/SpringEventListener.java
index ef6a410..fc6a594 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/listeners/SpringEventListener.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/listeners/SpringEventListener.java
@@ -27,6 +27,6 @@ public class SpringEventListener implements ApplicationListener<AbstractAuthenti
 
   @Override
   public void onApplicationEvent(AbstractAuthenticationEvent event) {
-    logger.info(" On Application onApplicationEvent  SpringEventListener");
+    logger.trace(" Inside onApplicationEvent  SpringEventListener");
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java
index cc04821..157fdfc 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java
@@ -36,9 +36,7 @@ public abstract class LogsearchAbstractAuthenticationProvider implements Authent
 
   protected enum AUTH_METHOD {
     LDAP, FILE, SIMPLE
-  }
-
-  ;
+  };
 
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
index 453db61..f29d08f 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
@@ -127,11 +127,11 @@ public class LogsearchAuthenticationProvider extends
    * @return
    */
   public Authentication doAuth(Authentication authentication, AUTH_METHOD authMethod) {
-    if (authMethod.equals(AUTH_METHOD.LDAP) && ldapAuthenticationProvider.isEnable()) {
+    if (authMethod.equals(AUTH_METHOD.LDAP)) {
       authentication = ldapAuthenticationProvider.authenticate(authentication);
-    } else if (authMethod.equals(AUTH_METHOD.FILE) && fileAuthenticationProvider.isEnable()) {
+    } else if (authMethod.equals(AUTH_METHOD.FILE)) {
       authentication = fileAuthenticationProvider.authenticate(authentication);
-    } else if (authMethod.equals(AUTH_METHOD.SIMPLE) && simpleAuthenticationProvider.isEnable()) {
+    } else if (authMethod.equals(AUTH_METHOD.SIMPLE)) {
       authentication = simpleAuthenticationProvider.authenticate(authentication);
     } else {
       logger.error("Invalid authentication method :" + authMethod.name());

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
index 91cc556..a5ff295 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
@@ -50,6 +50,10 @@ public class LogsearchFileAuthenticationProvider extends LogsearchAbstractAuthen
 
   @Override
   public Authentication authenticate(Authentication authentication) throws AuthenticationException {
+    if (!this.isEnable()) {
+      logger.debug("File auth is disabled.");
+      return authentication;
+    }
     String username = authentication.getName();
     String password = (String) authentication.getCredentials();
     if (stringUtil.isEmpty(username)) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapAuthenticationProvider.java
index 9d9f7e4..2212a5a 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapAuthenticationProvider.java
@@ -53,6 +53,10 @@ public class LogsearchLdapAuthenticationProvider extends
   @Override
   public Authentication authenticate(Authentication authentication)
     throws AuthenticationException {
+    if (!this.isEnable()) {
+      logger.debug("Ldap auth is disabled");
+      return authentication;
+    }
     try {
       LdapAuthenticationProvider authProvider = loadLdapAuthenticationProvider();
       if (authProvider != null) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java
index 88e41d2..4dfc30a 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java
@@ -39,6 +39,10 @@ public class LogsearchSimpleAuthenticationProvider extends LogsearchAbstractAuth
 
   @Override
   public Authentication authenticate(Authentication authentication) throws AuthenticationException {
+    if (!this.isEnable()) {
+      logger.debug("Simple auth is disabled");
+      return authentication;
+    }
     String username = authentication.getName();
     String password = (String) authentication.getCredentials();
     username = StringEscapeUtils.unescapeHtml(username);
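
Taken together, the hunks in LogsearchAuthenticationProvider and the three provider classes above move the isEnable() check inside each provider: a disabled provider now hands the incoming, still unauthenticated token straight back, so the caller can fall through to the next method. A minimal sketch of that pattern follows, using simplified stand-in types rather than the real Spring Security interfaces.

import java.util.Arrays;
import java.util.List;

public class AuthFallthroughSketch {

  // Simplified stand-in for an Authentication token.
  static class Token {
    final String user;
    final boolean authenticated;
    Token(String user, boolean authenticated) {
      this.user = user;
      this.authenticated = authenticated;
    }
  }

  // Simplified stand-in for one authentication provider.
  static class Provider {
    final String name;
    final boolean enabled;
    Provider(String name, boolean enabled) {
      this.name = name;
      this.enabled = enabled;
    }
    Token authenticate(Token token) {
      if (!enabled) {
        // Disabled: return the token untouched so the caller tries the next method.
        return token;
      }
      // Pretend the credential check succeeded.
      return new Token(token.user, true);
    }
  }

  public static void main(String[] args) {
    List<Provider> chain = Arrays.asList(
        new Provider("ldap", false),
        new Provider("file", true),
        new Provider("simple", false));
    Token token = new Token("admin", false);
    for (Provider provider : chain) {
      token = provider.authenticate(token);
      if (token.authenticated) {
        System.out.println("authenticated via " + provider.name);
        break;
      }
    }
  }
}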

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/resources/default.properties
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/default.properties b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/default.properties
index 7ce120e..8400cad 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/default.properties
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/default.properties
@@ -35,6 +35,7 @@ servicelog.column.mapping=bundle_id:Bundle Id,thread_name:Thread,log_emessage:me
 auditlog.column.mapping=access:Access Type,reqUser:User,enforcer:Access Enfocer,ip:Client IP
 
 #login method
+#Note: Simple will be supported only if both file and ldap are disabled.
 logsearch.auth.file.enable=true
 logsearch.auth.ldap.enable=false
 logsearch.auth.simple.enable=false

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/resources/log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/log4j.xml b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/log4j.xml
index 02207df..ad96558 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/log4j.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/log4j.xml
@@ -18,7 +18,7 @@
 		</layout>
 	</appender>
 
-	<appender name="rolling_file" class="org.apache.log4j.RollingFileAppender">
+	<!-- <appender name="rolling_file" class="org.apache.log4j.RollingFileAppender">
 		<param name="file" value="logs/logsearch-app.log" />
 		<param name="Threshold" value="info" />
 		<param name="append" value="true" />
@@ -51,7 +51,40 @@
 			<param name="ConversionPattern" value="%d [%t] %-5p %C{6} (%F:%L) - %m%n" />
 		</layout>
 	</appender>
+ -->
 
+<!-- logsearch appender config for the all above appender -->
+
+ <appender name="rolling_file_json"
+  class="org.apache.ambari.logsearch.appender.LogsearchRollingFileAppender">
+  <param name="file" value="logs/logsearch-app.json" />
+  <param name="Threshold" value="info" />
+  <param name="append" value="true" />
+  <param name="maxFileSize" value="10MB" />
+  <param name="maxBackupIndex" value="10" />
+  <layout class="org.apache.ambari.logsearch.appender.LogsearchConversion" />
+</appender>
+
+  <appender name="audit_rolling_file_json"
+    class="org.apache.ambari.logsearch.appender.LogsearchRollingFileAppender">
+    <param name="file" value="logs/logsearch-audit.json" />
+    <param name="Threshold" value="info" />
+    <param name="append" value="true" />
+    <param name="maxFileSize" value="10MB" />
+    <param name="maxBackupIndex" value="10" />
+    <layout class="org.apache.ambari.logsearch.appender.LogsearchConversion" />
+  </appender>
+
+  <appender name="performance_analyzer_json"
+    class="org.apache.ambari.logsearch.appender.LogsearchRollingFileAppender">
+    <param name="file" value="logs/logsearch-performance.json" />
+    <param name="Threshold" value="info" />
+    <param name="append" value="true" />
+    <param name="maxFileSize" value="10MB" />
+    <param name="maxBackupIndex" value="10" />
+    <layout class="org.apache.ambari.logsearch.appender.LogsearchConversion" />
+  </appender>
+ 
 	<!-- Logs to suppress BEGIN -->
 	<category name="org.apache.solr.common.cloud.ZkStateReader" additivity="false">
 	  <priority value="error" />
@@ -62,24 +95,28 @@
 	<logger name="org.apache.ambari.logsearch.audit"
 		additivity="true">
 		<priority value="info" />
-		<appender-ref ref="audit_rolling_file" />
+		<!-- <appender-ref ref="audit_rolling_file" />-->
+        <appender-ref ref="audit_rolling_file_json" />
 	</logger>
 
 	<logger name="org.apache.ambari.logsearch.performance"
 		additivity="false">
 	  	<priority value="info" />
-		<appender-ref ref="performance_analyzer" />
+		<!-- <appender-ref ref="performance_analyzer" />-->
+        <appender-ref ref="performance_analyzer_json" />
 	</logger>
 
 	<logger name="org.apache.ambari.logsearch" additivity="false">
 	  	<priority value="info" />
 		<!-- <appender-ref ref="console" /> -->
-		<appender-ref ref="rolling_file" />
+		<!-- <appender-ref ref="rolling_file" />-->
+        <appender-ref ref="rolling_file_json" />
 	</logger>
 
 	<root>
 		<level value="warn" />
 		<!-- <appender-ref ref="console" /> -->
-		<appender-ref ref="rolling_file" />
+		<!--<appender-ref ref="rolling_file" />-->
+        <appender-ref ref="rolling_file_json" />
 	</root>
 </log4j:configuration>
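
With this configuration the Logsearch categories write JSON through LogsearchRollingFileAppender instead of the plain rolling file. The short usage sketch below assumes the log4j 1.x API and this log4j.xml on the classpath; the class name is illustrative, but the logger names match the categories configured above.

import org.apache.log4j.Logger;

public class LogsearchLoggingSketch {
  // auditLogger writes to logsearch-audit.json, perfLogger to
  // logsearch-performance.json, and appLogger to logsearch-app.json,
  // all through the *_json appenders defined above.
  private static final Logger auditLogger =
      Logger.getLogger("org.apache.ambari.logsearch.audit");
  private static final Logger perfLogger =
      Logger.getLogger("org.apache.ambari.logsearch.performance");
  private static final Logger appLogger =
      Logger.getLogger("org.apache.ambari.logsearch.LogsearchLoggingSketch");

  public static void main(String[] args) {
    auditLogger.info("user admin queried service logs");
    perfLogger.info("solr query took 42 ms");
    appLogger.info("application started");
  }
}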

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties
index 44f3bfd..b5b0b23 100755
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties
@@ -13,6 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+#solr.url=http://host:port/solr
 solr.url=
 
 #Solr Core
@@ -26,11 +27,18 @@ solr.service_logs.replication_factor=1
 #If set, metrics will be sent to Ambari
 #metrics.collector.hosts=example.com
 metrics.collector.hosts=
-
-#Audit log solr url
 auditlog.solr.url=
-#auditlog.solr.core.logs=ranger_audits
+#auditlog.solr.core.logs=collection_name
 auditlog.solr.core.logs=ranger_audits
 solr.audit_logs.split_interval_mins=none
 solr.audit_logs.shards=1
 solr.audit_logs.replication_factor=1
+
+#Logfeeder Settings
+logfeeder.include.default.level=fatal,error,warn
+
+#Authentication settings
+#Note: Simple will be supported only if both file and ldap are disabled.
+logsearch.auth.file.enable=true
+logsearch.auth.ldap.enable=false
+logsearch.auth.simple.enable=false

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties.j2 b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties.j2
index ffe9e0c..82457b7 100755
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties.j2
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties.j2
@@ -31,3 +31,8 @@ solr.audit_logs.split_interval_mins={{audit_logs_collection_splits_interval_mins
 solr.audit_logs.shards={{logsearch_numshards}}
 solr.audit_logs.replication_factor={{logsearch_repfactor}}
 
+#Authentication settings
+#Note: Simple will be supported only if both file and ldap are disabled.
+logsearch.auth.file.enable={{logsearch_auth_file_enable}}
+logsearch.auth.ldap.enable={{logsearch_auth_ldap_enable}}
+logsearch.auth.simple.enable={{logsearch_auth_simple_enable}}
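
The same three flags appear in both the bundled logsearch.properties and the stack template above. The sketch below shows how such flags can be read and combined; the property names and defaults come from the files above, while the file path and the final check (simple auth only counts when file and LDAP auth are both disabled, per the note) are illustrative.

import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Properties;

public class AuthFlagsSketch {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    try (InputStream in = new FileInputStream("logsearch.properties")) {
      props.load(in);
    }
    boolean fileAuth = Boolean.parseBoolean(
        props.getProperty("logsearch.auth.file.enable", "true"));
    boolean ldapAuth = Boolean.parseBoolean(
        props.getProperty("logsearch.auth.ldap.enable", "false"));
    boolean simpleAuth = Boolean.parseBoolean(
        props.getProperty("logsearch.auth.simple.enable", "false"));

    System.out.println("file auth enabled:  " + fileAuth);
    System.out.println("ldap auth enabled:  " + ldapAuth);
    // Simple auth only takes effect when the other two methods are turned off.
    System.out.println("simple auth in use: " + (simpleAuth && !fileAuth && !ldapAuth));
  }
}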

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/scripts/run.sh
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/scripts/run.sh b/ambari-logsearch/ambari-logsearch-portal/src/main/scripts/run.sh
index 256b227..99bc3df 100755
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/scripts/run.sh
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/scripts/run.sh
@@ -73,6 +73,11 @@ fi
 
 if [ -z "$LOGSEARCH_CONF_DIR" ]; then
   LOGSEARCH_CONF_DIR="/etc/logsearch/conf"
+  if [ ! -d $LOGSEARCH_CONF_DIR ]; then
+      if [ -d $script_dir/classes ]; then
+	  LOGSEARCH_CONF_DIR=$script_dir/classes
+      fi
+  fi
   echo "LOGSEARCH_CONF_DIR not found. Use default: $LOGSEARCH_CONF_DIR"
 fi
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/index.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/index.html b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/index.html
index 58f751b..88b6c22 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/index.html
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/index.html
@@ -123,14 +123,14 @@
 
 		</script>
 <!-- 		<script type="text/javascript" src="libs/other/d3/d3.min.js"></script> -->
-<div class="btn-group contextMenuBody" style="display:none;position:absolute;z-index:9999;">
+<div class="btn-group contextMenuBody dropup" style="display:none;position:absolute;z-index:9999;">
 	    <button type="button" class="btn btn-info btn-circle btn-app-sm btn-context dropdown-toggle" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
 	        <i class="fa fa-info"></i>
 	    </button>
 	    <ul class="dropdown-menu">
 	        <li><a data-id="I" href="javascript:void(0);">Include</a></li>
-	        <li><a data-id="E" href="javascript:void(0);">Exclude</a></li>
-	        <li><a data-id="IA" href="javascript:void(0);">*Include*</a></li>
+	        <li><a data-id="E" href="javascript:void(0);">Exclude</a></li> 
+	     	<li><a data-id="IA" href="javascript:void(0);">*Include*</a></li>
 	        <li><a data-id="EA" href="javascript:void(0);">*Exclude*</a></li>
 	        <li role="separator" class="divider"></li>
 	        <li><a data-id="F" href="javascript:void(0);">Find</a></li>

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Utils.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Utils.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Utils.js
index 8048ee2..5d55689 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Utils.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Utils.js
@@ -389,33 +389,53 @@ define(['require',
 		options = _.isUndefined(options) ? Opt : options;
 		$.msg(options);
 	};
-
+	var errorShown = false;
 	Utils.defaultErrorHandler = function(model, error) {
-		if (error.status == 404) {
-				// App.rContent.show(new vError({
-				// 	status : error.status
-				// }));
-			} else if (error.status == 401) {
-
-				window.location = 'login.jsp'+window.location.search;
-				// App.rContent.show(new vError({
-				// 	status : error.status
-				// }));
-				
-			}else if (error.status == 419) {
-				window.location = 'login.jsp'+window.location.search;
-
-			}else if (error.status == "0") {
-				var diffTime = (new Date().getTime() - prevNetworkErrorTime);
-				if (diffTime > 3000) {
-					prevNetworkErrorTime = new Date().getTime();
-					Utils.notifyError({
-						content : "Network Connection Failure : "+
-						"It seems you are not connected to the internet. Please check your internet connection and try again" 
-					})
-					
-				}
-			}
+		if (error.status == 500) {
+			try {
+		        if (!errorShown) {
+		            errorShown = true;
+		            Utils.notifyError({
+		                content: "Some issue on server, Please try again later."
+		            });
+		            setTimeout(function() {
+		                errorShown = false;
+		            }, 3000);
+		        }
+		    } catch (e) {}
+		}
+		else if (error.status == 400) {
+		    try {
+		        if (!errorShown) {
+		            errorShown = true;
+		            Utils.notifyError({
+		                content: JSON.parse(error.responseText).msgDesc
+		            });
+		            setTimeout(function() {
+		                errorShown = false;
+		            }, 3000);
+		        }
+		    } catch (e) {}
+		} else if (error.status == 401) {
+		    window.location = 'login.jsp' + window.location.search;
+		    // App.rContent.show(new vError({
+		    // 	status : error.status
+		    // }));
+
+		} else if (error.status == 419) {
+		    window.location = 'login.jsp' + window.location.search;
+
+		} else if (error.status == "0") {
+		    var diffTime = (new Date().getTime() - prevNetworkErrorTime);
+		    if (diffTime > 3000) {
+		        prevNetworkErrorTime = new Date().getTime();
+		        Utils.notifyError({
+		            content: "Network Connection Failure : " +
+		                "It seems you are not connected to the internet. Please check your internet connection and try again"
+		        })
+
+		    }
+		}
 		// require(['views/common/ErrorView','App'],function(vError,App){
 
 		// });

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/ViewUtils.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/ViewUtils.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/ViewUtils.js
index 0603d5f..ba21b65 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/ViewUtils.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/ViewUtils.js
@@ -91,8 +91,11 @@ define(['require',
         if (params.host_name) {
             this.defaultParams['host_name'] = params.host_name;
         }
-        if (params.components_name) {
-            this.defaultParams['components_name'] = params.components_name;
+        if (params.component_name) {
+            this.defaultParams['component_name'] = params.component_name;
+        }
+        if (params.file_name) {
+            this.defaultParams['file_name'] = params.file_name;
         }
         if (startDateString && endDateString) {
             if (params.timezone) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
index 6785f4f..3916bff 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
@@ -118,7 +118,7 @@ define(['require',
                 require(['views/filter/CreateLogfeederFilterView'],function(CreateLogfeederFilter){
                     var view = new CreateLogfeederFilter({});
                     var options = {
-                        title: "Create Filter",
+                        title: "Logfeeder Filter",
                         content: view,
                         viewType: 'Filter',
                         resizable: false,
@@ -154,7 +154,7 @@ define(['require',
                         width: 650,
                         height: 350,
                         beforeClose: function(event, ui) {
-                            that.onDialogClosed();
+                            //that.onDialogClosed();
                         }
                     },options);
 
@@ -166,6 +166,9 @@ define(['require',
                             // dialog.trigger("toggle:okBtn",false);
                         });
                     }
+                    options.content.on("closeDialog",function(){
+                    	that.onDialogClosed();
+                    });
                     dialog.open();
                 });
             },

http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/TableLayout.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/TableLayout.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/TableLayout.js
index 5b4229a..de5296c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/TableLayout.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/TableLayout.js
@@ -70,7 +70,7 @@ define(['require',
 			// the number of page handles to show. The sliding window
 			// will automatically show the next set of page handles when
 			// you click next at the end of a window.
-			windowSize: 5, // Default is 10
+			windowSize: 10, // Default is 10
 
 			// Used to multiple windowSize to yield a number of pages to slide,
 			// in the case the number is 5


[6/9] ambari git commit: AMBARI-16034. Incremental changes to LogSearch to bring it up to date in the trunk (Dharmesh Makwana via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/888faf26/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
index 0388366..1bd9a78 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
@@ -48,9 +48,6 @@ public class LogFileMgr extends MgrBase {
 
   private static Logger logger = Logger.getLogger(LogFileMgr.class);
 
-  private enum LOG_TYPE {
-    SERVICE, AUDIT
-  }
 
   @Autowired
   ServiceLogsSolrDao serviceLogsSolrDao;
@@ -74,11 +71,14 @@ public class LogFileMgr extends MgrBase {
     String host = (String) searchCriteria.getParamValue("host");
     int minCount = 1;// to remove zero count facet
     SolrQuery solrQuery = new SolrQuery();
-    queryGenrator.setMainQuery(solrQuery, null);
-    queryGenrator.setFacetField(solrQuery, LogSearchConstants.SOLR_PATH, minCount);
+    queryGenerator.setMainQuery(solrQuery, null);
+    queryGenerator.setFacetFieldWithMincount(solrQuery, LogSearchConstants.SOLR_PATH,
+        minCount);
     // adding filter
-    queryGenrator.setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_COMPONENT, componentName);
-    queryGenrator.setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_HOST, host);
+    queryGenerator.setSingleIncludeFilter(solrQuery,
+        LogSearchConstants.SOLR_COMPONENT, componentName);
+    queryGenerator.setSingleIncludeFilter(solrQuery,
+        LogSearchConstants.SOLR_HOST, host);
     try {
       String logType = (String) searchCriteria.getParamValue("logType");
       if (stringUtil.isEmpty(logType)) {
@@ -90,11 +90,13 @@ public class LogFileMgr extends MgrBase {
       } else if (logType.equalsIgnoreCase(LOG_TYPE.AUDIT.name())) {
         daoMgr = auditSolrDao;
       } else {
-        throw restErrorUtil.createRESTException(logType + " is not a valid logType", MessageEnums.INVALID_INPUT_DATA);
+        throw restErrorUtil.createRESTException(logType
+            + " is not a valid logType", MessageEnums.INVALID_INPUT_DATA);
       }
       QueryResponse queryResponse = daoMgr.process(solrQuery);
       if (queryResponse.getFacetField(LogSearchConstants.SOLR_PATH) != null) {
-        FacetField queryFacetField = queryResponse.getFacetField(LogSearchConstants.SOLR_PATH);
+        FacetField queryFacetField = queryResponse
+            .getFacetField(LogSearchConstants.SOLR_PATH);
         if (queryFacetField != null) {
           List<Count> countList = queryFacetField.getValues();
           for (Count count : countList) {
@@ -108,18 +110,15 @@ public class LogFileMgr extends MgrBase {
         }
       }
     } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error in solr query  :" + e.getLocalizedMessage() + "\n Query :" + solrQuery.toQueryString(),
-        e.getCause());
-      throw restErrorUtil.createRESTException("Please try later.", MessageEnums.ERROR_SYSTEM);
+      logger.error("Error in solr query  :" + e.getLocalizedMessage()
+          + "\n Query :" + solrQuery.toQueryString(), e.getCause());
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
     logFileList.setLogFiles(logFiles);
     String jsonStr = "";
-    try {
-      jsonStr = convertObjToString(logFileList);
-    } catch (IOException e) {
-      logger.error(e);
-      throw restErrorUtil.createRESTException("Please try later.", MessageEnums.ERROR_SYSTEM);
-    }
+    jsonStr = convertObjToString(logFileList);
+
     return jsonStr;
   }
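
As an aside on the first hunk of this file: getSearchLogFiles facets on the log file path with a minimum count of 1 and adds include filters for component and host. The sketch below builds a roughly equivalent query in plain SolrJ; the field names are placeholders standing in for the LogSearchConstants values used by the real code.

import org.apache.solr.client.solrj.SolrQuery;

public class LogFileFacetQuerySketch {
  public static void main(String[] args) {
    SolrQuery query = new SolrQuery("*:*");
    // Facet on the file path and drop zero-count buckets, as the manager does.
    query.addFacetField("path");
    query.setFacetMinCount(1);
    // Filter down to one component on one host (placeholder values).
    query.addFilterQuery("type:logsearch_app");
    query.addFilterQuery("host:c6401.ambari.apache.org");
    // Only the facet counts matter here, not the documents themselves.
    query.setRows(0);
    System.out.println(query.toQueryString());
  }
}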
 
@@ -137,29 +136,26 @@ public class LogFileMgr extends MgrBase {
     try {
       int tail = Integer.parseInt(tailSize);
       tail = tail > 100 ? 100 : tail;
-      queryGenrator.setMainQuery(logFileTailQuery, null);
-      queryGenrator.setSingleIncludeFilter(logFileTailQuery,
+      queryGenerator.setMainQuery(logFileTailQuery, null);
+      queryGenerator.setSingleIncludeFilter(logFileTailQuery,
         LogSearchConstants.SOLR_HOST, host);
       if (!stringUtil.isEmpty(logFile)) {
-        queryGenrator.setSingleIncludeFilter(logFileTailQuery,
+        queryGenerator.setSingleIncludeFilter(logFileTailQuery,
           LogSearchConstants.SOLR_PATH,
           solrUtil.makeSolrSearchString(logFile));
       } else if (!stringUtil.isEmpty(component)) {
-        queryGenrator.setSingleIncludeFilter(logFileTailQuery,
+        queryGenerator.setSingleIncludeFilter(logFileTailQuery,
           LogSearchConstants.SOLR_COMPONENT, component);
       } else {
         throw restErrorUtil.createRESTException("component or logfile parameter must be present",
           MessageEnums.ERROR_SYSTEM);
       }
 
-      queryGenrator.setRowCount(logFileTailQuery, tail);
-      queryGenrator.setSortOrderDefaultServiceLog(logFileTailQuery, new SearchCriteria());
+      queryGenerator.setRowCount(logFileTailQuery, tail);
+      queryGenerator.setSortOrderDefaultServiceLog(logFileTailQuery, new SearchCriteria());
       VSolrLogList solrLogList = getLogAsPaginationProvided(logFileTailQuery, serviceLogsSolrDao);
       return convertObjToString(solrLogList);
 
-    } catch (SolrException | IOException e) {
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
     } catch (NumberFormatException ne) {
 
       throw restErrorUtil.createRESTException(ne.getMessage(),