Posted to commits@ambari.apache.org by ol...@apache.org on 2016/09/07 23:37:51 UTC

[01/50] [abbrv] ambari git commit: AMBARI-18153. Spelling/Capitalization Change for Log Search Smart Configuration (Dharmesh Makwana via oleewere) [Forced Update!]

Repository: ambari
Updated Branches:
  refs/heads/branch-dev-logsearch d89101eca -> 4c19f4a26 (forced update)


AMBARI-18153. Spelling/Capitalization Change for Log Search Smart Configuration (Dharmesh Makwana via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/263a2c3e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/263a2c3e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/263a2c3e

Branch: refs/heads/branch-dev-logsearch
Commit: 263a2c3eab7c14798d84c4fc4e11d2279342f0f9
Parents: 9981c0b
Author: oleewere <ol...@gmail.com>
Authored: Thu Aug 18 13:18:46 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:33:57 2016 +0200

----------------------------------------------------------------------
 .../src/main/webapp/scripts/views/common/Header.js               | 2 +-
 .../LOGSEARCH/0.5.0/configuration/logsearch-properties.xml       | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/263a2c3e/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
index 753a7c3..cb8ca4e 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
@@ -168,7 +168,7 @@ define(['require',
                 require(['views/filter/CreateLogfeederFilterView'],function(CreateLogfeederFilter){
                     var view = new CreateLogfeederFilter({});
                     var options = {
-                        title: "Logfeeder Filter",
+                        title: "Log Feeder Log Levels",
                         content: view,
                         viewType: 'Filter',
                         resizable: false,

http://git-wip-us.apache.org/repos/asf/ambari/blob/263a2c3e/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml
index 7f134e5..56ff2d1 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml
@@ -115,8 +115,8 @@
   <property>
     <name>logsearch.logfeeder.include.default.level</name>
     <value>FATAL,ERROR,WARN</value>
-    <description>Include default Logfeeder log levels for Log Search. Used for bootstrapping the configuration only. (levels: FATAL,ERROR,WARN,INFO,DEBUG,TRACE)</description>
-    <display-name>Logfeeder log levels</display-name>
+    <description>Include default Log Feeder Log Levels for Log Search. Used for bootstrapping the configuration only. (levels: FATAL,ERROR,WARN,INFO,DEBUG,TRACE)</description>
+    <display-name>Log Feeder Log Levels</display-name>
     <value-attributes>
       <editable-only-at-install>true</editable-only-at-install>
     </value-attributes>
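
For reference, the renamed property logsearch.logfeeder.include.default.level carries a
comma-separated list of log levels that Log Search only uses when bootstrapping the Log
Feeder filter configuration. The sketch below shows, in plain Java, how such a value could
be split into an ordered set of level names; it is illustrative only and is not the actual
Log Search parsing code (the class and method names are invented for the example).

    import java.util.LinkedHashSet;
    import java.util.Set;

    // Hypothetical helper: splits a value such as "FATAL,ERROR,WARN"
    // into an ordered set of upper-case level names.
    public class DefaultLevelParserExample {

      public static Set<String> parseDefaultLevels(String propertyValue) {
        Set<String> levels = new LinkedHashSet<>();
        for (String level : propertyValue.split(",")) {
          String trimmed = level.trim().toUpperCase();
          if (!trimmed.isEmpty()) {
            levels.add(trimmed);
          }
        }
        return levels;
      }

      public static void main(String[] args) {
        // Prints [FATAL, ERROR, WARN]
        System.out.println(parseDefaultLevels("FATAL,ERROR,WARN"));
      }
    }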


[29/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java
deleted file mode 100644
index 7a27e1c..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java
+++ /dev/null
@@ -1,574 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.rest;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiImplicitParam;
-import io.swagger.annotations.ApiImplicitParams;
-import io.swagger.annotations.ApiOperation;
-import org.apache.ambari.logsearch.common.LogSearchConstants;
-import org.apache.ambari.logsearch.common.SearchCriteria;
-import org.apache.ambari.logsearch.manager.LogsMgr;
-import org.apache.ambari.logsearch.view.VCountList;
-import org.apache.ambari.logsearch.view.VNameValueList;
-import org.apache.ambari.logsearch.view.VNodeList;
-import org.apache.commons.lang.StringEscapeUtils;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.annotation.Scope;
-import org.springframework.stereotype.Component;
-
-import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.*;
-import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.*;
-import static org.apache.ambari.logsearch.doc.DocConstants.ServiceOperationDescriptions.*;
-
-@Api(value = "service/logs", description = "Service log operations")
-@Path("service/logs")
-@Component
-@Scope("request")
-public class ServiceLogsREST {
-
-  @Autowired
-  LogsMgr logMgr;
-
-  @GET
-  @Produces({"application/json"})
-  @ApiOperation(SEARCH_LOGS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FIND_D, name = "find", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = SOURCE_LOG_ID_D, name = "sourceLogId", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = KEYWORD_TYPE_D, name = "keywordType", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TOKEN_D, name = "token", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = IS_LAST_PAGE_D, name = "isLastPage", dataType = "boolean", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public String searchSolrData(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    searchCriteria.addParam("keyword", StringEscapeUtils.unescapeXml(request.getParameter("find")));
-    searchCriteria.addParam("sourceLogId", request.getParameter("sourceLogId"));
-    searchCriteria.addParam("keywordType",
-      request.getParameter("keywordType"));
-    searchCriteria.addParam("token",
-      request.getParameter("token"));
-    searchCriteria.addParam("isLastPage",request.getParameter("isLastPage"));
-    return logMgr.searchLogs(searchCriteria);
-  }
-
-  @GET
-  @Path("/hosts")
-  @Produces({"application/json"})
-  @ApiOperation(GET_HOSTS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query")
-  })
-  public String getHosts(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addParam("q", request.getParameter("q"));
-    return logMgr.getHosts();
-  }
-
-  @GET
-  @Path("/components")
-  @Produces({"application/json"})
-  @ApiOperation(GET_COMPONENTS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query")
-  })
-  public String getComponents(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addParam("q", request.getParameter("q"));
-    return logMgr.getComponents();
-  }
-
-  @GET
-  @Path("/aggregated")
-  @Produces({"application/json"})
-  @ApiOperation(GET_AGGREGATED_INFO_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public String getAggregatedInfo(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria();
-    searchCriteria.addRequiredServiceLogsParams(request);
-    return logMgr.getAggregatedInfo(searchCriteria);
-  }
-
-  @GET
-  @Path("/levels/count")
-  @Produces({"application/json"})
-  @ApiOperation(GET_LOG_LEVELS_COUNT_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public VCountList getLogLevelsCount(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria();
-    searchCriteria.addParam("q", request.getParameter("q"));
-    searchCriteria
-      .addParam("startDate", request.getParameter("start_time"));
-    searchCriteria.addParam("endDate", request.getParameter("end_time"));
-    return logMgr.getLogLevelCount();
-  }
-
-  @GET
-  @Path("/components/count")
-  @Produces({"application/json"})
-  @ApiOperation(GET_COMPONENTS_COUNT_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public VCountList getComponentsCount(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria();
-    searchCriteria.addParam("q", request.getParameter("q"));
-    searchCriteria
-      .addParam("startDate", request.getParameter("start_time"));
-    searchCriteria.addParam("endDate", request.getParameter("end_time"));
-    return logMgr.getComponentsCount();
-  }
-
-  @GET
-  @Path("/hosts/count")
-  @Produces({"application/json"})
-  @ApiOperation(GET_HOSTS_COUNT_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query")
-  })
-  public VCountList getHostsCount(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria();
-    searchCriteria.addParam("q", request.getParameter("q"));
-    searchCriteria
-      .addParam("startDate", request.getParameter("start_time"));
-    searchCriteria.addParam("endDate", request.getParameter("end_time"));
-    searchCriteria.addParam("excludeQuery", StringEscapeUtils
-      .unescapeXml(request.getParameter("excludeQuery")));
-    searchCriteria.addParam("includeQuery", StringEscapeUtils
-      .unescapeXml(request.getParameter("includeQuery")));
-    return logMgr.getHostsCount();
-  }
-
-  @GET
-  @Path("/tree")
-  @Produces({"application/json"})
-  @ApiOperation(GET_TREE_EXTENSION_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public VNodeList getTreeExtension(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    searchCriteria.addParam("hostName", request.getParameter("hostName"));
-    return logMgr.getTreeExtension(searchCriteria);
-  }
-
-  @GET
-  @Path("/levels/counts/namevalues")
-  @Produces({"application/json"})
-  @ApiOperation(GET_LOG_LEVELS_COUNT_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public VNameValueList getLogsLevelCount(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    return logMgr.getLogsLevelCount(searchCriteria);
-  }
-
-  @GET
-  @Path("/histogram")
-  @Produces({"application/json"})
-  @ApiOperation(GET_HISTOGRAM_DATA_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public String getHistogramData(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    searchCriteria.addParam("unit", request.getParameter("unit"));
-    return logMgr.getHistogramData(searchCriteria);
-  }
-
-  @GET
-  @Path("/request/cancel")
-  @Produces({"application/json"})
-  @ApiOperation(CANCEL_FIND_REQUEST_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = TOKEN_D, name = "token", dataType = "string", paramType = "query"),
-  })
-  public String cancelFindRequest(@Context HttpServletRequest request) {
-    String uniqueId = request.getParameter("token");
-    return logMgr.cancelFindRequestByDate(uniqueId);
-  }
-
-  @GET
-  @Path("/export")
-  @Produces({"application/json"})
-  @ApiOperation(EXPORT_TO_TEXT_FILE_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FORMAT_D, name = "format", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = UTC_OFFSET_D, name = "utcOffset", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public Response exportToTextFile(@Context HttpServletRequest request) {
-
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    searchCriteria.addParam("format", request.getParameter("format"));
-    searchCriteria.addParam("utcOffset", request.getParameter("utcOffset"));
-    return logMgr.exportToTextFile(searchCriteria);
-
-  }
-
-  @GET
-  @Path("/hosts/components")
-  @Produces({"application/json"})
-  @ApiOperation(GET_HOST_LIST_BY_COMPONENT_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public String getHostListByComponent(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    searchCriteria.addParam("componentName",
-      request.getParameter("componentName"));
-    return logMgr.getHostListByComponent(searchCriteria);
-  }
-
-  @GET
-  @Path("/components/level/counts")
-  @Produces({"application/json"})
-  @ApiOperation(GET_COMPONENT_LIST_WITH_LEVEL_COUNT_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public String getComponentListWithLevelCounts(
-    @Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    return logMgr.getComponentListWithLevelCounts(searchCriteria);
-  }
-
-  @GET
-  @Path("/solr/boundarydates")
-  @Produces({"application/json"})
-  @ApiOperation(GET_EXTREME_DATES_FOR_BUNDLE_ID_OD)
-  public String getExtremeDatesForBundelId(@Context HttpServletRequest request) {
-
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addParam(LogSearchConstants.BUNDLE_ID,
-      request.getParameter("bundle_id"));
-
-    return logMgr.getExtremeDatesForBundelId(searchCriteria);
-
-  }
-
-  @GET
-  @Path("/fields")
-  @Produces({"application/json"})
-  @ApiOperation(GET_SERVICE_LOGS_FIELD_NAME_OD)
-  public String getServiceLogsFieldsName() {
-    return logMgr.getServiceLogsFieldsName();
-  }
-
-  @GET
-  @Path("/schema/fields")
-  @Produces({"application/json"})
-  @ApiOperation(GET_SERVICE_LOGS_SCHEMA_FIELD_NAME_OD)
-  public String getServiceLogsSchemaFieldsName() {
-    return logMgr.getServiceLogsSchemaFieldsName();
-  }
-
-  @GET
-  @Path("/anygraph")
-  @Produces({"application/json"})
-  @ApiOperation(GET_ANY_GRAPH_DATA_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = X_AXIS_D, name = "xAxis", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = Y_AXIS_D, name = "yAxis", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = STACK_BY_D, name = "stackBy", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public String getAnyGraphData(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("xAxis", request.getParameter("xAxis"));
-    searchCriteria.addParam("yAxis", request.getParameter("yAxis"));
-    searchCriteria.addParam("stackBy", request.getParameter("stackBy"));
-    searchCriteria.addParam("from", request.getParameter("from"));
-    searchCriteria.addParam("to", request.getParameter("to"));
-    searchCriteria.addParam("unit", request.getParameter("unit"));
-    return logMgr.getAnyGraphData(searchCriteria);
-  }
-
-  @GET
-  @Path("/truncated")
-  @Produces({"application/json"})
-  @ApiOperation(GET_AFTER_BEFORE_LOGS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D,name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ID_D, name = "id", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = SCROLL_TYPE_D, name = "scrollType", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = NUMBER_ROWS_D, name = "numberRows", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public String getAfterBeforeLogs(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    searchCriteria.addParam("id", request.getParameter("id"));
-    searchCriteria.addParam("scrollType",
-      request.getParameter("scrollType"));
-    searchCriteria.addParam("numberRows",
-      request.getParameter("numberRows"));
-    return logMgr.getAfterBeforeLogs(searchCriteria);
-  }
-
-  @GET
-  @Path("/serviceconfig")
-  @Produces({"application/json"})
-  @ApiOperation(GET_HADOOP_SERVICE_CONFIG_JSON_OD)
-  public String getHadoopServiceConfigJSON() {
-    return logMgr.getHadoopServiceConfigJSON();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsResource.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsResource.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsResource.java
new file mode 100644
index 0000000..5400825
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsResource.java
@@ -0,0 +1,238 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.rest;
+
+import javax.inject.Inject;
+import javax.ws.rs.BeanParam;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Response;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.model.request.impl.BaseServiceLogRequest;
+import org.apache.ambari.logsearch.model.request.impl.ServiceAnyGraphRequest;
+import org.apache.ambari.logsearch.model.request.impl.ServiceExtremeDatesRequest;
+import org.apache.ambari.logsearch.model.request.impl.ServiceGraphRequest;
+import org.apache.ambari.logsearch.model.request.impl.ServiceLogExportRequest;
+import org.apache.ambari.logsearch.model.request.impl.ServiceLogFileRequest;
+import org.apache.ambari.logsearch.model.request.impl.ServiceLogRequest;
+import org.apache.ambari.logsearch.model.request.impl.ServiceLogTruncatedRequest;
+import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse;
+import org.apache.ambari.logsearch.model.response.CountDataListResponse;
+import org.apache.ambari.logsearch.model.response.GraphDataListResponse;
+import org.apache.ambari.logsearch.model.response.GroupListResponse;
+import org.apache.ambari.logsearch.model.response.NameValueDataListResponse;
+import org.apache.ambari.logsearch.model.response.NodeListResponse;
+import org.apache.ambari.logsearch.model.response.ServiceLogResponse;
+import org.apache.ambari.logsearch.query.model.CommonSearchCriteria;
+import org.apache.ambari.logsearch.query.model.SearchCriteria;
+import org.apache.ambari.logsearch.manager.ServiceLogsManager;
+import org.apache.ambari.logsearch.query.model.ServiceAnyGraphSearchCriteria;
+import org.apache.ambari.logsearch.query.model.ServiceExtremeDatesCriteria;
+import org.apache.ambari.logsearch.query.model.ServiceGraphSearchCriteria;
+import org.apache.ambari.logsearch.query.model.ServiceLogExportSearchCriteria;
+import org.apache.ambari.logsearch.query.model.ServiceLogFileSearchCriteria;
+import org.apache.ambari.logsearch.query.model.ServiceLogSearchCriteria;
+import org.apache.ambari.logsearch.query.model.ServiceLogTruncatedSearchCriteria;
+import org.springframework.context.annotation.Scope;
+import org.springframework.core.convert.ConversionService;
+import org.springframework.stereotype.Component;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceOperationDescriptions.*;
+
+@Api(value = "service/logs", description = "Service log operations")
+@Path("service/logs")
+@Component
+@Scope("request")
+public class ServiceLogsResource {
+
+  @Inject
+  private ServiceLogsManager serviceLogsManager;
+
+  @Inject
+  private ConversionService conversionService;
+
+  @GET
+  @Produces({"application/json"})
+  @ApiOperation(SEARCH_LOGS_OD)
+  public ServiceLogResponse searchSolrData(@BeanParam ServiceLogRequest request) {
+    return serviceLogsManager.searchLogs(conversionService.convert(request, ServiceLogSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/hosts")
+  @Produces({"application/json"})
+  @ApiOperation(GET_HOSTS_OD)
+  public GroupListResponse getHosts() {
+    return serviceLogsManager.getHosts();
+  }
+
+  @GET
+  @Path("/components")
+  @Produces({"application/json"})
+  @ApiOperation(GET_COMPONENTS_OD)
+  public GroupListResponse getComponents() {
+    return serviceLogsManager.getComponents();
+  }
+
+  @GET
+  @Path("/aggregated")
+  @Produces({"application/json"})
+  @ApiOperation(GET_AGGREGATED_INFO_OD)
+  public GraphDataListResponse getAggregatedInfo(@BeanParam BaseServiceLogRequest request) {
+    return serviceLogsManager.getAggregatedInfo(conversionService.convert(request, CommonSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/levels/count")
+  @Produces({"application/json"})
+  @ApiOperation(GET_LOG_LEVELS_COUNT_OD)
+  public CountDataListResponse getLogLevelsCount() {
+    return serviceLogsManager.getLogLevelCount();
+  }
+
+  @GET
+  @Path("/components/count")
+  @Produces({"application/json"})
+  @ApiOperation(GET_COMPONENTS_COUNT_OD)
+  public CountDataListResponse getComponentsCount() {
+    return serviceLogsManager.getComponentsCount();
+  }
+
+  @GET
+  @Path("/hosts/count")
+  @Produces({"application/json"})
+  @ApiOperation(GET_HOSTS_COUNT_OD)
+  public CountDataListResponse getHostsCount() {
+    return serviceLogsManager.getHostsCount();
+  }
+
+  @GET
+  @Path("/tree")
+  @Produces({"application/json"})
+  @ApiOperation(GET_TREE_EXTENSION_OD)
+  public NodeListResponse getTreeExtension(@QueryParam("hostName") @ApiParam String hostName, @BeanParam ServiceLogFileRequest request) {
+    SearchCriteria searchCriteria = conversionService.convert(request, ServiceLogFileSearchCriteria.class);
+    searchCriteria.addParam("hostName", hostName); // TODO: use host_name instead - needs UI change
+    return serviceLogsManager.getTreeExtension(searchCriteria);
+  }
+
+  @GET
+  @Path("/levels/counts/namevalues")
+  @Produces({"application/json"})
+  @ApiOperation(GET_LOG_LEVELS_COUNT_OD)
+  public NameValueDataListResponse getLogsLevelCount(@BeanParam ServiceLogFileRequest request) {
+    return serviceLogsManager.getLogsLevelCount(conversionService.convert(request, ServiceLogFileSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/histogram")
+  @Produces({"application/json"})
+  @ApiOperation(GET_HISTOGRAM_DATA_OD)
+  public BarGraphDataListResponse getHistogramData(@BeanParam ServiceGraphRequest request) {
+    return serviceLogsManager.getHistogramData(conversionService.convert(request, ServiceGraphSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/request/cancel")
+  @Produces({"application/json"})
+  @ApiOperation(CANCEL_FIND_REQUEST_OD)
+  public String cancelFindRequest(@QueryParam("token") @ApiParam String token) {
+    return serviceLogsManager.cancelFindRequestByDate(token);
+  }
+
+  @GET
+  @Path("/export")
+  @Produces({"application/json"})
+  @ApiOperation(EXPORT_TO_TEXT_FILE_OD)
+  public Response exportToTextFile(@BeanParam ServiceLogExportRequest request) {
+    return serviceLogsManager.exportToTextFile(conversionService.convert(request, ServiceLogExportSearchCriteria.class));
+
+  }
+
+  @GET
+  @Path("/hosts/components")
+  @Produces({"application/json"})
+  @ApiOperation(GET_HOST_LIST_BY_COMPONENT_OD)
+  public NodeListResponse getHostListByComponent(@BeanParam ServiceLogFileRequest request, @QueryParam("componentName") @ApiParam String componentName) {
+    SearchCriteria searchCriteria = conversionService.convert(request, ServiceLogFileSearchCriteria.class);
+    searchCriteria.addParam("componentName", componentName); // TODO: use component_name instead - needs UI change
+    return serviceLogsManager.getHostListByComponent(searchCriteria);
+  }
+
+  @GET
+  @Path("/components/levels/counts")
+  @Produces({"application/json"})
+  @ApiOperation(GET_COMPONENT_LIST_WITH_LEVEL_COUNT_OD)
+  public NodeListResponse getComponentListWithLevelCounts(@BeanParam ServiceLogFileRequest request) {
+    return serviceLogsManager.getComponentListWithLevelCounts(conversionService.convert(request, ServiceLogFileSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/solr/boundarydates")
+  @Produces({"application/json"})
+  @ApiOperation(GET_EXTREME_DATES_FOR_BUNDLE_ID_OD)
+  public NameValueDataListResponse getExtremeDatesForBundelId(@BeanParam ServiceExtremeDatesRequest request) {
+    return serviceLogsManager.getExtremeDatesForBundelId(conversionService.convert(request, ServiceExtremeDatesCriteria.class));
+  }
+
+  @GET
+  @Path("/fields")
+  @Produces({"application/json"})
+  @ApiOperation(GET_SERVICE_LOGS_FIELD_NAME_OD)
+  public String getServiceLogsFieldsName() {
+    return serviceLogsManager.getServiceLogsFieldsName();
+  }
+
+  @GET
+  @Path("/schema/fields")
+  @Produces({"application/json"})
+  @ApiOperation(GET_SERVICE_LOGS_SCHEMA_FIELD_NAME_OD)
+  public String getServiceLogsSchemaFieldsName() {
+    return serviceLogsManager.getServiceLogsSchemaFieldsName();
+  }
+
+  @GET
+  @Path("/anygraph")
+  @Produces({"application/json"})
+  @ApiOperation(GET_ANY_GRAPH_DATA_OD)
+  public BarGraphDataListResponse getAnyGraphData(@BeanParam ServiceAnyGraphRequest request) {
+    return serviceLogsManager.getAnyGraphData(conversionService.convert(request, ServiceAnyGraphSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/truncated")
+  @Produces({"application/json"})
+  @ApiOperation(GET_AFTER_BEFORE_LOGS_OD)
+  public ServiceLogResponse getAfterBeforeLogs(@BeanParam ServiceLogTruncatedRequest request) {
+    return serviceLogsManager.getAfterBeforeLogs(conversionService.convert(request, ServiceLogTruncatedSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/serviceconfig")
+  @Produces({"application/json"})
+  @ApiOperation(GET_HADOOP_SERVICE_CONFIG_JSON_OD)
+  public String getHadoopServiceConfigJSON() {
+    return serviceLogsManager.getHadoopServiceConfigJSON();
+  }
+}
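
The new ServiceLogsResource above replaces the old HttpServletRequest/SearchCriteria
plumbing with JAX-RS @BeanParam request objects that Spring's ConversionService converts
into search criteria. The exact fields of the request classes (e.g. ServiceLogRequest) are
not part of this diff; the sketch below only illustrates the general @BeanParam pattern,
using an invented class name and an assumed subset of the query parameters.

    import javax.ws.rs.QueryParam;

    // Hypothetical @BeanParam holder: JAX-RS binds the annotated query
    // parameters onto these fields when the bean is passed to a resource method.
    public class ExampleServiceLogRequest {

      @QueryParam("host_name")
      private String hostName;

      @QueryParam("component_name")
      private String componentName;

      @QueryParam("level")
      private String level;

      public String getHostName() { return hostName; }
      public void setHostName(String hostName) { this.hostName = hostName; }

      public String getComponentName() { return componentName; }
      public void setComponentName(String componentName) { this.componentName = componentName; }

      public String getLevel() { return level; }
      public void setLevel(String level) { this.level = level; }
    }

A resource method then declares the bean as a parameter, for example
searchSolrData(@BeanParam ExampleServiceLogRequest request), which is the same pattern the
diff applies with the real request classes.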

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
deleted file mode 100644
index 699dc17..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.rest;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiImplicitParam;
-import io.swagger.annotations.ApiImplicitParams;
-import io.swagger.annotations.ApiOperation;
-import org.apache.ambari.logsearch.common.LogSearchConstants;
-import org.apache.ambari.logsearch.common.SearchCriteria;
-import org.apache.ambari.logsearch.manager.UserConfigMgr;
-import org.apache.ambari.logsearch.view.VUserConfig;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.annotation.Scope;
-import org.springframework.stereotype.Component;
-
-import static org.apache.ambari.logsearch.doc.DocConstants.UserConfigDescriptions.*;
-import static org.apache.ambari.logsearch.doc.DocConstants.UserConfigOperationDescriptions.*;
-
-@Api(value = "userconfig", description = "User config operations")
-@Path("userconfig")
-@Component
-@Scope("request")
-public class UserConfigREST {
-
-  @Autowired
-  UserConfigMgr userConfigMgr;
-
-  @POST
-  @Produces({"application/json"})
-  @ApiOperation(SAVE_USER_CONFIG_OD)
-  public String saveUserConfig(VUserConfig vhist) {
-    return userConfigMgr.saveUserConfig(vhist);
-  }
-
-  @PUT
-  @Produces({"application/json"})
-  @ApiOperation(UPDATE_USER_CONFIG_OD)
-  public String updateUserConfig(VUserConfig vhist) {
-    return userConfigMgr.updateUserConfig(vhist);
-  }
-
-  @DELETE
-  @Path("/{id}")
-  @ApiOperation(DELETE_USER_CONFIG_OD)
-  public void deleteUserConfig(@PathParam("id") String id) {
-    userConfigMgr.deleteUserConfig(id);
-  }
-
-  @GET
-  @Produces({"application/json"})
-  @ApiOperation(GET_USER_CONFIG_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = USER_ID_D, name = "userId", paramType = "query", dataType = "string"),
-    @ApiImplicitParam(value = FILTER_NAME_D, name = "filterName", paramType = "query", dataType = "string"),
-    @ApiImplicitParam(value = ROW_TYPE_D, name = "rowType", paramType = "query", dataType = "string")
-  })
-  public String getUserConfig(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addParam(LogSearchConstants.USER_NAME,
-      request.getParameter("userId"));
-    searchCriteria.addParam(LogSearchConstants.FILTER_NAME,
-      request.getParameter("filterName"));
-    searchCriteria.addParam(LogSearchConstants.ROW_TYPE,
-      request.getParameter("rowType"));
-    return userConfigMgr.getUserConfig(searchCriteria);
-  }
-
-  @GET
-  @Path("/users/filter")
-  @Produces({"application/json"})
-  @ApiOperation(GET_USER_FILTER_OD)
-  public String getUserFilter(@Context HttpServletRequest request) {
-    return userConfigMgr.getUserFilter();
-  }
-
-  @POST
-  @Path("/users/filter")
-  @Produces({"application/json"})
-  @ApiOperation(UPDATE_USER_FILTER_OD)
-  public String createUserFilter(String json) {
-    return userConfigMgr.saveUserFiter(json);
-  }
-
-  @PUT
-  @Path("/users/filter/{id}")
-  @Produces({"application/json"})
-  @ApiOperation(GET_USER_FILTER_BY_ID_OD)
-  public String updateUserFilter(String json) {
-    return userConfigMgr.saveUserFiter(json);
-  }
-
-  @GET
-  @Path("/users/names")
-  @Produces({"application/json"})
-  @ApiOperation(GET_ALL_USER_NAMES_OD)
-  public String getAllUserName() {
-    return userConfigMgr.getAllUserName();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigResource.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigResource.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigResource.java
new file mode 100644
index 0000000..fd36978
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigResource.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.rest;
+
+import javax.inject.Inject;
+import javax.ws.rs.BeanParam;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.apache.ambari.logsearch.manager.UserConfigManager;
+import org.apache.ambari.logsearch.model.request.impl.UserConfigRequest;
+import org.apache.ambari.logsearch.query.model.UserConfigSearchCriteria;
+import org.apache.ambari.logsearch.view.VUserConfig;
+import org.springframework.context.annotation.Scope;
+import org.springframework.core.convert.ConversionService;
+import org.springframework.stereotype.Component;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.UserConfigOperationDescriptions.*;
+
+@Api(value = "userconfig", description = "User config operations")
+@Path("userconfig")
+@Component
+@Scope("request")
+public class UserConfigResource {
+
+  @Inject
+  private UserConfigManager userConfigManager;
+
+  @Inject
+  private ConversionService conversionService;
+
+  @POST
+  @Produces({"application/json"})
+  @ApiOperation(SAVE_USER_CONFIG_OD)
+  public String saveUserConfig(VUserConfig vhist) {
+    return userConfigManager.saveUserConfig(vhist);
+  }
+
+  @PUT
+  @Produces({"application/json"})
+  @ApiOperation(UPDATE_USER_CONFIG_OD)
+  public String updateUserConfig(VUserConfig vhist) {
+    return userConfigManager.updateUserConfig(vhist);
+  }
+
+  @DELETE
+  @Path("/{id}")
+  @ApiOperation(DELETE_USER_CONFIG_OD)
+  public void deleteUserConfig(@PathParam("id") String id) {
+    userConfigManager.deleteUserConfig(id);
+  }
+
+  @GET
+  @Produces({"application/json"})
+  @ApiOperation(GET_USER_CONFIG_OD)
+  public String getUserConfig(@BeanParam UserConfigRequest request) {
+    return userConfigManager.getUserConfig(conversionService.convert(request, UserConfigSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/filters")
+  @Produces({"application/json"})
+  @ApiOperation(GET_USER_FILTER_OD)
+  public String getUserFilter() {
+    return userConfigManager.getUserFilter();
+  }
+
+  @POST
+  @Path("/filters")
+  @Produces({"application/json"})
+  @ApiOperation(UPDATE_USER_FILTER_OD)
+  public String createUserFilter(String json) {
+    return userConfigManager.saveUserFiter(json);
+  }
+
+  @PUT
+  @Path("/filters/{id}")
+  @Produces({"application/json"})
+  @ApiOperation(GET_USER_FILTER_BY_ID_OD)
+  public String updateUserFilter(String json) {
+    return userConfigManager.saveUserFiter(json);
+  }
+
+  @GET
+  @Path("/names")
+  @Produces({"application/json"})
+  @ApiOperation(GET_ALL_USER_NAMES_OD)
+  public String getAllUserName() {
+    return userConfigManager.getAllUserName();
+  }
+
+}
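
Note on the change above: the new resource binds its query parameters through a @BeanParam request object instead of reading them off HttpServletRequest by hand. The actual UserConfigRequest class is not part of this hunk, so the following is only a minimal sketch of how such a JAX-RS bean is commonly declared; the parameter names (userId, filterName, rowType) mirror what the old UserConfigREST endpoint parsed manually, and the class name and everything else here are assumptions, not code from the commit.

// Illustrative sketch only -- not the commit's UserConfigRequest.
import javax.ws.rs.QueryParam;

public class UserConfigRequestSketch {

  @QueryParam("userId")
  private String userId;

  @QueryParam("filterName")
  private String filterName;

  @QueryParam("rowType")
  private String rowType;

  public String getUserId() { return userId; }

  public void setUserId(String userId) { this.userId = userId; }

  public String getFilterName() { return filterName; }

  public void setFilterName(String filterName) { this.filterName = filterName; }

  public String getRowType() { return rowType; }

  public void setRowType(String rowType) { this.rowType = rowType; }
}

With a bean like this, the resource method signature getUserConfig(@BeanParam UserConfigRequest request) lets the JAX-RS runtime populate all three values before the conversion to UserConfigSearchCriteria runs.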

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/service/UserService.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/service/UserService.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/service/UserService.java
index 4b2b918..ba4431d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/service/UserService.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/service/UserService.java
@@ -18,7 +18,6 @@
  */
 package org.apache.ambari.logsearch.service;
 
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.core.userdetails.UserDetailsService;
 import org.springframework.security.core.userdetails.UsernameNotFoundException;
 import org.springframework.stereotype.Service;
@@ -26,12 +25,14 @@ import org.apache.ambari.logsearch.dao.UserDao;
 import org.apache.ambari.logsearch.web.model.User;
 import org.apache.log4j.Logger;
 
+import javax.inject.Inject;
+
 
 @Service
 public class UserService implements UserDetailsService {
   private static final Logger logger = Logger.getLogger(UserService.class);
 
-  @Autowired
+  @Inject
   private UserDao userDao;
 
   @Override
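
The only change in this file is swapping Spring's @Autowired for JSR-330's @Inject. Spring resolves both annotations through the same autowiring post-processor, so the swap is behavior-preserving. A minimal, hypothetical sketch of that equivalence (class names are illustrative, not from the commit):

// Illustrative sketch only: Spring injects a javax.inject.Inject field exactly
// as it would an @Autowired one.
import javax.inject.Inject;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.stereotype.Component;

public class InjectSwapSketch {

  @Component
  public static class ExampleDao {            // hypothetical collaborator
  }

  @Component
  public static class ExampleService {
    @Inject                                   // JSR-330; handled like @Autowired by Spring
    private ExampleDao exampleDao;

    public ExampleDao getExampleDao() {
      return exampleDao;
    }
  }

  public static void main(String[] args) {
    AnnotationConfigApplicationContext ctx =
        new AnnotationConfigApplicationContext(ExampleDao.class, ExampleService.class);
    System.out.println(ctx.getBean(ExampleService.class).getExampleDao() != null); // true
    ctx.close();
  }
}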

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java
new file mode 100644
index 0000000..a3f59f7
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java
@@ -0,0 +1,339 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.solr.model;
+
+import org.apache.ambari.logsearch.model.response.AuditLogData;
+import org.apache.solr.client.solrj.beans.Field;
+
+import java.util.Date;
+import java.util.List;
+
+public class SolrAuditLogData extends SolrCommonLogData implements AuditLogData {
+
+  @Field("logType")
+  private String logType;
+
+  @Field("policy")
+  private String policy;
+
+  @Field("access")
+  private String access;
+
+  @Field("action")
+  private String action;
+
+  @Field("agent")
+  private String agent;
+
+  @Field("agentHost")
+  private String agentHost;
+
+  @Field("cliIP")
+  private String clientIp;
+
+  @Field("cliType")
+  private String clientType;
+
+  @Field("reqContext")
+  private String requestContext;
+
+  @Field("enforcer")
+  private String enforcer;
+
+  @Field("evtTime")
+  private Date eventTime;
+
+  @Field("reason")
+  private String reason;
+
+  @Field("proxyUsers")
+  private List<String> proxyUsers;
+
+  @Field("repo")
+  private String repo;
+
+  @Field("repoType")
+  private String repoType;
+
+  @Field("reqData")
+  private String requestData;
+
+  @Field("reqUser")
+  private String requestUser;
+
+  @Field("resType")
+  private String responseType;
+
+  @Field("resource")
+  private String resource;
+
+  @Field("result")
+  private Integer result;
+
+  @Field("sess")
+  private String session;
+
+  @Field("tags")
+  private List<String> tags;
+
+  @Field("tags_str")
+  private String tagsStr;
+
+  @Field("text")
+  private String text;
+
+  @Override
+  public String getText() {
+    return text;
+  }
+
+  @Override
+  public void setText(String text) {
+    this.text = text;
+  }
+
+  @Override
+  public String getTagsStr() {
+    return tagsStr;
+  }
+
+  @Override
+  public void setTagsStr(String tagsStr) {
+    this.tagsStr = tagsStr;
+  }
+
+  @Override
+  public List<String> getTags() {
+    return tags;
+  }
+
+  @Override
+  public void setTags(List<String> tags) {
+    this.tags = tags;
+  }
+
+  @Override
+  public String getSession() {
+    return session;
+  }
+
+  @Override
+  public void setSession(String session) {
+    this.session = session;
+  }
+
+  @Override
+  public Integer getResult() {
+    return result;
+  }
+
+  @Override
+  public void setResult(Integer result) {
+    this.result = result;
+  }
+
+  @Override
+  public String getResource() {
+    return resource;
+  }
+
+  @Override
+  public void setResource(String resource) {
+    this.resource = resource;
+  }
+
+  @Override
+  public String getResponseType() {
+    return responseType;
+  }
+
+  public void setResponseType(String responseType) {
+    this.responseType = responseType;
+  }
+
+  @Override
+  public String getRequestUser() {
+    return requestUser;
+  }
+
+  @Override
+  public void setRequestUser(String requestUser) {
+    this.requestUser = requestUser;
+  }
+
+  @Override
+  public String getRequestData() {
+    return requestData;
+  }
+
+  @Override
+  public void setRequestData(String requestData) {
+    this.requestData = requestData;
+  }
+
+  @Override
+  public String getRepoType() {
+    return repoType;
+  }
+
+  @Override
+  public void setRepoType(String repoType) {
+    this.repoType = repoType;
+  }
+
+  @Override
+  public String getRepo() {
+    return repo;
+  }
+
+  @Override
+  public void setRepo(String repo) {
+    this.repo = repo;
+  }
+
+  @Override
+  public List<String> getProxyUsers() {
+    return proxyUsers;
+  }
+
+  @Override
+  public void setProxyUsers(List<String> proxyUsers) {
+    this.proxyUsers = proxyUsers;
+  }
+
+  @Override
+  public String getReason() {
+    return reason;
+  }
+
+  @Override
+  public void setReason(String reason) {
+    this.reason = reason;
+  }
+
+  @Override
+  public Date getEventTime() {
+    return eventTime;
+  }
+
+  @Override
+  public void setEventTime(Date eventTime) {
+    this.eventTime = eventTime;
+  }
+
+  @Override
+  public String getEnforcer() {
+    return enforcer;
+  }
+
+  @Override
+  public void setEnforcer(String enforcer) {
+    this.enforcer = enforcer;
+  }
+
+  @Override
+  public String getRequestContext() {
+    return requestContext;
+  }
+
+  @Override
+  public void setRequestContext(String requestContext) {
+    this.requestContext = requestContext;
+  }
+
+  @Override
+  public String getClientType() {
+    return clientType;
+  }
+
+  @Override
+  public void setClientType(String clientType) {
+    this.clientType = clientType;
+  }
+
+  @Override
+  public String getClientIp() {
+    return clientIp;
+  }
+
+  @Override
+  public void setClientIp(String clientIp) {
+    this.clientIp = clientIp;
+  }
+
+  @Override
+  public String getAgent() {
+    return agent;
+  }
+
+  @Override
+  public void setAgent(String agent) {
+    this.agent = agent;
+  }
+
+  @Override
+  public String getAgentHost() {
+    return agentHost;
+  }
+
+  @Override
+  public void setAgentHost(String agentHost) {
+    this.agentHost = agentHost;
+  }
+
+  @Override
+  public String getAction() {
+    return action;
+  }
+
+  @Override
+  public void setAction(String action) {
+    this.action = action;
+  }
+
+  @Override
+  public String getAccess() {
+    return access;
+  }
+
+  @Override
+  public void setAccess(String access) {
+    this.access = access;
+  }
+
+  @Override
+  public String getPolicy() {
+    return policy;
+  }
+
+  @Override
+  public void setPolicy(String policy) {
+    this.policy = policy;
+  }
+
+  @Override
+  public String getLogType() {
+    return logType;
+  }
+
+  @Override
+  public void setLogType(String logType) {
+    this.logType = logType;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrCommonLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrCommonLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrCommonLogData.java
new file mode 100644
index 0000000..d68279d
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrCommonLogData.java
@@ -0,0 +1,248 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.solr.model;
+
+import org.apache.ambari.logsearch.model.response.CommonLogData;
+import org.apache.solr.client.solrj.beans.Field;
+
+import java.util.Date;
+
+public class SolrCommonLogData implements CommonLogData {
+
+  @Field("id")
+  private String id;
+
+  @Field("bundle_id")
+  private String bundleId;
+
+  @Field("case_id")
+  private String caseId;
+
+  @Field("cluster")
+  private String cluster;
+
+  @Field("seq_num")
+  private Long seqNum;
+
+  @Field("log_message")
+  private String logMessage;
+
+  @Field("logfile_line_number")
+  private Integer logFileLineNumber;
+
+  @Field("event_dur_m5")
+  private Long eventDurationMs;
+
+  @Field("file")
+  private String file;
+
+  @Field("type")
+  private String type;
+
+  @Field("event_count")
+  private Long eventCount;
+
+  @Field("event_md5")
+  private String eventMd5;
+
+  @Field("message_md5")
+  private String messageMd5;
+
+  @Field("_ttl_")
+  private String ttl;
+
+  @Field("_expire_at_")
+  private Date expire;
+
+  @Field("_version_")
+  private Long version;
+
+  @Field("_router_field_")
+  private Integer routerField;
+
+  @Override
+  public String getId() {
+    return this.id;
+  }
+
+  @Override
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  @Override
+  public String getCaseId() {
+    return this.caseId;
+  }
+
+  @Override
+  public void setCaseId(String caseId) {
+    this.caseId = caseId;
+  }
+
+  @Override
+  public String getLogMessage() {
+    return this.logMessage;
+  }
+
+  @Override
+  public String getBundleId() {
+    return bundleId;
+  }
+
+  @Override
+  public void setBundleId(String bundleId) {
+    this.bundleId = bundleId;
+  }
+
+  @Override
+  public Integer getLogFileLineNumber() {
+    return logFileLineNumber;
+  }
+
+  @Override
+  public void setLogFileLineNumber(Integer logFileLineNumber) {
+    this.logFileLineNumber = logFileLineNumber;
+  }
+
+  @Override
+  public void setLogMessage(String logMessage) {
+    this.logMessage = logMessage;
+  }
+
+  @Override
+  public Long getEventDurationMs() {
+    return eventDurationMs;
+  }
+
+  @Override
+  public void setEventDurationMs(Long eventDurationMs) {
+    this.eventDurationMs = eventDurationMs;
+  }
+
+  @Override
+  public String getFile() {
+    return file;
+  }
+
+  @Override
+  public void setFile(String file) {
+    this.file = file;
+  }
+
+  @Override
+  public Long getSeqNum() {
+    return seqNum;
+  }
+
+  @Override
+  public void setSeqNum(Long seqNum) {
+    this.seqNum = seqNum;
+  }
+
+  @Override
+  public String getMessageMd5() {
+    return messageMd5;
+  }
+
+  @Override
+  public void setMessageMd5(String messageMd5) {
+    this.messageMd5 = messageMd5;
+  }
+
+  @Override
+  public String getEventMd5() {
+    return eventMd5;
+  }
+
+  @Override
+  public void setEventMd5(String eventMd5) {
+    this.eventMd5 = eventMd5;
+  }
+
+  @Override
+  public String getCluster() {
+    return cluster;
+  }
+
+  @Override
+  public void setCluster(String cluster) {
+    this.cluster = cluster;
+  }
+
+  @Override
+  public Long getEventCount() {
+    return eventCount;
+  }
+
+  @Override
+  public void setEventCount(Long eventCount) {
+    this.eventCount = eventCount;
+  }
+
+  @Override
+  public String getTtl() {
+    return this.ttl;
+  }
+
+  @Override
+  public void setTtl(String ttl) {
+    this.ttl = ttl;
+  }
+
+  @Override
+  public Date getExpire() {
+    return expire;
+  }
+
+  @Override
+  public void setExpire(Date expire) {
+    this.expire = expire;
+  }
+
+  @Override
+  public Long getVersion() {
+    return version;
+  }
+
+  @Override
+  public void setVersion(Long version) {
+    this.version = version;
+  }
+
+  @Override
+  public Integer getRouterField() {
+    return this.routerField;
+  }
+
+  @Override
+  public void setRouterField(Integer routerField) {
+    this.routerField = routerField;
+  }
+
+  @Override
+  public String getType() {
+    return type;
+  }
+
+  @Override
+  public void setType(String type) {
+    this.type = type;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrComponentTypeLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrComponentTypeLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrComponentTypeLogData.java
new file mode 100644
index 0000000..988c878
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrComponentTypeLogData.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.solr.model;
+
+import org.apache.ambari.logsearch.model.response.ComponentTypeLogData;
+import org.apache.solr.client.solrj.beans.Field;
+
+public class SolrComponentTypeLogData implements ComponentTypeLogData {
+
+  @Field("type")
+  private String type;
+
+  @Override
+  public String getType() {
+    return this.type;
+  }
+
+  @Override
+  public void setType(String type) {
+    this.type = type;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrHostLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrHostLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrHostLogData.java
new file mode 100644
index 0000000..215b22c
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrHostLogData.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.solr.model;
+
+import org.apache.ambari.logsearch.model.response.HostLogData;
+import org.apache.solr.client.solrj.beans.Field;
+
+public class SolrHostLogData implements HostLogData {
+
+  @Field("host")
+  private String host;
+
+  @Override
+  public String getHost() {
+    return host;
+  }
+
+  @Override
+  public void setHost(String host) {
+    this.host = host;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrServiceLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrServiceLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrServiceLogData.java
new file mode 100644
index 0000000..890dc7d
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrServiceLogData.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.solr.model;
+
+import org.apache.ambari.logsearch.model.response.ServiceLogData;
+import org.apache.solr.client.solrj.beans.Field;
+
+import java.util.Date;
+
+public class SolrServiceLogData extends SolrCommonLogData implements ServiceLogData {
+
+  @Field("level")
+  private String level;
+
+  @Field("line_number")
+  private Integer lineNumber;
+
+  @Field("logtime")
+  private Date logTime;
+
+  @Field("type")
+  private String type;
+
+  @Field("ip")
+  private String ip;
+
+  @Field("path")
+  private String path;
+
+  @Field("host")
+  private String host;
+
+  @Override
+  public String getPath() {
+    return path;
+  }
+
+  @Override
+  public void setPath(String path) {
+    this.path = path;
+  }
+
+  @Override
+  public String getIp() {
+    return ip;
+  }
+
+  @Override
+  public void setIp(String ip) {
+    this.ip = ip;
+  }
+
+  @Override
+  public String getType() {
+    return type;
+  }
+
+  @Override
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  @Override
+  public String getHost() {
+    return host;
+  }
+
+  @Override
+  public void setHost(String host) {
+    this.host = host;
+  }
+
+  @Override
+  public Date getLogTime() {
+    return logTime;
+  }
+
+  @Override
+  public void setLogTime(Date logTime) {
+    this.logTime = logTime;
+  }
+
+  @Override
+  public Integer getLineNumber() {
+    return lineNumber;
+  }
+
+  @Override
+  public void setLineNumber(Integer lineNumber) {
+    this.lineNumber = lineNumber;
+  }
+
+  @Override
+  public String getLevel() {
+    return level;
+  }
+
+  @Override
+  public void setLevel(String level) {
+    this.level = level;
+  }
+}
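
The new solr/model classes above (SolrCommonLogData, SolrAuditLogData, SolrComponentTypeLogData, SolrHostLogData, SolrServiceLogData) are plain SolrJ beans: each @Field annotation names the Solr field that the document binder copies into the member. The DAO code that actually runs the queries is not part of this hunk, so the following is only a hedged sketch of the usual SolrJ pattern; the query string, row limit, and client wiring are assumptions, not code from the commit.

// Illustrative sketch only -- shows how SolrJ maps result documents onto the
// @Field-annotated beans defined above via QueryResponse.getBeans().
import java.util.List;
import org.apache.ambari.logsearch.solr.model.SolrServiceLogData;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;

public class ServiceLogQuerySketch {

  private final SolrClient solrClient;        // assumed to point at the service-log collection

  public ServiceLogQuerySketch(SolrClient solrClient) {
    this.solrClient = solrClient;
  }

  public List<SolrServiceLogData> findErrors() throws Exception {
    SolrQuery query = new SolrQuery("level:ERROR");   // hypothetical filter
    query.setRows(10);
    QueryResponse response = solrClient.query(query);
    // The binder matches Solr field names to the @Field annotations on the bean.
    return response.getBeans(SolrServiceLogData.class);
  }
}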

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
index bd6cfbb..637a4d7 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
@@ -32,10 +32,10 @@ import java.util.Map.Entry;
 import java.util.Set;
 
 import org.apache.ambari.logsearch.common.LogSearchConstants;
-import org.apache.ambari.logsearch.view.VBarDataList;
-import org.apache.ambari.logsearch.view.VBarGraphData;
+import org.apache.ambari.logsearch.model.response.BarGraphData;
+import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse;
+import org.apache.ambari.logsearch.model.response.NameValueData;
 import org.apache.ambari.logsearch.view.VHost;
-import org.apache.ambari.logsearch.view.VNameValue;
 import org.apache.ambari.logsearch.view.VSummary;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.StringUtils;
@@ -173,41 +173,41 @@ public class BizUtil {
   }
 
   @SuppressWarnings({"unchecked", "rawtypes"})
-  public static VBarDataList buildSummaryForTopCounts(SimpleOrderedMap<Object> jsonFacetResponse,String innerJsonKey,String outerJsonKey) {
+  public static BarGraphDataListResponse buildSummaryForTopCounts(SimpleOrderedMap<Object> jsonFacetResponse, String innerJsonKey, String outerJsonKey) {
 
-    VBarDataList vBarDataList = new VBarDataList();
+    BarGraphDataListResponse barGraphDataListResponse = new BarGraphDataListResponse();
 
-    Collection<VBarGraphData> dataList = new ArrayList<VBarGraphData>();
+    Collection<BarGraphData> dataList = new ArrayList<>();
     if (jsonFacetResponse == null) {
       logger.info("Solr document list in null");
-      return vBarDataList;
+      return barGraphDataListResponse;
     }
     List<Object> userList = jsonFacetResponse.getAll(outerJsonKey);
     if (userList.isEmpty()) {
-      return vBarDataList;
+      return barGraphDataListResponse;
     }
     SimpleOrderedMap<Map<String, Object>> userMap = (SimpleOrderedMap<Map<String, Object>>) userList.get(0);
     if (userMap == null) {
       logger.info("No top user details found");
-      return vBarDataList;
+      return barGraphDataListResponse;
     }
     List<SimpleOrderedMap> userUsageList = (List<SimpleOrderedMap>) userMap.get("buckets");
     if(userUsageList == null){
-      return vBarDataList;
+      return barGraphDataListResponse;
     }
     for (SimpleOrderedMap usageMap : userUsageList) {
       if (usageMap != null) {
-        VBarGraphData vBarGraphData = new VBarGraphData();
+        BarGraphData barGraphData = new BarGraphData();
         String userName = (String) usageMap.get("val");
         if (!StringUtils.isBlank(userName)) {
-          vBarGraphData.setName(userName);
+          barGraphData.setName(userName);
         }
         SimpleOrderedMap repoMap = (SimpleOrderedMap) usageMap.get(innerJsonKey);
-        List<VNameValue> componetCountList = new ArrayList<VNameValue>();
-        List<SimpleOrderedMap> repoUsageList = (List<SimpleOrderedMap>) repoMap.get("buckets");
+        List<NameValueData> componetCountList = new ArrayList<NameValueData>();
         if (repoMap != null) {
+          List<SimpleOrderedMap> repoUsageList = (List<SimpleOrderedMap>) repoMap.get("buckets");
           for (SimpleOrderedMap repoUsageMap : repoUsageList) {
-            VNameValue componetCount = new VNameValue();
+            NameValueData componetCount = new NameValueData();
             if (repoUsageMap.get("val") != null) {
               componetCount.setName(repoUsageMap.get("val").toString());
             }
@@ -221,19 +221,19 @@ public class BizUtil {
             componetCount.setValue(eventCount);
             componetCountList.add(componetCount);
           }
-          vBarGraphData.setDataCounts(componetCountList);
-          dataList.add(vBarGraphData);
+          barGraphData.setDataCount(componetCountList);
+          dataList.add(barGraphData);
         }
       }}
-    vBarDataList.setGraphData(dataList);
+    barGraphDataListResponse.setGraphData(dataList);
     logger.info("getting graph data");
 
-    return vBarDataList;
+    return barGraphDataListResponse;
   }
   
   public static HashMap<String, String> sortHashMapByValues(HashMap<String, String> passedMap) {
     if (passedMap == null ) {
-      return passedMap;
+      return null;
     }
     HashMap<String, String> sortedMap = new LinkedHashMap<String, String>();
     List<String> mapValues = new ArrayList<String>(passedMap.values());
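
Beyond renaming the V* view classes to the model.response types, the buildSummaryForTopCounts change above also moves the "buckets" lookup inside the repoMap null check, so a missing inner facet no longer dereferences null. A hedged sketch of that defensive traversal, with hypothetical key names (not code from the commit):

// Illustrative sketch only: read a nested Solr JSON-facet bucket list defensively.
import java.util.Collections;
import java.util.List;
import org.apache.solr.common.util.SimpleOrderedMap;

public class FacetBucketSketch {

  @SuppressWarnings({"unchecked", "rawtypes"})
  public static List<SimpleOrderedMap> innerBuckets(SimpleOrderedMap<Object> usageMap) {
    SimpleOrderedMap repoMap = (SimpleOrderedMap) usageMap.get("repos");  // hypothetical inner key
    if (repoMap == null) {
      return Collections.emptyList();   // nothing to traverse; avoids the old NPE path
    }
    return (List<SimpleOrderedMap>) repoMap.get("buckets");
  }
}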

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VBarDataList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VBarDataList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VBarDataList.java
deleted file mode 100644
index b13946c..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VBarDataList.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import java.util.ArrayList;
-import java.util.Collection;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class VBarDataList {
-  protected  Collection<VBarGraphData> graphData;
-
-  public VBarDataList() {
-    graphData = new ArrayList<VBarGraphData>();
-  }
-
-  public Collection<VBarGraphData> getGraphData() {
-    return graphData;
-  }
-
-  public void setGraphData(Collection<VBarGraphData> histogramData) {
-    this.graphData = histogramData;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VBarGraphData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VBarGraphData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VBarGraphData.java
deleted file mode 100644
index 50fe47e..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VBarGraphData.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import java.util.Collection;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class VBarGraphData {
-  protected Collection<VNameValue> dataCount = null;
-  protected String name;
-
-  public String getName() {
-    return name;
-  }
-
-  public void setName(String level) {
-    this.name = level;
-  }
-
-  public Collection<VNameValue> getDataCount() {
-    return dataCount;
-  }
-
-  public void setDataCounts(Collection<VNameValue> dateValueCounts) {
-    this.dataCount = dateValueCounts;
-  }
-
-}


[39/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/production/r.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/production/r.js b/ambari-logsearch/ambari-logsearch-portal/production/r.js
deleted file mode 100644
index 6e6ffd0..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/production/r.js
+++ /dev/null
@@ -1,32140 +0,0 @@
-/**
- * @license r.js 2.1.22 Copyright (c) 2010-2015, The Dojo Foundation All Rights Reserved.
- * Available via the MIT or new BSD license.
- * see: http://github.com/jrburke/requirejs for details
- */
-
-/*
- * This is a bootstrap script to allow running RequireJS in the command line
- * in either a Java/Rhino or Node environment. It is modified by the top-level
- * dist.js file to inject other files to completely enable this file. It is
- * the shell of the r.js file.
- */
-
-/*jslint evil: true, nomen: true, sloppy: true */
-/*global readFile: true, process: false, Packages: false, print: false,
-console: false, java: false, module: false, requirejsVars, navigator,
-document, importScripts, self, location, Components, FileUtils */
-
-var requirejs, require, define, xpcUtil;
-(function (console, args, readFileFunc) {
-    var fileName, env, fs, vm, path, exec, rhinoContext, dir, nodeRequire,
-        nodeDefine, exists, reqMain, loadedOptimizedLib, existsForNode, Cc, Ci,
-        version = '2.1.22',
-        jsSuffixRegExp = /\.js$/,
-        commandOption = '',
-        useLibLoaded = {},
-        //Used by jslib/rhino/args.js
-        rhinoArgs = args,
-        //Used by jslib/xpconnect/args.js
-        xpconnectArgs = args,
-        readFile = typeof readFileFunc !== 'undefined' ? readFileFunc : null;
-
-    function showHelp() {
-        console.log('See https://github.com/jrburke/r.js for usage.');
-    }
-
-    if ((typeof navigator !== 'undefined' && typeof document !== 'undefined') ||
-            (typeof importScripts !== 'undefined' && typeof self !== 'undefined')) {
-        env = 'browser';
-
-        readFile = function (path) {
-            return fs.readFileSync(path, 'utf8');
-        };
-
-        exec = function (string) {
-            return eval(string);
-        };
-
-        exists = function () {
-            console.log('x.js exists not applicable in browser env');
-            return false;
-        };
-
-    } else if (typeof process !== 'undefined' && process.versions && !!process.versions.node) {
-        env = 'node';
-
-        //Get the fs module via Node's require before it
-        //gets replaced. Used in require/node.js
-        fs = require('fs');
-        vm = require('vm');
-        path = require('path');
-        //In Node 0.7+ existsSync is on fs.
-        existsForNode = fs.existsSync || path.existsSync;
-
-        nodeRequire = require;
-        nodeDefine = define;
-        reqMain = require.main;
-
-        //Temporarily hide require and define to allow require.js to define
-        //them.
-        require = undefined;
-        define = undefined;
-
-        readFile = function (path) {
-            return fs.readFileSync(path, 'utf8');
-        };
-
-        exec = function (string, name) {
-            return vm.runInThisContext(this.requirejsVars.require.makeNodeWrapper(string),
-                                       name ? fs.realpathSync(name) : '');
-        };
-
-        exists = function (fileName) {
-            return existsForNode(fileName);
-        };
-
-
-        fileName = process.argv[2];
-
-        if (fileName && fileName.indexOf('-') === 0) {
-            commandOption = fileName.substring(1);
-            fileName = process.argv[3];
-        }
-    } else if (typeof Packages !== 'undefined') {
-        env = 'rhino';
-
-        fileName = args[0];
-
-        if (fileName && fileName.indexOf('-') === 0) {
-            commandOption = fileName.substring(1);
-            fileName = args[1];
-        }
-
-        //Exec/readFile differs between Rhino and Nashorn. Rhino has an
-        //importPackage where Nashorn does not, so branch on that. This is a
-        //coarser check -- detecting readFile existence might also be enough for
-        //this spot. However, sticking with importPackage to keep it the same
-        //as other Rhino/Nashorn detection branches.
-        if (typeof importPackage !== 'undefined') {
-            rhinoContext = Packages.org.mozilla.javascript.ContextFactory.getGlobal().enterContext();
-
-            exec = function (string, name) {
-                return rhinoContext.evaluateString(this, string, name, 0, null);
-            };
-        } else {
-            exec = function (string, name) {
-                load({ script: string, name: name});
-            };
-            readFile = readFully;
-        }
-
-        exists = function (fileName) {
-            return (new java.io.File(fileName)).exists();
-        };
-
-        //Define a console.log for easier logging. Don't
-        //get fancy though.
-        if (typeof console === 'undefined') {
-            console = {
-                log: function () {
-                    print.apply(undefined, arguments);
-                }
-            };
-        }
-    } else if (typeof Components !== 'undefined' && Components.classes && Components.interfaces) {
-        env = 'xpconnect';
-
-        Components.utils['import']('resource://gre/modules/FileUtils.jsm');
-        Cc = Components.classes;
-        Ci = Components.interfaces;
-
-        fileName = args[0];
-
-        if (fileName && fileName.indexOf('-') === 0) {
-            commandOption = fileName.substring(1);
-            fileName = args[1];
-        }
-
-        xpcUtil = {
-            isWindows: ('@mozilla.org/windows-registry-key;1' in Cc),
-            cwd: function () {
-                return FileUtils.getFile("CurWorkD", []).path;
-            },
-
-            //Remove . and .. from paths, normalize on front slashes
-            normalize: function (path) {
-                //There has to be an easier way to do this.
-                var i, part, ary,
-                    firstChar = path.charAt(0);
-
-                if (firstChar !== '/' &&
-                        firstChar !== '\\' &&
-                        path.indexOf(':') === -1) {
-                    //A relative path. Use the current working directory.
-                    path = xpcUtil.cwd() + '/' + path;
-                }
-
-                ary = path.replace(/\\/g, '/').split('/');
-
-                for (i = 0; i < ary.length; i += 1) {
-                    part = ary[i];
-                    if (part === '.') {
-                        ary.splice(i, 1);
-                        i -= 1;
-                    } else if (part === '..') {
-                        ary.splice(i - 1, 2);
-                        i -= 2;
-                    }
-                }
-                return ary.join('/');
-            },
-
-            xpfile: function (path) {
-                var fullPath;
-                try {
-                    fullPath = xpcUtil.normalize(path);
-                    if (xpcUtil.isWindows) {
-                        fullPath = fullPath.replace(/\//g, '\\');
-                    }
-                    return new FileUtils.File(fullPath);
-                } catch (e) {
-                    throw new Error((fullPath || path) + ' failed: ' + e);
-                }
-            },
-
-            readFile: function (/*String*/path, /*String?*/encoding) {
-                //A file read function that can deal with BOMs
-                encoding = encoding || "utf-8";
-
-                var inStream, convertStream,
-                    readData = {},
-                    fileObj = xpcUtil.xpfile(path);
-
-                //XPCOM, you so crazy
-                try {
-                    inStream = Cc['@mozilla.org/network/file-input-stream;1']
-                               .createInstance(Ci.nsIFileInputStream);
-                    inStream.init(fileObj, 1, 0, false);
-
-                    convertStream = Cc['@mozilla.org/intl/converter-input-stream;1']
-                                    .createInstance(Ci.nsIConverterInputStream);
-                    convertStream.init(inStream, encoding, inStream.available(),
-                    Ci.nsIConverterInputStream.DEFAULT_REPLACEMENT_CHARACTER);
-
-                    convertStream.readString(inStream.available(), readData);
-                    return readData.value;
-                } catch (e) {
-                    throw new Error((fileObj && fileObj.path || '') + ': ' + e);
-                } finally {
-                    if (convertStream) {
-                        convertStream.close();
-                    }
-                    if (inStream) {
-                        inStream.close();
-                    }
-                }
-            }
-        };
-
-        readFile = xpcUtil.readFile;
-
-        exec = function (string) {
-            return eval(string);
-        };
-
-        exists = function (fileName) {
-            return xpcUtil.xpfile(fileName).exists();
-        };
-
-        //Define a console.log for easier logging. Don't
-        //get fancy though.
-        if (typeof console === 'undefined') {
-            console = {
-                log: function () {
-                    print.apply(undefined, arguments);
-                }
-            };
-        }
-    }
-
-    /** vim: et:ts=4:sw=4:sts=4
- * @license RequireJS 2.1.22 Copyright (c) 2010-2015, The Dojo Foundation All Rights Reserved.
- * Available via the MIT or new BSD license.
- * see: http://github.com/jrburke/requirejs for details
- */
-//Not using strict: uneven strict support in browsers, #392, and causes
-//problems with requirejs.exec()/transpiler plugins that may not be strict.
-/*jslint regexp: true, nomen: true, sloppy: true */
-/*global window, navigator, document, importScripts, setTimeout, opera */
-
-
-(function (global) {
-    var req, s, head, baseElement, dataMain, src,
-        interactiveScript, currentlyAddingScript, mainScript, subPath,
-        version = '2.1.22',
-        commentRegExp = /(\/\*([\s\S]*?)\*\/|([^:]|^)\/\/(.*)$)/mg,
-        cjsRequireRegExp = /[^.]\s*require\s*\(\s*["']([^'"\s]+)["']\s*\)/g,
-        jsSuffixRegExp = /\.js$/,
-        currDirRegExp = /^\.\//,
-        op = Object.prototype,
-        ostring = op.toString,
-        hasOwn = op.hasOwnProperty,
-        ap = Array.prototype,
-        isBrowser = !!(typeof window !== 'undefined' && typeof navigator !== 'undefined' && window.document),
-        isWebWorker = !isBrowser && typeof importScripts !== 'undefined',
-        //PS3 indicates loaded and complete, but need to wait for complete
-        //specifically. Sequence is 'loading', 'loaded', execution,
-        // then 'complete'. The UA check is unfortunate, but not sure how
-        //to feature test w/o causing perf issues.
-        readyRegExp = isBrowser && navigator.platform === 'PLAYSTATION 3' ?
-                      /^complete$/ : /^(complete|loaded)$/,
-        defContextName = '_',
-        //Oh the tragedy, detecting opera. See the usage of isOpera for reason.
-        isOpera = typeof opera !== 'undefined' && opera.toString() === '[object Opera]',
-        contexts = {},
-        cfg = {},
-        globalDefQueue = [],
-        useInteractive = false;
-
-    function isFunction(it) {
-        return ostring.call(it) === '[object Function]';
-    }
-
-    function isArray(it) {
-        return ostring.call(it) === '[object Array]';
-    }
-
-    /**
-     * Helper function for iterating over an array. If the func returns
-     * a true value, it will break out of the loop.
-     */
-    function each(ary, func) {
-        if (ary) {
-            var i;
-            for (i = 0; i < ary.length; i += 1) {
-                if (ary[i] && func(ary[i], i, ary)) {
-                    break;
-                }
-            }
-        }
-    }
-
-    /**
-     * Helper function for iterating over an array backwards. If the func
-     * returns a true value, it will break out of the loop.
-     */
-    function eachReverse(ary, func) {
-        if (ary) {
-            var i;
-            for (i = ary.length - 1; i > -1; i -= 1) {
-                if (ary[i] && func(ary[i], i, ary)) {
-                    break;
-                }
-            }
-        }
-    }
-
-    function hasProp(obj, prop) {
-        return hasOwn.call(obj, prop);
-    }
-
-    function getOwn(obj, prop) {
-        return hasProp(obj, prop) && obj[prop];
-    }
-
-    /**
-     * Cycles over properties in an object and calls a function for each
-     * property value. If the function returns a truthy value, then the
-     * iteration is stopped.
-     */
-    function eachProp(obj, func) {
-        var prop;
-        for (prop in obj) {
-            if (hasProp(obj, prop)) {
-                if (func(obj[prop], prop)) {
-                    break;
-                }
-            }
-        }
-    }
-
-    /**
-     * Simple function to mix in properties from source into target,
-     * but only if target does not already have a property of the same name.
-     */
-    function mixin(target, source, force, deepStringMixin) {
-        if (source) {
-            eachProp(source, function (value, prop) {
-                if (force || !hasProp(target, prop)) {
-                    if (deepStringMixin && typeof value === 'object' && value &&
-                        !isArray(value) && !isFunction(value) &&
-                        !(value instanceof RegExp)) {
-
-                        if (!target[prop]) {
-                            target[prop] = {};
-                        }
-                        mixin(target[prop], value, force, deepStringMixin);
-                    } else {
-                        target[prop] = value;
-                    }
-                }
-            });
-        }
-        return target;
-    }
-
-    //Similar to Function.prototype.bind, but the 'this' object is specified
-    //first, since it is easier to read/figure out what 'this' will be.
-    function bind(obj, fn) {
-        return function () {
-            return fn.apply(obj, arguments);
-        };
-    }
-
-    function scripts() {
-        return document.getElementsByTagName('script');
-    }
-
-    function defaultOnError(err) {
-        throw err;
-    }
-
-    //Allow getting a global that is expressed in
-    //dot notation, like 'a.b.c'.
-    function getGlobal(value) {
-        if (!value) {
-            return value;
-        }
-        var g = global;
-        each(value.split('.'), function (part) {
-            g = g[part];
-        });
-        return g;
-    }
-
-    /**
-     * Constructs an error with a pointer to an URL with more information.
-     * @param {String} id the error ID that maps to an ID on a web page.
-     * @param {String} message human readable error.
-     * @param {Error} [err] the original error, if there is one.
-     *
-     * @returns {Error}
-     */
-    function makeError(id, msg, err, requireModules) {
-        var e = new Error(msg + '\nhttp://requirejs.org/docs/errors.html#' + id);
-        e.requireType = id;
-        e.requireModules = requireModules;
-        if (err) {
-            e.originalError = err;
-        }
-        return e;
-    }
-
-    if (typeof define !== 'undefined') {
-        //If a define is already in play via another AMD loader,
-        //do not overwrite.
-        return;
-    }
-
-    if (typeof requirejs !== 'undefined') {
-        if (isFunction(requirejs)) {
-            //Do not overwrite an existing requirejs instance.
-            return;
-        }
-        cfg = requirejs;
-        requirejs = undefined;
-    }
-
-    //Allow for a require config object
-    if (typeof require !== 'undefined' && !isFunction(require)) {
-        //assume it is a config object.
-        cfg = require;
-        require = undefined;
-    }
-
-    function newContext(contextName) {
-        var inCheckLoaded, Module, context, handlers,
-            checkLoadedTimeoutId,
-            config = {
-                //Defaults. Do not set a default for map
-                //config to speed up normalize(), which
-                //will run faster if there is no default.
-                waitSeconds: 7,
-                baseUrl: './',
-                paths: {},
-                bundles: {},
-                pkgs: {},
-                shim: {},
-                config: {}
-            },
-            registry = {},
-            //registry of just enabled modules, to speed
-            //cycle breaking code when lots of modules
-            //are registered, but not activated.
-            enabledRegistry = {},
-            undefEvents = {},
-            defQueue = [],
-            defined = {},
-            urlFetched = {},
-            bundlesMap = {},
-            requireCounter = 1,
-            unnormalizedCounter = 1;
-
-        /**
-         * Trims the . and .. from an array of path segments.
-         * It will keep a leading path segment if a .. will become
-         * the first path segment, to help with module name lookups,
-         * which act like paths, but can be remapped. But the end result,
-         * all paths that use this function should look normalized.
-         * NOTE: this method MODIFIES the input array.
-         * @param {Array} ary the array of path segments.
-         */
-        function trimDots(ary) {
-            var i, part;
-            for (i = 0; i < ary.length; i++) {
-                part = ary[i];
-                if (part === '.') {
-                    ary.splice(i, 1);
-                    i -= 1;
-                } else if (part === '..') {
-                    // If at the start, or previous value is still ..,
-                    // keep them so that when converted to a path it may
-                    // still work when converted to a path, even though
-                    // as an ID it is less than ideal. In larger point
-                    // releases, may be better to just kick out an error.
-                    if (i === 0 || (i === 1 && ary[2] === '..') || ary[i - 1] === '..') {
-                        continue;
-                    } else if (i > 0) {
-                        ary.splice(i - 1, 2);
-                        i -= 2;
-                    }
-                }
-            }
-        }
-
-        /**
-         * Given a relative module name, like ./something, normalize it to
-         * a real name that can be mapped to a path.
-         * @param {String} name the relative name
-         * @param {String} baseName a real name that the name arg is relative
-         * to.
-         * @param {Boolean} applyMap apply the map config to the value. Should
-         * only be done if this normalization is for a dependency ID.
-         * @returns {String} normalized name
-         */
-        function normalize(name, baseName, applyMap) {
-            var pkgMain, mapValue, nameParts, i, j, nameSegment, lastIndex,
-                foundMap, foundI, foundStarMap, starI, normalizedBaseParts,
-                baseParts = (baseName && baseName.split('/')),
-                map = config.map,
-                starMap = map && map['*'];
-
-            //Adjust any relative paths.
-            if (name) {
-                name = name.split('/');
-                lastIndex = name.length - 1;
-
-                // If wanting node ID compatibility, strip .js from end
-                // of IDs. Have to do this here, and not in nameToUrl
-                // because node allows either .js or non .js to map
-                // to same file.
-                if (config.nodeIdCompat && jsSuffixRegExp.test(name[lastIndex])) {
-                    name[lastIndex] = name[lastIndex].replace(jsSuffixRegExp, '');
-                }
-
-                // Starts with a '.' so need the baseName
-                if (name[0].charAt(0) === '.' && baseParts) {
-                    //Convert baseName to array, and lop off the last part,
-                    //so that . matches that 'directory' and not name of the baseName's
-                    //module. For instance, baseName of 'one/two/three', maps to
-                    //'one/two/three.js', but we want the directory, 'one/two' for
-                    //this normalization.
-                    normalizedBaseParts = baseParts.slice(0, baseParts.length - 1);
-                    name = normalizedBaseParts.concat(name);
-                }
-
-                trimDots(name);
-                name = name.join('/');
-            }
-
-            //Apply map config if available.
-            if (applyMap && map && (baseParts || starMap)) {
-                nameParts = name.split('/');
-
-                outerLoop: for (i = nameParts.length; i > 0; i -= 1) {
-                    nameSegment = nameParts.slice(0, i).join('/');
-
-                    if (baseParts) {
-                        //Find the longest baseName segment match in the config.
-                        //So, do joins on the biggest to smallest lengths of baseParts.
-                        for (j = baseParts.length; j > 0; j -= 1) {
-                            mapValue = getOwn(map, baseParts.slice(0, j).join('/'));
-
-                            //baseName segment has config, find if it has one for
-                            //this name.
-                            if (mapValue) {
-                                mapValue = getOwn(mapValue, nameSegment);
-                                if (mapValue) {
-                                    //Match, update name to the new value.
-                                    foundMap = mapValue;
-                                    foundI = i;
-                                    break outerLoop;
-                                }
-                            }
-                        }
-                    }
-
-                    //Check for a star map match, but just hold on to it;
-                    //if there is a shorter segment match later in a matching
-                    //config, then favor that over this star map.
-                    if (!foundStarMap && starMap && getOwn(starMap, nameSegment)) {
-                        foundStarMap = getOwn(starMap, nameSegment);
-                        starI = i;
-                    }
-                }
-
-                if (!foundMap && foundStarMap) {
-                    foundMap = foundStarMap;
-                    foundI = starI;
-                }
-
-                if (foundMap) {
-                    nameParts.splice(0, foundI, foundMap);
-                    name = nameParts.join('/');
-                }
-            }
-
-            // If the name points to a package's name, use
-            // the package main instead.
-            pkgMain = getOwn(config.pkgs, name);
-
-            return pkgMain ? pkgMain : name;
-        }
-
-        function removeScript(name) {
-            if (isBrowser) {
-                each(scripts(), function (scriptNode) {
-                    if (scriptNode.getAttribute('data-requiremodule') === name &&
-                            scriptNode.getAttribute('data-requirecontext') === context.contextName) {
-                        scriptNode.parentNode.removeChild(scriptNode);
-                        return true;
-                    }
-                });
-            }
-        }
-
-        function hasPathFallback(id) {
-            var pathConfig = getOwn(config.paths, id);
-            if (pathConfig && isArray(pathConfig) && pathConfig.length > 1) {
-                //Pop off the first array value, since it failed, and
-                //retry
-                pathConfig.shift();
-                context.require.undef(id);
-
-                //Custom require that does not do map translation, since
-                //ID is "absolute", already mapped/resolved.
-                context.makeRequire(null, {
-                    skipMap: true
-                })([id]);
-
-                return true;
-            }
-        }
-
-        //Turns a plugin!resource to [plugin, resource]
-        //with the plugin being undefined if the name
-        //did not have a plugin prefix.
-        function splitPrefix(name) {
-            var prefix,
-                index = name ? name.indexOf('!') : -1;
-            if (index > -1) {
-                prefix = name.substring(0, index);
-                name = name.substring(index + 1, name.length);
-            }
-            return [prefix, name];
-        }
-
-        /**
-         * Creates a module mapping that includes plugin prefix, module
-         * name, and path. If parentModuleMap is provided it will
-         * also normalize the name via require.normalize()
-         *
-         * @param {String} name the module name
-         * @param {Object} [parentModuleMap] parent module map
-         * for the module name, used to resolve relative names.
-         * @param {Boolean} isNormalized is the ID already normalized.
-         * This is true if this call is done for a define() module ID.
-         * @param {Boolean} applyMap apply the map config to the ID.
-         * Should only be true if this map is for a dependency.
-         *
-         * @returns {Object}
-         */
-        function makeModuleMap(name, parentModuleMap, isNormalized, applyMap) {
-            var url, pluginModule, suffix, nameParts,
-                prefix = null,
-                parentName = parentModuleMap ? parentModuleMap.name : null,
-                originalName = name,
-                isDefine = true,
-                normalizedName = '';
-
-            //If no name, then it means it is a require call; generate an
-            //internal name.
-            if (!name) {
-                isDefine = false;
-                name = '_@r' + (requireCounter += 1);
-            }
-
-            nameParts = splitPrefix(name);
-            prefix = nameParts[0];
-            name = nameParts[1];
-
-            if (prefix) {
-                prefix = normalize(prefix, parentName, applyMap);
-                pluginModule = getOwn(defined, prefix);
-            }
-
-            //Account for relative paths if there is a base name.
-            if (name) {
-                if (prefix) {
-                    if (pluginModule && pluginModule.normalize) {
-                        //Plugin is loaded, use its normalize method.
-                        normalizedName = pluginModule.normalize(name, function (name) {
-                            return normalize(name, parentName, applyMap);
-                        });
-                    } else {
-                        // If there are nested plugin references, do not try to
-                        // normalize, as it will not normalize correctly. This
-                        // places a restriction on resourceIds, and the longer
-                        // term solution is to not normalize until plugins are
-                        // loaded and all normalizations can happen, to allow
-                        // for async loading of a loader plugin. But for now,
-                        // this fixes the common uses. Details in #1131
-                        normalizedName = name.indexOf('!') === -1 ?
-                                         normalize(name, parentName, applyMap) :
-                                         name;
-                    }
-                } else {
-                    //A regular module.
-                    normalizedName = normalize(name, parentName, applyMap);
-
-                    //Normalized name may be a plugin ID due to map config
-                    //application in normalize. The map config values must
-                    //already be normalized, so do not need to redo that part.
-                    nameParts = splitPrefix(normalizedName);
-                    prefix = nameParts[0];
-                    normalizedName = nameParts[1];
-                    isNormalized = true;
-
-                    url = context.nameToUrl(normalizedName);
-                }
-            }
-
-            //If the id is a plugin id and it cannot yet be determined whether
-            //it needs normalization, stamp it with a unique ID so two matching
-            //relative ids that may conflict can be kept separate.
-            suffix = prefix && !pluginModule && !isNormalized ?
-                     '_unnormalized' + (unnormalizedCounter += 1) :
-                     '';
-
-            return {
-                prefix: prefix,
-                name: normalizedName,
-                parentMap: parentModuleMap,
-                unnormalized: !!suffix,
-                url: url,
-                originalName: originalName,
-                isDefine: isDefine,
-                id: (prefix ?
-                        prefix + '!' + normalizedName :
-                        normalizedName) + suffix
-            };
-        }
-
-        function getModule(depMap) {
-            var id = depMap.id,
-                mod = getOwn(registry, id);
-
-            if (!mod) {
-                mod = registry[id] = new context.Module(depMap);
-            }
-
-            return mod;
-        }
-
-        function on(depMap, name, fn) {
-            var id = depMap.id,
-                mod = getOwn(registry, id);
-
-            if (hasProp(defined, id) &&
-                    (!mod || mod.defineEmitComplete)) {
-                if (name === 'defined') {
-                    fn(defined[id]);
-                }
-            } else {
-                mod = getModule(depMap);
-                if (mod.error && name === 'error') {
-                    fn(mod.error);
-                } else {
-                    mod.on(name, fn);
-                }
-            }
-        }
-
-        function onError(err, errback) {
-            var ids = err.requireModules,
-                notified = false;
-
-            if (errback) {
-                errback(err);
-            } else {
-                each(ids, function (id) {
-                    var mod = getOwn(registry, id);
-                    if (mod) {
-                        //Set error on module, so it skips timeout checks.
-                        mod.error = err;
-                        if (mod.events.error) {
-                            notified = true;
-                            mod.emit('error', err);
-                        }
-                    }
-                });
-
-                if (!notified) {
-                    req.onError(err);
-                }
-            }
-        }
-
-        /**
-         * Internal method to transfer globalQueue items to this context's
-         * defQueue.
-         */
-        function takeGlobalQueue() {
-            //Push all the globalDefQueue items into the context's defQueue
-            if (globalDefQueue.length) {
-                each(globalDefQueue, function(queueItem) {
-                    var id = queueItem[0];
-                    if (typeof id === 'string') {
-                        context.defQueueMap[id] = true;
-                    }
-                    defQueue.push(queueItem);
-                });
-                globalDefQueue = [];
-            }
-        }
-
-        handlers = {
-            'require': function (mod) {
-                if (mod.require) {
-                    return mod.require;
-                } else {
-                    return (mod.require = context.makeRequire(mod.map));
-                }
-            },
-            'exports': function (mod) {
-                mod.usingExports = true;
-                if (mod.map.isDefine) {
-                    if (mod.exports) {
-                        return (defined[mod.map.id] = mod.exports);
-                    } else {
-                        return (mod.exports = defined[mod.map.id] = {});
-                    }
-                }
-            },
-            'module': function (mod) {
-                if (mod.module) {
-                    return mod.module;
-                } else {
-                    return (mod.module = {
-                        id: mod.map.id,
-                        uri: mod.map.url,
-                        config: function () {
-                            return getOwn(config.config, mod.map.id) || {};
-                        },
-                        exports: mod.exports || (mod.exports = {})
-                    });
-                }
-            }
-        };
-
-        function cleanRegistry(id) {
-            //Clean up machinery used for waiting modules.
-            delete registry[id];
-            delete enabledRegistry[id];
-        }
-
-        function breakCycle(mod, traced, processed) {
-            var id = mod.map.id;
-
-            if (mod.error) {
-                mod.emit('error', mod.error);
-            } else {
-                traced[id] = true;
-                each(mod.depMaps, function (depMap, i) {
-                    var depId = depMap.id,
-                        dep = getOwn(registry, depId);
-
-                    //Only force things that have not completed
-                    //being defined, so still in the registry,
-                    //and only if it has not been matched up
-                    //in the module already.
-                    if (dep && !mod.depMatched[i] && !processed[depId]) {
-                        if (getOwn(traced, depId)) {
-                            mod.defineDep(i, defined[depId]);
-                            mod.check(); //pass false?
-                        } else {
-                            breakCycle(dep, traced, processed);
-                        }
-                    }
-                });
-                processed[id] = true;
-            }
-        }
-
-        function checkLoaded() {
-            var err, usingPathFallback,
-                waitInterval = config.waitSeconds * 1000,
-                //It is possible to disable the wait interval by using waitSeconds of 0.
-                expired = waitInterval && (context.startTime + waitInterval) < new Date().getTime(),
-                noLoads = [],
-                reqCalls = [],
-                stillLoading = false,
-                needCycleCheck = true;
-
-            //Do not bother if this call was a result of a cycle break.
-            if (inCheckLoaded) {
-                return;
-            }
-
-            inCheckLoaded = true;
-
-            //Figure out the state of all the modules.
-            eachProp(enabledRegistry, function (mod) {
-                var map = mod.map,
-                    modId = map.id;
-
-                //Skip things that are not enabled or in error state.
-                if (!mod.enabled) {
-                    return;
-                }
-
-                if (!map.isDefine) {
-                    reqCalls.push(mod);
-                }
-
-                if (!mod.error) {
-                    //If the module should be executed, and it has not
-                    //been inited and time is up, remember it.
-                    if (!mod.inited && expired) {
-                        if (hasPathFallback(modId)) {
-                            usingPathFallback = true;
-                            stillLoading = true;
-                        } else {
-                            noLoads.push(modId);
-                            removeScript(modId);
-                        }
-                    } else if (!mod.inited && mod.fetched && map.isDefine) {
-                        stillLoading = true;
-                        if (!map.prefix) {
-                            //No reason to keep looking for unfinished
-                            //loading. If the only stillLoading is a
-                            //plugin resource though, keep going,
-                            //because it may be that a plugin resource
-                            //is waiting on a non-plugin cycle.
-                            return (needCycleCheck = false);
-                        }
-                    }
-                }
-            });
-
-            if (expired && noLoads.length) {
-                //If wait time expired, throw error of unloaded modules.
-                err = makeError('timeout', 'Load timeout for modules: ' + noLoads, null, noLoads);
-                err.contextName = context.contextName;
-                return onError(err);
-            }
-
-            //Not expired, check for a cycle.
-            if (needCycleCheck) {
-                each(reqCalls, function (mod) {
-                    breakCycle(mod, {}, {});
-                });
-            }
-
-            //If still waiting on loads, and the waiting load is something
-            //other than a plugin resource, or there are still outstanding
-            //scripts, then just try back later.
-            if ((!expired || usingPathFallback) && stillLoading) {
-                //Something is still waiting to load. Wait for it, but only
-                //if a timeout is not already in effect.
-                if ((isBrowser || isWebWorker) && !checkLoadedTimeoutId) {
-                    checkLoadedTimeoutId = setTimeout(function () {
-                        checkLoadedTimeoutId = 0;
-                        checkLoaded();
-                    }, 50);
-                }
-            }
-
-            inCheckLoaded = false;
-        }
-
-        Module = function (map) {
-            this.events = getOwn(undefEvents, map.id) || {};
-            this.map = map;
-            this.shim = getOwn(config.shim, map.id);
-            this.depExports = [];
-            this.depMaps = [];
-            this.depMatched = [];
-            this.pluginMaps = {};
-            this.depCount = 0;
-
-            /* this.exports this.factory
-               this.depMaps = [],
-               this.enabled, this.fetched
-            */
-        };
-
-        Module.prototype = {
-            init: function (depMaps, factory, errback, options) {
-                options = options || {};
-
-                //Do not do more inits if already done. Can happen if there
-                //are multiple define calls for the same module. That is not
-                //a normal, common case, but it is also not unexpected.
-                if (this.inited) {
-                    return;
-                }
-
-                this.factory = factory;
-
-                if (errback) {
-                    //Register for errors on this module.
-                    this.on('error', errback);
-                } else if (this.events.error) {
-                    //If no errback already, but there are error listeners
-                    //on this module, set up an errback to pass to the deps.
-                    errback = bind(this, function (err) {
-                        this.emit('error', err);
-                    });
-                }
-
-                //Do a copy of the dependency array, so that
-                //source inputs are not modified. For example
-                //"shim" deps are passed in here directly, and
-                //doing a direct modification of the depMaps array
-                //would affect that config.
-                this.depMaps = depMaps && depMaps.slice(0);
-
-                this.errback = errback;
-
-                //Indicate this module has been initialized
-                this.inited = true;
-
-                this.ignore = options.ignore;
-
-                //Could have option to init this module in enabled mode,
-                //or could have been previously marked as enabled. However,
-                //the dependencies are not known until init is called. So
-                //if enabled previously, now trigger dependencies as enabled.
-                if (options.enabled || this.enabled) {
-                    //Enable this module and dependencies.
-                    //Will call this.check()
-                    this.enable();
-                } else {
-                    this.check();
-                }
-            },
-
-            defineDep: function (i, depExports) {
-                //Because of cycles, defined callback for a given
-                //export can be called more than once.
-                if (!this.depMatched[i]) {
-                    this.depMatched[i] = true;
-                    this.depCount -= 1;
-                    this.depExports[i] = depExports;
-                }
-            },
-
-            fetch: function () {
-                if (this.fetched) {
-                    return;
-                }
-                this.fetched = true;
-
-                context.startTime = (new Date()).getTime();
-
-                var map = this.map;
-
-                //If the manager is for a plugin managed resource,
-                //ask the plugin to load it now.
-                if (this.shim) {
-                    context.makeRequire(this.map, {
-                        enableBuildCallback: true
-                    })(this.shim.deps || [], bind(this, function () {
-                        return map.prefix ? this.callPlugin() : this.load();
-                    }));
-                } else {
-                    //Regular dependency.
-                    return map.prefix ? this.callPlugin() : this.load();
-                }
-            },
-
-            load: function () {
-                var url = this.map.url;
-
-                //Regular dependency.
-                if (!urlFetched[url]) {
-                    urlFetched[url] = true;
-                    context.load(this.map.id, url);
-                }
-            },
-
-            /**
-             * Checks if the module is ready to define itself, and if so,
-             * defines it.
-             */
-            check: function () {
-                if (!this.enabled || this.enabling) {
-                    return;
-                }
-
-                var err, cjsModule,
-                    id = this.map.id,
-                    depExports = this.depExports,
-                    exports = this.exports,
-                    factory = this.factory;
-
-                if (!this.inited) {
-                    // Only fetch if not already in the defQueue.
-                    if (!hasProp(context.defQueueMap, id)) {
-                        this.fetch();
-                    }
-                } else if (this.error) {
-                    this.emit('error', this.error);
-                } else if (!this.defining) {
-                    //The factory could trigger another require call
-                    //that would result in checking this module to
-                    //define itself again. If already in the process
-                    //of doing that, skip this work.
-                    this.defining = true;
-
-                    if (this.depCount < 1 && !this.defined) {
-                        if (isFunction(factory)) {
-                            try {
-                                exports = context.execCb(id, factory, depExports, exports);
-                            } catch (e) {
-                                err = e;
-                            }
-
-                            // Favor return value over exports. If node/cjs in play,
-                            // then will not have a return value anyway. Favor
-                            // module.exports assignment over exports object.
-                            if (this.map.isDefine && exports === undefined) {
-                                cjsModule = this.module;
-                                if (cjsModule) {
-                                    exports = cjsModule.exports;
-                                } else if (this.usingExports) {
-                                    //exports already set the defined value.
-                                    exports = this.exports;
-                                }
-                            }
-
-                            if (err) {
-                                // If there is an error listener, favor passing
-                                // to that instead of throwing an error. However,
-                                // only do it for define()'d  modules. require
-                                // errbacks should not be called for failures in
-                                // their callbacks (#699). However if a global
-                                // onError is set, use that.
-                                if ((this.events.error && this.map.isDefine) ||
-                                    req.onError !== defaultOnError) {
-                                    err.requireMap = this.map;
-                                    err.requireModules = this.map.isDefine ? [this.map.id] : null;
-                                    err.requireType = this.map.isDefine ? 'define' : 'require';
-                                    return onError((this.error = err));
-                                } else if (typeof console !== 'undefined' &&
-                                           console.error) {
-                                    // Log the error for debugging. If promises could be
-                                    // used, this would be different, but making do.
-                                    console.error(err);
-                                } else {
-                                    // Do not want to completely lose the error. While this
-                                    // will mess up processing and lead to similar results
-                                    // as bug 1440, it at least surfaces the error.
-                                    req.onError(err);
-                                }
-                            }
-                        } else {
-                            //Just a literal value
-                            exports = factory;
-                        }
-
-                        this.exports = exports;
-
-                        if (this.map.isDefine && !this.ignore) {
-                            defined[id] = exports;
-
-                            if (req.onResourceLoad) {
-                                var resLoadMaps = [];
-                                each(this.depMaps, function (depMap) {
-                                    resLoadMaps.push(depMap.normalizedMap || depMap);
-                                });
-                                req.onResourceLoad(context, this.map, resLoadMaps);
-                            }
-                        }
-
-                        //Clean up
-                        cleanRegistry(id);
-
-                        this.defined = true;
-                    }
-
-                    //Finished the define stage. Allow calling check again
-                    //to allow define notifications below in the case of a
-                    //cycle.
-                    this.defining = false;
-
-                    if (this.defined && !this.defineEmitted) {
-                        this.defineEmitted = true;
-                        this.emit('defined', this.exports);
-                        this.defineEmitComplete = true;
-                    }
-
-                }
-            },
-
-            callPlugin: function () {
-                var map = this.map,
-                    id = map.id,
-                    //Map already normalized the prefix.
-                    pluginMap = makeModuleMap(map.prefix);
-
-                //Mark this as a dependency for this plugin, so it
-                //can be traced for cycles.
-                this.depMaps.push(pluginMap);
-
-                on(pluginMap, 'defined', bind(this, function (plugin) {
-                    var load, normalizedMap, normalizedMod,
-                        bundleId = getOwn(bundlesMap, this.map.id),
-                        name = this.map.name,
-                        parentName = this.map.parentMap ? this.map.parentMap.name : null,
-                        localRequire = context.makeRequire(map.parentMap, {
-                            enableBuildCallback: true
-                        });
-
-                    //If current map is not normalized, wait for that
-                    //normalized name to load instead of continuing.
-                    if (this.map.unnormalized) {
-                        //Normalize the ID if the plugin allows it.
-                        if (plugin.normalize) {
-                            name = plugin.normalize(name, function (name) {
-                                return normalize(name, parentName, true);
-                            }) || '';
-                        }
-
-                        //prefix and name should already be normalized, no need
-                        //for applying map config again either.
-                        normalizedMap = makeModuleMap(map.prefix + '!' + name,
-                                                      this.map.parentMap);
-                        on(normalizedMap,
-                            'defined', bind(this, function (value) {
-                                this.map.normalizedMap = normalizedMap;
-                                this.init([], function () { return value; }, null, {
-                                    enabled: true,
-                                    ignore: true
-                                });
-                            }));
-
-                        normalizedMod = getOwn(registry, normalizedMap.id);
-                        if (normalizedMod) {
-                            //Mark this as a dependency for this plugin, so it
-                            //can be traced for cycles.
-                            this.depMaps.push(normalizedMap);
-
-                            if (this.events.error) {
-                                normalizedMod.on('error', bind(this, function (err) {
-                                    this.emit('error', err);
-                                }));
-                            }
-                            normalizedMod.enable();
-                        }
-
-                        return;
-                    }
-
-                    //If this resource has a bundles config entry, just load
-                    //that bundle file instead to resolve the plugin resource,
-                    //as it is built into that bundle.
-                    if (bundleId) {
-                        this.map.url = context.nameToUrl(bundleId);
-                        this.load();
-                        return;
-                    }
-
-                    load = bind(this, function (value) {
-                        this.init([], function () { return value; }, null, {
-                            enabled: true
-                        });
-                    });
-
-                    load.error = bind(this, function (err) {
-                        this.inited = true;
-                        this.error = err;
-                        err.requireModules = [id];
-
-                        //Remove temp unnormalized modules for this module,
-                        //since they will never be resolved otherwise now.
-                        eachProp(registry, function (mod) {
-                            if (mod.map.id.indexOf(id + '_unnormalized') === 0) {
-                                cleanRegistry(mod.map.id);
-                            }
-                        });
-
-                        onError(err);
-                    });
-
-                    //Allow plugins to load other code without having to know the
-                    //context or how to 'complete' the load.
-                    load.fromText = bind(this, function (text, textAlt) {
-                        /*jslint evil: true */
-                        var moduleName = map.name,
-                            moduleMap = makeModuleMap(moduleName),
-                            hasInteractive = useInteractive;
-
-                        //As of 2.1.0, support just passing the text, to reinforce
-                        //fromText only being called once per resource. Still
-                        //support old style of passing moduleName but discard
-                        //that moduleName in favor of the internal ref.
-                        if (textAlt) {
-                            text = textAlt;
-                        }
-
-                        //Turn off interactive script matching for IE for any define
-                        //calls in the text, then turn it back on at the end.
-                        if (hasInteractive) {
-                            useInteractive = false;
-                        }
-
-                        //Prime the system by creating a module instance for
-                        //it.
-                        getModule(moduleMap);
-
-                        //Transfer any config to this other module.
-                        if (hasProp(config.config, id)) {
-                            config.config[moduleName] = config.config[id];
-                        }
-
-                        try {
-                            req.exec(text);
-                        } catch (e) {
-                            return onError(makeError('fromtexteval',
-                                             'fromText eval for ' + id +
-                                            ' failed: ' + e,
-                                             e,
-                                             [id]));
-                        }
-
-                        if (hasInteractive) {
-                            useInteractive = true;
-                        }
-
-                        //Mark this as a dependency for the plugin
-                        //resource
-                        this.depMaps.push(moduleMap);
-
-                        //Support anonymous modules.
-                        context.completeLoad(moduleName);
-
-                        //Bind the value of that module to the value for this
-                        //resource ID.
-                        localRequire([moduleName], load);
-                    });
-
-                    //Use parentName here since the plugin's name is not reliable,
-                    //could be some weird string with no path that actually wants to
-                    //reference the parentName's path.
-                    plugin.load(map.name, localRequire, load, config);
-                }));
-
-                context.enable(pluginMap, this);
-                this.pluginMaps[pluginMap.id] = pluginMap;
-            },
-
-            enable: function () {
-                enabledRegistry[this.map.id] = this;
-                this.enabled = true;
-
-                //Set flag mentioning that the module is enabling,
-                //so that immediate calls to the defined callbacks
-                //for dependencies do not trigger inadvertent load
-                //with the depCount still being zero.
-                this.enabling = true;
-
-                //Enable each dependency
-                each(this.depMaps, bind(this, function (depMap, i) {
-                    var id, mod, handler;
-
-                    if (typeof depMap === 'string') {
-                        //Dependency needs to be converted to a depMap
-                        //and wired up to this module.
-                        depMap = makeModuleMap(depMap,
-                                               (this.map.isDefine ? this.map : this.map.parentMap),
-                                               false,
-                                               !this.skipMap);
-                        this.depMaps[i] = depMap;
-
-                        handler = getOwn(handlers, depMap.id);
-
-                        if (handler) {
-                            this.depExports[i] = handler(this);
-                            return;
-                        }
-
-                        this.depCount += 1;
-
-                        on(depMap, 'defined', bind(this, function (depExports) {
-                            if (this.undefed) {
-                                return;
-                            }
-                            this.defineDep(i, depExports);
-                            this.check();
-                        }));
-
-                        if (this.errback) {
-                            on(depMap, 'error', bind(this, this.errback));
-                        } else if (this.events.error) {
-                            // No direct errback on this module, but something
-                            // else is listening for errors, so be sure to
-                            // propagate the error correctly.
-                            on(depMap, 'error', bind(this, function(err) {
-                                this.emit('error', err);
-                            }));
-                        }
-                    }
-
-                    id = depMap.id;
-                    mod = registry[id];
-
-                    //Skip special modules like 'require', 'exports', 'module'
-                    //Also, don't call enable if it is already enabled,
-                    //important in circular dependency cases.
-                    if (!hasProp(handlers, id) && mod && !mod.enabled) {
-                        context.enable(depMap, this);
-                    }
-                }));
-
-                //Enable each plugin that is used in
-                //a dependency
-                eachProp(this.pluginMaps, bind(this, function (pluginMap) {
-                    var mod = getOwn(registry, pluginMap.id);
-                    if (mod && !mod.enabled) {
-                        context.enable(pluginMap, this);
-                    }
-                }));
-
-                this.enabling = false;
-
-                this.check();
-            },
-
-            on: function (name, cb) {
-                var cbs = this.events[name];
-                if (!cbs) {
-                    cbs = this.events[name] = [];
-                }
-                cbs.push(cb);
-            },
-
-            emit: function (name, evt) {
-                each(this.events[name], function (cb) {
-                    cb(evt);
-                });
-                if (name === 'error') {
-                    //Now that the error handler was triggered, remove
-                    //the listeners, since this broken Module instance
-                    //can stay around for a while in the registry.
-                    delete this.events[name];
-                }
-            }
-        };
-
-        function callGetModule(args) {
-            //Skip modules already defined.
-            if (!hasProp(defined, args[0])) {
-                getModule(makeModuleMap(args[0], null, true)).init(args[1], args[2]);
-            }
-        }
-
-        function removeListener(node, func, name, ieName) {
-            //Favor detachEvent because of IE9
-            //issue, see attachEvent/addEventListener comment elsewhere
-            //in this file.
-            if (node.detachEvent && !isOpera) {
-                //Probably IE. If not it will throw an error, which will be
-                //useful to know.
-                if (ieName) {
-                    node.detachEvent(ieName, func);
-                }
-            } else {
-                node.removeEventListener(name, func, false);
-            }
-        }
-
-        /**
-         * Given an event from a script node, gets the requirejs info from it,
-         * and then removes the event listeners on the node.
-         * @param {Event} evt
-         * @returns {Object}
-         */
-        function getScriptData(evt) {
-            //Using currentTarget instead of target for Firefox 2.0's sake. Not
-            //all old browsers will be supported, but this one was easy enough
-            //to support and still makes sense.
-            var node = evt.currentTarget || evt.srcElement;
-
-            //Remove the listeners once here.
-            removeListener(node, context.onScriptLoad, 'load', 'onreadystatechange');
-            removeListener(node, context.onScriptError, 'error');
-
-            return {
-                node: node,
-                id: node && node.getAttribute('data-requiremodule')
-            };
-        }
-
-        function intakeDefines() {
-            var args;
-
-            //Any defined modules in the global queue, intake them now.
-            takeGlobalQueue();
-
-            //Make sure any remaining defQueue items get properly processed.
-            while (defQueue.length) {
-                args = defQueue.shift();
-                if (args[0] === null) {
-                    return onError(makeError('mismatch', 'Mismatched anonymous define() module: ' +
-                        args[args.length - 1]));
-                } else {
-                    //args are id, deps, factory. Should be normalized by the
-                    //define() function.
-                    callGetModule(args);
-                }
-            }
-            context.defQueueMap = {};
-        }
-
-        context = {
-            config: config,
-            contextName: contextName,
-            registry: registry,
-            defined: defined,
-            urlFetched: urlFetched,
-            defQueue: defQueue,
-            defQueueMap: {},
-            Module: Module,
-            makeModuleMap: makeModuleMap,
-            nextTick: req.nextTick,
-            onError: onError,
-
-            /**
-             * Set a configuration for the context.
-             * @param {Object} cfg config object to integrate.
-             */
-            configure: function (cfg) {
-                //Make sure the baseUrl ends in a slash.
-                if (cfg.baseUrl) {
-                    if (cfg.baseUrl.charAt(cfg.baseUrl.length - 1) !== '/') {
-                        cfg.baseUrl += '/';
-                    }
-                }
-
-                //Save off the paths since they require special processing,
-                //they are additive.
-                var shim = config.shim,
-                    objs = {
-                        paths: true,
-                        bundles: true,
-                        config: true,
-                        map: true
-                    };
-
-                eachProp(cfg, function (value, prop) {
-                    if (objs[prop]) {
-                        if (!config[prop]) {
-                            config[prop] = {};
-                        }
-                        mixin(config[prop], value, true, true);
-                    } else {
-                        config[prop] = value;
-                    }
-                });
-
-                //Reverse map the bundles
-                if (cfg.bundles) {
-                    eachProp(cfg.bundles, function (value, prop) {
-                        each(value, function (v) {
-                            if (v !== prop) {
-                                bundlesMap[v] = prop;
-                            }
-                        });
-                    });
-                }
-
-                //Merge shim
-                if (cfg.shim) {
-                    eachProp(cfg.shim, function (value, id) {
-                        //Normalize the structure
-                        if (isArray(value)) {
-                            value = {
-                                deps: value
-                            };
-                        }
-                        if ((value.exports || value.init) && !value.exportsFn) {
-                            value.exportsFn = context.makeShimExports(value);
-                        }
-                        shim[id] = value;
-                    });
-                    config.shim = shim;
-                }
-
-                //Adjust packages if necessary.
-                if (cfg.packages) {
-                    each(cfg.packages, function (pkgObj) {
-                        var location, name;
-
-                        pkgObj = typeof pkgObj === 'string' ? {name: pkgObj} : pkgObj;
-
-                        name = pkgObj.name;
-                        location = pkgObj.location;
-                        if (location) {
-                            config.paths[name] = pkgObj.location;
-                        }
-
-                        //Save pointer to main module ID for pkg name.
-                        //Remove leading dot in main, so main paths are normalized,
-                        //and remove any trailing .js, since different package
-                        //envs have different conventions: some use a module name,
-                        //some use a file name.
-                        config.pkgs[name] = pkgObj.name + '/' + (pkgObj.main || 'main')
-                                     .replace(currDirRegExp, '')
-                                     .replace(jsSuffixRegExp, '');
-                    });
-                }
-
-                //If there are any "waiting to execute" modules in the registry,
-                //update the maps for them, since their info, like URLs to load,
-                //may have changed.
-                eachProp(registry, function (mod, id) {
-                    //Skip modules that already have init called, since it is
-                    //too late to modify them, and ignore unnormalized ones
-                    //since they are transient.
-                    if (!mod.inited && !mod.map.unnormalized) {
-                        mod.map = makeModuleMap(id, null, true);
-                    }
-                });
-
-                //If a deps array or a config callback is specified, then call
-                //require with those args. This is useful when require is defined as a
-                //config object before require.js is loaded.
-                if (cfg.deps || cfg.callback) {
-                    context.require(cfg.deps || [], cfg.callback);
-                }
-            },
-
-            makeShimExports: function (value) {
-                function fn() {
-                    var ret;
-                    if (value.init) {
-                        ret = value.init.apply(global, arguments);
-                    }
-                    return ret || (value.exports && getGlobal(value.exports));
-                }
-                return fn;
-            },
-
-            makeRequire: function (relMap, options) {
-                options = options || {};
-
-                function localRequire(deps, callback, errback) {
-                    var id, map, requireMod;
-
-                    if (options.enableBuildCallback && callback && isFunction(callback)) {
-                        callback.__requireJsBuild = true;
-                    }
-
-                    if (typeof deps === 'string') {
-                        if (isFunction(callback)) {
-                            //Invalid call
-                            return onError(makeError('requireargs', 'Invalid require call'), errback);
-                        }
-
-                        //If require|exports|module are requested, get the
-                        //value for them from the special handlers. Caveat:
-                        //this only works while module is being defined.
-                        if (relMap && hasProp(handlers, deps)) {
-                            return handlers[deps](registry[relMap.id]);
-                        }
-
-                        //Synchronous access to one module. If require.get is
-                        //available (as in the Node adapter), prefer that.
-                        if (req.get) {
-                            return req.get(context, deps, relMap, localRequire);
-                        }
-
-                        //Normalize module name, if it contains . or ..
-                        map = makeModuleMap(deps, relMap, false, true);
-                        id = map.id;
-
-                        if (!hasProp(defined, id)) {
-                            return onError(makeError('notloaded', 'Module name "' +
-                                        id +
-                                        '" has not been loaded yet for context: ' +
-                                        contextName +
-                                        (relMap ? '' : '. Use require([])')));
-                        }
-                        return defined[id];
-                    }
-
-                    //Grab defines waiting in the global queue.
-                    intakeDefines();
-
-                    //Mark all the dependencies as needing to be loaded.
-                    context.nextTick(function () {
-                        //Some defines could have been added since the
-                        //require call, collect them.
-                        intakeDefines();
-
-                        requireMod = getModule(makeModuleMap(null, relMap));
-
-                        //Store if map config should be applied to this require
-                        //call for dependencies.
-                        requireMod.skipMap = options.skipMap;
-
-                        requireMod.init(deps, callback, errback, {
-                            enabled: true
-                        });
-
-                        checkLoaded();
-                    });
-
-                    return localRequire;
-                }
-
-                mixin(localRequire, {
-                    isBrowser: isBrowser,
-
-                    /**
-                     * Converts a module name + .extension into a URL path.
-                     * *Requires* the use of a module name. Unlike nameToUrl, it does
-                     * not support using plain URLs.
-                     */
-                    toUrl: function (moduleNamePlusExt) {
-                        var ext,
-                            index = moduleNamePlusExt.lastIndexOf('.'),
-                            segment = moduleNamePlusExt.split('/')[0],
-                            isRelative = segment === '.' || segment === '..';
-
-                        //Have a file extension alias, and it is not the
-                        //dots from a relative path.
-                        if (index !== -1 && (!isRelative || index > 1)) {
-                            ext = moduleNamePlusExt.substring(index, moduleNamePlusExt.length);
-                            moduleNamePlusExt = moduleNamePlusExt.substring(0, index);
-                        }
-
-                        return context.nameToUrl(normalize(moduleNamePlusExt,
-                                                relMap && relMap.id, true), ext,  true);
-                    },
-
-                    defined: function (id) {
-                        return hasProp(defined, makeModuleMap(id, relMap, false, true).id);
-                    },
-
-                    specified: function (id) {
-                        id = makeModuleMap(id, relMap, false, true).id;
-                        return hasProp(defined, id) || hasProp(registry, id);
-                    }
-                });
-
-                //Only allow undef on top level require calls
-                if (!relMap) {
-                    localRequire.undef = function (id) {
-                        //Bind any waiting define() calls to this context,
-                        //fix for #408
-                        takeGlobalQueue();
-
-                        var map = makeModuleMap(id, relMap, true),
-                            mod = getOwn(registry, id);
-
-                        mod.undefed = true;
-                        removeScript(id);
-
-                        delete defined[id];
-                        delete urlFetched[map.url];
-                        delete undefEvents[id];
-
-                        //Clean queued defines too. Go backwards
-                        //in array so that the splices do not
-                        //mess up the iteration.
-                        eachReverse(defQueue, function(args, i) {
-                            if (args[0] === id) {
-                                defQueue.splice(i, 1);
-                            }
-                        });
-                        delete context.defQueueMap[id];
-
-                        if (mod) {
-                            //Hold on to the listeners in case an
-                            //attempt is made to reload the module
-                            //using a different config.
-                            if (mod.events.defined) {
-                                undefEvents[id] = mod.events;
-                            }
-
-                            cleanRegistry(id);
-                        }
-                    };
-                }
-
-                return localRequire;
-            },
-
-            /**
-             * Called to enable a module if it is still in the registry
-             * awaiting enablement. A second arg, parent, the parent module,
-             * is passed in for context, when this method is overridden by
-             * the optimizer. Not shown here to keep code compact.
-             */
-            enable: function (depMap) {
-                var mod = getOwn(registry, depMap.id);
-                if (mod) {
-                    getModule(depMap).enable();
-                }
-            },
-
-            /**
-             * Internal method used by environment adapters to complete a load event.
-             * A load event could be a script load or just a load pass from a synchronous
-             * load call.
-             * @param {String} moduleName the name of the module to potentially complete.
-             */
-            completeLoad: function (moduleName) {
-                var found, args, mod,
-                    shim = getOwn(config.shim, moduleName) || {},
-                    shExports = shim.exports;
-
-                takeGlobalQueue();
-
-                while (defQueue.length) {
-                    args = defQueue.shift();
-                    if (args[0] === null) {
-                        args[0] = moduleName;
-                        //If already found an anonymous module and bound it
-                        //to this name, then this is some other anon module
-                        //waiting for its completeLoad to fire.
-                        if (found) {
-                            break;
-                        }
-                        found = true;
-                    } else if (args[0] === moduleName) {
-                        //Found matching define call for this script!
-                        found = true;
-                    }
-
-                    callGetModule(args);
-                }
-                context.defQueueMap = {};
-
-                //Do this after the cycle of callGetModule in case the result
-                //of those calls/init calls changes the registry.
-                mod = getOwn(registry, moduleName);
-
-                if (!found && !hasProp(defined, moduleName) && mod && !mod.inited) {
-                    if (config.enforceDefine && (!shExports || !getGlobal(shExports))) {
-                        if (hasPathFallback(moduleName)) {
-                            return;
-                        } else {
-                            return onError(makeError('nodefine',
-                                             'No define call for ' + moduleName,
-                                             null,
-                                             [moduleName]));
-                        }
-                    } else {
-                        //A script that does not call define(), so just simulate
-                        //the call for it.
-                        callGetModule([moduleName, (shim.deps || []), shim.exportsFn]);
-                    }
-                }
-
-                checkLoaded();
-            },
-
-            /**
-             * Converts a module name to a file path. Supports cases where
-             * moduleName may actually be just a URL.
-             * Note that it **does not** call normalize on the moduleName,
-             * it is assumed to have already been normalized. This is an
-             * internal API, not a public one. Use toUrl for the public API.
-             */
-            nameToUrl: function (moduleName, ext, skipExt) {
-                var paths, syms, i, parentModule, url,
-                    parentPath, bundleId,
-                    pkgMain = getOwn(config.pkgs, moduleName);
-
-                if (pkgMain) {
-                    moduleName = pkgMain;
-                }
-
-                bundleId = getOwn(bundlesMap, moduleName);
-
-                if (bundleId) {
-                    return context.nameToUrl(bundleId, ext, skipExt);
-                }
-
-                //If a colon is in the URL, it indicates a protocol is used and it is just
-                //a URL to a file, or if it starts with a slash, contains a query arg (i.e. ?)
-                //or ends with .js, then assume the user meant to use a URL and not a module id.
-                //The slash is important for protocol-less URLs as well as full paths.
-                if (req.jsExtRegExp.test(moduleName)) {
-                    //Just a plain path, not module name lookup, so just return it.
-                    //Add the extension if one was passed in. This is a bit wonky: only non-.js
-                    //things pass an extension, so this method probably needs to be reworked.
-                    url = moduleName + (ext || '');
-                } else {
-                    //A module that needs to be converted to a path.
-                    paths = config.paths;
-
-                    syms = moduleName.split('/');
-                    //For each module name segment, see if there is a path
-                    //registered for it. Start with most specific name
-                    //and work up from it.
-                    for (i = syms.length; i > 0; i -= 1) {
-                        parentModule = syms.slice(0, i).join('/');
-
-                        parentPath = getOwn(paths, parentModule);
-                        if (parentPath) {
-                            //If an array, it means there are a few choices,
-                            //Choose the one that is desired
-                            if (isArray(parentPath)) {
-                                parentPath = parentPath[0];
-                            }
-                            syms.splice(0, i, parentPath);
-                            break;
-                        }
-                    }
-
-                    //Join the path parts together, then figure out if baseUrl is needed.
-                    url = syms.join('/');
-                    url += (ext || (/^data\:|\?/.test(url) || skipExt ? '' : '.js'));
-                    url = (url.charAt(0) === '/' || url.match(/^[\w\+\.\-]+:/) ? '' : config.baseUrl) + url;
-                }
-
-                return config.urlArgs ? url +
-                                        ((url.indexOf('?') === -1 ? '?' : '&') +
-                                         config.urlArgs) : url;
-            },
-
-            //Delegates to req.load. Broken out as a separate function to
-            //allow overriding in the optimizer.
-            load: function (id, url) {
-                req.load(context, id, url);
-            },
-
-            /**
-             * Executes a module callback function. Broken out as a separate function
-             * solely to allow the build system to sequence the files in the built
-             * layer in the right sequence.
-             *
-             * @private
-             */
-            execCb: function (name, callback, args, exports) {
-                return callback.apply(exports, args);
-            },
-
-            /**
-             * callback for script loads, used to check status of loading.
-             *
-             * @param {Event} evt the event from the browser for the script
-             * that was loaded.
-             */
-            onScriptLoad: function (evt) {
-                //Using currentTarget instead of target for Firefox 2.0's sake. Not
-                //all old browsers will be supported, but this one was easy enough
-                //to support and still makes sense.
-                if (evt.type === 'load' ||
-                        (readyRegExp.test((evt.currentTarget || evt.srcElement).readyState))) {
-                    //Reset interactive script so a script node is not held onto for
-                    //to long.
-                    interactiveScript = null;
-
-                    //Pull out the name of the module and the context.
-                    var data = getScriptData(evt);
-                    context.completeLoad(data.id);
-                }
-            },
-
-            /**
-             * Callback for script errors.
-             */
-            onScriptError: function (evt) {
-                var data = getScriptData(evt);
-                if (!hasPathFallback(data.id)) {
-                    var parents = [];
-                    eachProp(registry, function(value, key) {
-                        if (key.indexOf('_@r') !== 0) {
-                            each(value.depMaps, function(depMap) {
-                                if (depMap.id === data.id) {
-                                    parents.push(key);
-                                }
-                                return true;
-                            });
-                        }
-                    });
-                    return onError(makeError('scripterror', 'Script error for "' + data.id +
-                                             (parents.length ?
-                                             '", needed by: ' + parents.join(', ') :
-                                             '"'), evt, [data.id]));
-                }
-            }
-        };
-
-        context.require = context.makeRequire();
-        return context;
-    }
-
-    /**
-     * Main entry point.
-     *
-     * If the only argument to require is a string, then the module that
-     * is represented by that string is fetched for the appropriate context.
-     *
-     * If the first argument is an array, then it will be treated as an array
-     * of dependency string names to fetch. An optional function callback can
-     * be specified to execute when all of those dependencies are available.
-     *
-     * Make a local req variable to help Caja compliance (it assumes things
-     * on a require that are not standardized), and to give a short
-     * name for minification/local scope use.
-     */
-    req = requirejs = function (deps, callback, errback, optional) {
-
-        //Find the right context, use default
-        var context, config,
-            contextName = defContextName;
-
-        // Determine if have config object in the call.
-        if (!isArray(deps) && typeof deps !== 'string') {
-            // deps is a config object
-            config = deps;
-            if (isArray(callback)) {
-                // Adjust args if there are dependencies
-                deps = callback;
-                callback = errback;
-                errback = optional;
-            } else {
-                deps = [];
-            }
-        }
-
-        if (config && config.context) {
-            contextName = config.context;
-        }
-
-        context = getOwn(contexts, contextName);
-        if (!context) {
-            context = contexts[contextName] = req.s.newContext(contextName);
-        }
-
-        if (config) {
-            context.configure(config);
-        }
-
-        return context.require(deps, callback, errback);
-    };
-
-    /**
-     * Support require.config() to make it easier to cooperate with other
-     * AMD loaders on globally agreed names.
-     */
-    req.config = function (config) {
-        return req(config);
-    };
-
-    /**
-     * Execute something after the current tick
-     * of the event loop. Override for other envs
-     * that have a better solution than setTimeout.
-     * @param  {Function} fn function to execute later.
-     */
-    req.nextTick = typeof setTimeout !== 'undefined' ? function (fn) {
-        setTimeout(fn, 4);
-    } : function (fn) { fn(); };
-
-    /**
-     * Export require as a global, but only if it does not already exist.
-     */
-    if (!require) {
-        require = req;
-    }
-
-    req.version = version;
-
-    //Used to filter out dependencies that are already paths.
-    req.jsExtRegExp = /^\/|:|\?|\.js$/;
-    req.isBrowser = isBrowser;
-    s = req.s = {
-        contexts: contexts,
-        newContext: newContext
-    };
-
-    //Create default context.
-    req({});
-
-    //Exports some context-sensitive methods on global require.
-    each([
-        'toUrl',
-        'undef',
-        'defined',
-        'specified'
-    ], function (prop) {
-        //Reference from contexts instead of early binding to default context,
-        //so that during builds, the latest instance of the default context
-        //with its config gets used.
-        req[prop] = function () {
-            var ctx = contexts[defContextName];
-            return ctx.require[prop].apply(ctx, arguments);
-        };
-    });
-
-    if (isBrowser) {
-        head = s.head = document.getElementsByTagName('head')[0];
-        //If BASE tag is in play, using appendChild is a problem for IE6.
-        //When that browser dies, this can be removed. Details in this jQuery bug:
-        //http://dev.jquery.com/ticket/2709
-        baseElement = document.getElementsByTagName('base')[0];
-        if (baseElement) {
-            head = s.head = baseElement.parentNode;
-        }
-    }
-
-    /**
-     * Any errors that require explicitly generates will be passed to this
-     * function. Intercept/override it if you want custom error handling.
-     * @param {Error} err the error object.
-     */
-    req.onError = defaultOnError;
-
-    /**
-     * Creates the node for the load command. Only used in browser envs.
-     */
-    req.createNode = function (config, moduleName, url) {
-        var node = config.xhtml ?
-                document.createElementNS('http://www.w3.org/1999/xhtml', 'html:script') :
-                document.createElement('script');
-        node.type = config.scriptType || 'text/javascript';
-        node.charset = 'utf-8';
-        node.async = true;
-        return node;
-    };
-
-    /**
-     * Does the request to load a module for the browser case.
-     * Make this a separate function to allow other environments
-     * to override it.
-     *
-     * @param {Object} context the require context to find state.
-     * @param {String} moduleName the name of the module.
-     * @param {Object} url the URL to the module.
-     */
-    req.load = function (context, moduleName, url) {
-        var config = (context && context.config) || {},
-            node;
-        if (isBrowser) {
-            //In the browser so use a script tag
-            node = req.createNode(config, moduleName, url);
-            if (config.onNodeCreated) {
-                config.onNodeCreated(node, config, moduleName, url);
-            }
-
-            node.setAttribute('data-requirecontext', context.contextName);
-            node.setAttribute('data-requiremodule', moduleName);
-
-            //Set up load listener. Test attachEvent first because IE9 has
-            //a subtle issue in its addEventListener and script onload firings
-            //that do not match the behavior of all other browsers with
-            //addEventListener support, which fire the onload event for a
-            //script right after the script execution. See:
-            //https://connect.microsoft.com/IE/feedback/details/648057/script-onload-event-is-not-fired-immediately-after-script-execution
-            //UNFORTUNATELY Opera implements attachEvent but does not follow the script
-            //script execution mode.
-            if (node.attachEvent &&
-                    //Check if node.attachEvent is artificially added by custom script or
-                    //natively supported by browser
-                    //read https://github.com/jrburke/requirejs/issues/187
-                    //if we can NOT find [native code] then it must NOT natively supported.
-                    //in IE8, node.attachEvent does not have toString()
-                    //Note the test for "[native code" with no closing brace, see:
-                    //https://github.com/jrburke/requirejs/issues/273
-                    !(node.attachEvent.toString && node.attachEvent.toString().indexOf('[native code') < 0) &&
-                    !isOpera) {
-                //Probably IE. IE (at least 6-8) do not fire
-                //script onload right after executing the script, so
-                //we cannot tie the anonymous define call to a name.
-                //However, IE reports the script as being in 'interactive'
-                //readyState at the time of the define call.
-                useInteractive = true;
-
-                node.attachEvent('onreadystatechange', context.onScriptLoad);
-                //It would be great to add an error handler here to catch
-                //404s in IE9+. However, onreadystatechange will fire before
-                //the error handler, so that does not help. If addEventListener
-                //is used, then IE will fire error before load, but we cannot
-                //use that pathway given the connect.microsoft.com issue
-                //mentioned above

<TRUNCATED>

[47/50] [abbrv] ambari git commit: AMBARI-18246. Clean up Log Feeder (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java
index 18a5a54..e1a0bb9 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java
@@ -30,26 +30,27 @@ import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.csv.CSVFormat;
 import org.apache.commons.csv.CSVPrinter;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Logger;
 
 public class OutputFile extends Output {
-  static Logger logger = Logger.getLogger(OutputFile.class);
+  private static final Logger LOG = Logger.getLogger(OutputFile.class);
 
-  PrintWriter outWriter = null;
-  String filePath = null;
-  String codec;
+  private PrintWriter outWriter;
+  private String filePath = null;
+  private String codec;
 
   @Override
   public void init() throws Exception {
     super.init();
 
     filePath = getStringValue("path");
-    if (filePath == null || filePath.isEmpty()) {
-      logger.error("Filepath config property <path> is not set in config file.");
+    if (StringUtils.isEmpty(filePath)) {
+      LOG.error("Filepath config property <path> is not set in config file.");
       return;
     }
     codec = getStringValue("codec");
-    if (codec == null || codec.trim().isEmpty()) {
+    if (StringUtils.isBlank(codec)) {
       codec = "json";
     } else {
       if (codec.trim().equalsIgnoreCase("csv")) {
@@ -57,12 +58,11 @@ public class OutputFile extends Output {
       } else if (codec.trim().equalsIgnoreCase("json")) {
         codec = "csv";
       } else {
-        logger.error("Unsupported codec type. codec=" + codec
-          + ", will use json");
+        LOG.error("Unsupported codec type. codec=" + codec + ", will use json");
         codec = "json";
       }
     }
-    logger.info("Out filePath=" + filePath + ", codec=" + codec);
+    LOG.info("Out filePath=" + filePath + ", codec=" + codec);
     File outFile = new File(filePath);
     if (outFile.getParentFile() != null) {
       File parentDir = outFile.getParentFile();
@@ -71,16 +71,14 @@ public class OutputFile extends Output {
       }
     }
 
-    outWriter = new PrintWriter(new BufferedWriter(new FileWriter(outFile,
-      true)));
+    outWriter = new PrintWriter(new BufferedWriter(new FileWriter(outFile, true)));
 
-    logger.info("init() is successfull. filePath="
-      + outFile.getAbsolutePath());
+    LOG.info("init() is successful. filePath=" + outFile.getAbsolutePath());
   }
 
   @Override
   public void close() {
-    logger.info("Closing file." + getShortDescription());
+    LOG.info("Closing file." + getShortDescription());
     if (outWriter != null) {
       try {
         outWriter.close();
@@ -92,8 +90,7 @@ public class OutputFile extends Output {
   }
 
   @Override
-  public void write(Map<String, Object> jsonObj, InputMarker inputMarker)
-    throws Exception {
+  public void write(Map<String, Object> jsonObj, InputMarker inputMarker) throws Exception {
     String outStr = null;
     CSVPrinter csvPrinter = null;
     try {
@@ -104,7 +101,7 @@ public class OutputFile extends Output {
         outStr = LogFeederUtil.getGson().toJson(jsonObj);
       }
       if (outWriter != null && outStr != null) {
-        statMetric.count++;
+        statMetric.value++;
 
         outWriter.println(outStr);
         outWriter.flush();
@@ -122,7 +119,7 @@ public class OutputFile extends Output {
   @Override
   synchronized public void write(String block, InputMarker inputMarker) throws Exception {
     if (outWriter != null && block != null) {
-      statMetric.count++;
+      statMetric.value++;
 
       outWriter.println(block);
       outWriter.flush();
@@ -135,10 +132,7 @@ public class OutputFile extends Output {
   }
 
   @Override
-  public void copyFile(File inputFile, InputMarker inputMarker)
-      throws UnsupportedOperationException {
-    throw new UnsupportedOperationException(
-        "copyFile method is not yet supported for output=file");
+  public void copyFile(File inputFile, InputMarker inputMarker) throws UnsupportedOperationException {
+    throw new UnsupportedOperationException("copyFile method is not yet supported for output=file");
   }
-
 }
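
The OutputFile diff above is mostly mechanical: the logger is renamed, fields become private, and hand-written null/empty checks are replaced with commons-lang3 StringUtils. As a rough stand-alone illustration of that last point (not the patched method itself; the class and method names below are invented), a codec value can be defaulted like this:

import org.apache.commons.lang3.StringUtils;

// Sketch only: shows the StringUtils-based defaulting style the patch adopts.
public class CodecConfigSketch {

  // Hypothetical helper mirroring the shape of the codec handling in OutputFile.init().
  static String resolveCodec(String configured) {
    if (StringUtils.isBlank(configured)) {      // null, "", or whitespace-only
      return "json";
    }
    return configured.trim().equalsIgnoreCase("csv") ? "csv" : "json";
  }

  public static void main(String[] args) {
    System.out.println(resolveCodec(null));     // json
    System.out.println(resolveCodec("  "));     // json
    System.out.println(resolveCodec("CSV"));    // csv
    System.out.println(resolveCodec("xml"));    // json (unsupported values fall back)
  }
}

StringUtils.isBlank covers the null, empty and whitespace-only cases in one call, which is why the patch can drop the two-part checks.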

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java
index a360215..8f4b0b1 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java
@@ -43,7 +43,8 @@ import java.util.concurrent.ConcurrentLinkedQueue;
  * The events are spooled on the local file system and uploaded in batches asynchronously.
  */
 public class OutputHDFSFile extends Output implements RolloverHandler, RolloverCondition {
-  private final static Logger logger = Logger.getLogger(OutputHDFSFile.class);
+  private static final Logger LOG = Logger.getLogger(OutputHDFSFile.class);
+  
   private static final long DEFAULT_ROLLOVER_THRESHOLD_TIME_SECONDS = 5 * 60L;// 5 min by default
 
   private ConcurrentLinkedQueue<File> localReadyFiles = new ConcurrentLinkedQueue<File>();
@@ -72,23 +73,20 @@ public class OutputHDFSFile extends Output implements RolloverHandler, RolloverC
     rolloverThresholdTimeMillis = rolloverThresholdTimeSeconds * 1000L;
     filenamePrefix = getStringValue("file_name_prefix", filenamePrefix);
     if (StringUtils.isEmpty(hdfsOutDir)) {
-      logger
-          .error("HDFS config property <hdfs_out_dir> is not set in config file.");
+      LOG.error("HDFS config property <hdfs_out_dir> is not set in config file.");
       return;
     }
     if (StringUtils.isEmpty(hdfsHost)) {
-      logger
-          .error("HDFS config property <hdfs_host> is not set in config file.");
+      LOG.error("HDFS config property <hdfs_host> is not set in config file.");
       return;
     }
     if (StringUtils.isEmpty(hdfsPort)) {
-      logger
-          .error("HDFS config property <hdfs_port> is not set in config file.");
+      LOG.error("HDFS config property <hdfs_port> is not set in config file.");
       return;
     }
     HashMap<String, String> contextParam = buildContextParam();
     hdfsOutDir = PlaceholderUtil.replaceVariables(hdfsOutDir, contextParam);
-    logger.info("hdfs Output dir=" + hdfsOutDir);
+    LOG.info("hdfs Output dir=" + hdfsOutDir);
     String localFileDir = LogFeederUtil.getLogfeederTempDir() + "hdfs/service/";
     logSpooler = new LogSpooler(localFileDir, filenamePrefix, this, this);
     this.startHDFSCopyThread();
@@ -96,18 +94,17 @@ public class OutputHDFSFile extends Output implements RolloverHandler, RolloverC
 
   @Override
   public void close() {
-    logger.info("Closing file." + getShortDescription());
+    LOG.info("Closing file." + getShortDescription());
     logSpooler.rollover();
     this.stopHDFSCopyThread();
     isClosed = true;
   }
 
   @Override
-  synchronized public void write(String block, InputMarker inputMarker)
-      throws Exception {
+  public synchronized void write(String block, InputMarker inputMarker) throws Exception {
     if (block != null) {
       logSpooler.add(block);
-      statMetric.count++;
+      statMetric.value++;
     }
   }
 
@@ -127,24 +124,19 @@ public class OutputHDFSFile extends Output implements RolloverHandler, RolloverC
             Iterator<File> localFileIterator = localReadyFiles.iterator();
             while (localFileIterator.hasNext()) {
               File localFile = localFileIterator.next();
-              fileSystem = LogfeederHDFSUtil.INSTANCE.buildFileSystem(hdfsHost,
-                  hdfsPort);
+              fileSystem = LogfeederHDFSUtil.buildFileSystem(hdfsHost, hdfsPort);
               if (fileSystem != null && localFile.exists()) {
                 String destFilePath = hdfsOutDir + "/" + localFile.getName();
                 String localPath = localFile.getAbsolutePath();
                 boolean overWrite = true;
                 boolean delSrc = true;
-                boolean isCopied = LogfeederHDFSUtil.INSTANCE.copyFromLocal(
-                    localFile.getAbsolutePath(), destFilePath, fileSystem,
+                boolean isCopied = LogfeederHDFSUtil.copyFromLocal(localFile.getAbsolutePath(), destFilePath, fileSystem,
                     overWrite, delSrc);
                 if (isCopied) {
-                  logger.debug("File copy to hdfs hdfspath :" + destFilePath
-                      + " and deleted local file :" + localPath);
+                  LOG.debug("File copy to hdfs hdfspath :" + destFilePath + " and deleted local file :" + localPath);
                 } else {
-                  // TODO Need to write retry logic, in next release we can
-                  // handle it
-                  logger.error("Hdfs file copy  failed for hdfspath :"
-                      + destFilePath + " and localpath :" + localPath);
+                  // TODO Need to write retry logic, in next release we can handle it
+                  LOG.error("Hdfs file copy  failed for hdfspath :" + destFilePath + " and localpath :" + localPath);
                 }
               }
               localFileIterator.remove();
@@ -157,14 +149,11 @@ public class OutputHDFSFile extends Output implements RolloverHandler, RolloverC
                 }
               }
             } catch (InterruptedException e) {
-              logger.error(e.getLocalizedMessage(),e);
+              LOG.error(e.getLocalizedMessage(),e);
             }
           }
         } catch (Exception e) {
-          logger
-              .error(
-                  "Exception in hdfsCopyThread errorMsg:"
-                      + e.getLocalizedMessage(), e);
+          LOG.error("Exception in hdfsCopyThread errorMsg:" + e.getLocalizedMessage(), e);
         }
       }
     };
@@ -174,24 +163,23 @@ public class OutputHDFSFile extends Output implements RolloverHandler, RolloverC
 
   private void stopHDFSCopyThread() {
     if (hdfsCopyThread != null) {
-      logger.info("waiting till copy all local files to hdfs.......");
+      LOG.info("waiting till copy all local files to hdfs.......");
       while (!localReadyFiles.isEmpty()) {
         try {
           Thread.sleep(1000);
         } catch (InterruptedException e) {
-          logger.error(e.getLocalizedMessage(), e);
+          LOG.error(e.getLocalizedMessage(), e);
         }
-        logger.debug("still waiting to copy all local files to hdfs.......");
+        LOG.debug("still waiting to copy all local files to hdfs.......");
       }
-      logger.info("calling interrupt method for hdfsCopyThread to stop it.");
+      LOG.info("calling interrupt method for hdfsCopyThread to stop it.");
       try {
         hdfsCopyThread.interrupt();
       } catch (SecurityException exception) {
-        logger.error(" Current thread : '" + Thread.currentThread().getName()
-            + "' does not have permission to interrupt the Thread: '"
-            + hdfsCopyThread.getName() + "'");
+        LOG.error(" Current thread : '" + Thread.currentThread().getName() +
+            "' does not have permission to interrupt the Thread: '" + hdfsCopyThread.getName() + "'");
       }
-      LogfeederHDFSUtil.INSTANCE.closeFileSystem(fileSystem);
+      LogfeederHDFSUtil.closeFileSystem(fileSystem);
     }
   }
 
@@ -208,15 +196,13 @@ public class OutputHDFSFile extends Output implements RolloverHandler, RolloverC
         readyMonitor.notifyAll();
       }
     } catch (Exception e) {
-      logger.error(e.getLocalizedMessage(),e);
+      LOG.error(e.getLocalizedMessage(),e);
     }
   }
 
   @Override
-  public void copyFile(File inputFile, InputMarker inputMarker)
-      throws UnsupportedOperationException {
-    throw new UnsupportedOperationException(
-        "copyFile method is not yet supported for output=hdfs");     
+  public void copyFile(File inputFile, InputMarker inputMarker) throws UnsupportedOperationException {
+    throw new UnsupportedOperationException("copyFile method is not yet supported for output=hdfs");
   }
 
   /**
@@ -242,8 +228,8 @@ public class OutputHDFSFile extends Output implements RolloverHandler, RolloverC
     long timeSinceCreation = new Date().getTime() - currentSpoolerContext.getActiveLogCreationTime().getTime();
     boolean shouldRollover = timeSinceCreation > rolloverThresholdTimeMillis;
     if (shouldRollover) {
-      logger.info("Detecting that time since file creation time " + currentSpoolerContext.getActiveLogCreationTime() +
-                    " has crossed threshold (msecs) " + rolloverThresholdTimeMillis);
+      LOG.info("Detecting that time since file creation time " + currentSpoolerContext.getActiveLogCreationTime() +
+          " has crossed threshold (msecs) " + rolloverThresholdTimeMillis);
     }
     return shouldRollover;
   }
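
OutputHDFSFile keeps its spool-then-upload design: rolled-over files are queued in a ConcurrentLinkedQueue and a background copy thread drains the queue, blocking on a monitor while it is empty. The following is a minimal, hypothetical sketch of that producer/consumer pattern; the class is not Ambari code, and the HDFS upload is replaced by a println.

import java.io.File;
import java.util.Iterator;
import java.util.concurrent.ConcurrentLinkedQueue;

// Sketch of the queue-plus-monitor hand-off used by the HDFS output.
public class SpoolUploadSketch {
  private final ConcurrentLinkedQueue<File> readyFiles = new ConcurrentLinkedQueue<>();
  private final Object readyMonitor = new Object();

  // Called by the rollover handler when a local spool file is complete.
  public void addReadyFile(File f) {
    readyFiles.add(f);
    synchronized (readyMonitor) {
      readyMonitor.notifyAll();
    }
  }

  // Body of the copy thread: upload everything queued, then wait for more work.
  void copyLoop() throws InterruptedException {
    while (!Thread.currentThread().isInterrupted()) {
      Iterator<File> it = readyFiles.iterator();
      while (it.hasNext()) {
        upload(it.next());                      // stand-in for the copyFromLocal call
        it.remove();
      }
      synchronized (readyMonitor) {
        if (readyFiles.isEmpty()) {
          readyMonitor.wait();
        }
      }
    }
  }

  private void upload(File f) {
    System.out.println("would upload " + f.getName());
  }

  public static void main(String[] args) throws Exception {
    SpoolUploadSketch sketch = new SpoolUploadSketch();
    Thread copier = new Thread(() -> {
      try { sketch.copyLoop(); } catch (InterruptedException ignored) { }
    });
    copier.start();
    sketch.addReadyFile(new File("spool-000.log"));   // placeholder file name
    Thread.sleep(200);
    copier.interrupt();
  }
}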

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java
index 2595d87..52fc6f8 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java
@@ -56,6 +56,16 @@ public class OutputKafka extends Output {
   private boolean isKafkaBrokerUp = false;
 
   @Override
+  protected String getStatMetricName() {
+    return "output.kafka.write_logs";
+  }
+
+  @Override
+  protected String getWriteBytesMetricName() {
+    return "output.kafka.write_bytes";
+  }
+  
+  @Override
   public void init() throws Exception {
     super.init();
     Properties props = initProperties();
@@ -65,9 +75,6 @@ public class OutputKafka extends Output {
   }
 
   private Properties initProperties() throws Exception {
-    statMetric.metricsName = "output.kafka.write_logs";
-    writeBytesMetric.metricsName = "output.kafka.write_bytes";
-
     String brokerList = getStringValue("broker_list");
     if (StringUtils.isEmpty(brokerList)) {
       throw new Exception("For kafka output, bootstrap broker_list is needed");
@@ -124,17 +131,15 @@ public class OutputKafka extends Output {
             if (publishMessage(kafkaCallBack.message, kafkaCallBack.inputMarker)) {
               kafkaCallBack = null;
             } else {
-              LOG.error("Kafka is down. messageNumber=" + kafkaCallBack.thisMessageNumber + ". Going to sleep for "
-                  + FAILED_RETRY_INTERVAL + " seconds");
+              LOG.error("Kafka is down. messageNumber=" + kafkaCallBack.thisMessageNumber + ". Going to sleep for " +
+                  FAILED_RETRY_INTERVAL + " seconds");
               Thread.sleep(FAILED_RETRY_INTERVAL * 1000);
             }
 
           } catch (Throwable t) {
             String logMessageKey = this.getClass().getSimpleName() + "_KAFKA_RETRY_WRITE_ERROR";
-            LogFeederUtil.logErrorMessageByInterval(logMessageKey,
-                "Error sending message to Kafka during retry. message="
-                    + (kafkaCallBack == null ? null : kafkaCallBack.message),
-                t, LOG, Level.ERROR);
+            LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error sending message to Kafka during retry. message=" +
+                (kafkaCallBack == null ? null : kafkaCallBack.message), t, LOG, Level.ERROR);
           }
         }
 
@@ -160,8 +165,8 @@ public class OutputKafka extends Output {
           LOG.error("Kafka is down. Going to sleep for " + FAILED_RETRY_INTERVAL + " seconds");
           Thread.sleep(FAILED_RETRY_INTERVAL * 1000);
         } else {
-          LOG.warn("Kafka is still catching up from previous failed messages. outstanding messages="
-              + failedMessages.size() + " Going to sleep for " + CATCHUP_RETRY_INTERVAL + " seconds");
+          LOG.warn("Kafka is still catching up from previous failed messages. outstanding messages=" + failedMessages.size() +
+              " Going to sleep for " + CATCHUP_RETRY_INTERVAL + " seconds");
           Thread.sleep(CATCHUP_RETRY_INTERVAL * 1000);
         }
       } catch (Throwable t) {
@@ -198,16 +203,15 @@ public class OutputKafka extends Output {
 
   private boolean publishMessage(String block, InputMarker inputMarker) {
     if (isAsync && isKafkaBrokerUp) { // Send asynchronously
-      producer.send(new ProducerRecord<String, String>(topic, block),
-          new KafkaCallBack(this, block, inputMarker, ++messageCount));
+      producer.send(new ProducerRecord<String, String>(topic, block), new KafkaCallBack(this, block, inputMarker, ++messageCount));
       return true;
     } else { // Send synchronously
       try {
         // Not using key. Let it round robin
         RecordMetadata metadata = producer.send(new ProducerRecord<String, String>(topic, block)).get();
         if (metadata != null) {
-          statMetric.count++;
-          writeBytesMetric.count += block.length();
+          statMetric.value++;
+          writeBytesMetric.value += block.length();
         }
         if (!isKafkaBrokerUp) {
           LOG.info("Started writing to kafka. " + getShortDescription());
@@ -217,18 +221,18 @@ public class OutputKafka extends Output {
       } catch (InterruptedException e) {
         isKafkaBrokerUp = false;
         String logKeyMessage = this.getClass().getSimpleName() + "_KAFKA_INTERRUPT";
-        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "InterruptedException-Error sending message to Kafka", e,
-            LOG, Level.ERROR);
+        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "InterruptedException-Error sending message to Kafka", e, LOG,
+            Level.ERROR);
       } catch (ExecutionException e) {
         isKafkaBrokerUp = false;
         String logKeyMessage = this.getClass().getSimpleName() + "_KAFKA_EXECUTION";
-        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "ExecutionException-Error sending message to Kafka", e,
-            LOG, Level.ERROR);
+        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "ExecutionException-Error sending message to Kafka", e, LOG,
+            Level.ERROR);
       } catch (Throwable t) {
         isKafkaBrokerUp = false;
         String logKeyMessage = this.getClass().getSimpleName() + "_KAFKA_WRITE_ERROR";
-        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "GenericException-Error sending message to Kafka", t,
-            LOG, Level.ERROR);
+        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "GenericException-Error sending message to Kafka", t, LOG,
+            Level.ERROR);
       }
     }
     return false;
@@ -260,12 +264,12 @@ public class OutputKafka extends Output {
           output.isKafkaBrokerUp = true;
         }
         output.incrementStat(1);
-        output.writeBytesMetric.count += message.length();
+        output.writeBytesMetric.value += message.length();
       } else {
         output.isKafkaBrokerUp = false;
         String logKeyMessage = this.getClass().getSimpleName() + "_KAFKA_ASYNC_ERROR";
-        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "Error sending message to Kafka. Async Callback",
-            exception, LOG, Level.ERROR);
+        LogFeederUtil.logErrorMessageByInterval(logKeyMessage, "Error sending message to Kafka. Async Callback", exception, LOG,
+            Level.ERROR);
 
         output.failedMessages.add(this);
       }
@@ -273,9 +277,7 @@ public class OutputKafka extends Output {
   }
 
   @Override
-  public void copyFile(File inputFile, InputMarker inputMarker)
-      throws UnsupportedOperationException {
-    throw new UnsupportedOperationException(
-        "copyFile method is not yet supported for output=kafka");
+  public void copyFile(File inputFile, InputMarker inputMarker) throws UnsupportedOperationException {
+    throw new UnsupportedOperationException("copyFile method is not yet supported for output=kafka");
   }
 }
\ No newline at end of file
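
Beyond the metric rename (statMetric.count becomes statMetric.value, with the metric names now supplied through overridable getters instead of being assigned in initProperties()), OutputKafka keeps its two publish paths: a synchronous send that blocks on the returned Future, and an asynchronous send whose failures arrive in a callback and are queued for retry. Below is a hedged sketch of those two paths using the plain kafka-clients API; the broker address and topic are placeholders and none of the Log Feeder retry bookkeeping is reproduced.

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

// Sketch only: synchronous vs. asynchronous publish with the Kafka producer API.
public class KafkaPublishSketch {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092");   // placeholder broker list
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

    try (Producer<String, String> producer = new KafkaProducer<>(props)) {
      String topic = "logfeeder-demo";                  // placeholder topic

      // Synchronous path: get() surfaces broker failures to the caller immediately.
      RecordMetadata md = producer.send(new ProducerRecord<>(topic, "sync message")).get();
      System.out.println("wrote offset " + md.offset());

      // Asynchronous path: errors are reported to the callback, where a retry queue
      // (failedMessages in the real class) could pick them up.
      producer.send(new ProducerRecord<>(topic, "async message"), (metadata, exception) -> {
        if (exception != null) {
          System.err.println("send failed: " + exception.getMessage());
        }
      });
      producer.flush();
    }
  }
}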

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java
new file mode 100644
index 0000000..2c81c19
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java
@@ -0,0 +1,250 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.output;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.logconfig.FilterLogData;
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.MurmurHash;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+public class OutputManager {
+  private static final Logger LOG = Logger.getLogger(OutputManager.class);
+
+  private static final int HASH_SEED = 31174077;
+  private static final int MAX_OUTPUT_SIZE = 32765; // 32766-1
+
+  private List<Output> outputs = new ArrayList<Output>();
+
+  private boolean addMessageMD5 = true;
+
+  private static long docCounter = 0;
+  private MetricData messageTruncateMetric = new MetricData(null, false);
+
+  public List<Output> getOutputs() {
+    return outputs;
+  }
+
+  public void add(Output output) {
+    this.outputs.add(output);
+  }
+
+  public void retainUsedOutputs(Collection<Output> usedOutputs) {
+    outputs.retainAll(usedOutputs);
+  }
+
+  public void init() throws Exception {
+    for (Output output : outputs) {
+      output.init();
+    }
+  }
+
+  public void write(Map<String, Object> jsonObj, InputMarker inputMarker) {
+    Input input = inputMarker.input;
+
+    // Update the block with the context fields
+    for (Map.Entry<String, String> entry : input.getContextFields().entrySet()) {
+      if (jsonObj.get(entry.getKey()) == null) {
+        jsonObj.put(entry.getKey(), entry.getValue());
+      }
+    }
+
+    // TODO: Ideally most of the overrides should be configurable
+
+    if (jsonObj.get("type") == null) {
+      jsonObj.put("type", input.getStringValue("type"));
+    }
+    if (jsonObj.get("path") == null && input.getFilePath() != null) {
+      jsonObj.put("path", input.getFilePath());
+    }
+    if (jsonObj.get("path") == null && input.getStringValue("path") != null) {
+      jsonObj.put("path", input.getStringValue("path"));
+    }
+    if (jsonObj.get("host") == null && LogFeederUtil.hostName != null) {
+      jsonObj.put("host", LogFeederUtil.hostName);
+    }
+    if (jsonObj.get("ip") == null && LogFeederUtil.ipAddress != null) {
+      jsonObj.put("ip", LogFeederUtil.ipAddress);
+    }
+    if (jsonObj.get("level") == null) {
+      jsonObj.put("level", LogFeederConstants.LOG_LEVEL_UNKNOWN);
+    }
+    
+    if (input.isUseEventMD5() || input.isGenEventMD5()) {
+      String prefix = "";
+      Object logtimeObj = jsonObj.get("logtime");
+      if (logtimeObj != null) {
+        if (logtimeObj instanceof Date) {
+          prefix = "" + ((Date) logtimeObj).getTime();
+        } else {
+          prefix = logtimeObj.toString();
+        }
+      }
+      
+      Long eventMD5 = MurmurHash.hash64A(LogFeederUtil.getGson().toJson(jsonObj).getBytes(), HASH_SEED);
+      if (input.isGenEventMD5()) {
+        jsonObj.put("event_md5", prefix + eventMD5.toString());
+      }
+      if (input.isUseEventMD5()) {
+        jsonObj.put("id", prefix + eventMD5.toString());
+      }
+    }
+
+    jsonObj.put("seq_num", new Long(docCounter++));
+    if (jsonObj.get("id") == null) {
+      jsonObj.put("id", UUID.randomUUID().toString());
+    }
+    if (jsonObj.get("event_count") == null) {
+      jsonObj.put("event_count", new Integer(1));
+    }
+    if (inputMarker.lineNumber > 0) {
+      jsonObj.put("logfile_line_number", new Integer(inputMarker.lineNumber));
+    }
+    if (jsonObj.containsKey("log_message")) {
+      // TODO: Let's check size only for log_message for now
+      String logMessage = (String) jsonObj.get("log_message");
+      logMessage = truncateLongLogMessage(jsonObj, input, logMessage);
+      if (addMessageMD5) {
+        jsonObj.put("message_md5", "" + MurmurHash.hash64A(logMessage.getBytes(), 31174077));
+      }
+    }
+    
+    if (FilterLogData.INSTANCE.isAllowed(jsonObj)) {
+      for (Output output : input.getOutputList()) {
+        try {
+          output.write(jsonObj, inputMarker);
+        } catch (Exception e) {
+          LOG.error("Error writing to " + output.getShortDescription(), e);
+        }
+      }
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  private String truncateLongLogMessage(Map<String, Object> jsonObj, Input input, String logMessage) {
+    if (logMessage != null && logMessage.getBytes().length > MAX_OUTPUT_SIZE) {
+      messageTruncateMetric.value++;
+      String logMessageKey = this.getClass().getSimpleName() + "_MESSAGESIZE";
+      LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Message is too big. size=" + logMessage.getBytes().length +
+          ", input=" + input.getShortDescription() + ". Truncating to " + MAX_OUTPUT_SIZE + ", first up to 100 characters=" +
+          StringUtils.abbreviate(logMessage, 100), null, LOG, Level.WARN);
+      logMessage = new String(logMessage.getBytes(), 0, MAX_OUTPUT_SIZE);
+      jsonObj.put("log_message", logMessage);
+      List<String> tagsList = (List<String>) jsonObj.get("tags");
+      if (tagsList == null) {
+        tagsList = new ArrayList<String>();
+        jsonObj.put("tags", tagsList);
+      }
+      tagsList.add("error_message_truncated");
+    }
+    return logMessage;
+  }
+
+  public void write(String jsonBlock, InputMarker inputMarker) {
+    if (FilterLogData.INSTANCE.isAllowed(jsonBlock)) {
+      for (Output output : inputMarker.input.getOutputList()) {
+        try {
+          output.write(jsonBlock, inputMarker);
+        } catch (Exception e) {
+          LOG.error("Error writing to " + output.getShortDescription(), e);
+        }
+      }
+    }
+  }
+
+  public void copyFile(File inputFile, InputMarker inputMarker) {
+    Input input = inputMarker.input;
+    for (Output output : input.getOutputList()) {
+      try {
+        output.copyFile(inputFile, inputMarker);
+      } catch (Exception e) {
+        LOG.error("Error copying file to " + output.getShortDescription(), e);
+      }
+    }
+  }
+
+  public void logStats() {
+    for (Output output : outputs) {
+      output.logStat();
+    }
+    LogFeederUtil.logStatForMetric(messageTruncateMetric, "Stat: Messages Truncated", "");
+  }
+
+  public void addMetricsContainers(List<MetricData> metricsList) {
+    metricsList.add(messageTruncateMetric);
+    for (Output output : outputs) {
+      output.addMetricsContainers(metricsList);
+    }
+  }
+
+  public void close() {
+    LOG.info("Close called for outputs ...");
+    for (Output output : outputs) {
+      try {
+        output.setDrain(true);
+        output.close();
+      } catch (Exception e) {
+        // Ignore
+      }
+    }
+    
+    // Need to get this value from property
+    int iterations = 30;
+    int waitTimeMS = 1000;
+    for (int i = 0; i < iterations; i++) {
+      boolean allClosed = true;
+      for (Output output : outputs) {
+        if (!output.isClosed()) {
+          try {
+            allClosed = false;
+            LOG.warn("Waiting for output to close. " + output.getShortDescription() + ", " + (iterations - i) + " more seconds");
+            Thread.sleep(waitTimeMS);
+          } catch (Throwable t) {
+            // Ignore
+          }
+        }
+      }
+      if (allClosed) {
+        LOG.info("All outputs are closed. Iterations=" + i);
+        return;
+      }
+    }
+
+    LOG.warn("Some outputs were not closed after " + iterations + " iterations");
+    for (Output output : outputs) {
+      if (!output.isClosed()) {
+        LOG.warn("Output not closed. Will ignore it. " + output.getShortDescription() + ", pendingCount=" + output.getPendingCount());
+      }
+    }
+  }
+}
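
The new OutputManager.write() is essentially the old OutputMgr logic with clearer structure: each event map is filled with defaults (type, path, host, ip, level, id, event_count, seq_num), optionally stamped with MD5-based ids, and oversized log_message values are truncated and tagged before the event is handed to the configured outputs. The sketch below shows only the fill-missing-fields step on a plain Map; the field names follow the patch, while the class name and the host value are invented for the example.

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

// Sketch of the default-filling step that precedes dispatch to the outputs.
public class EventEnrichSketch {
  private static long docCounter = 0;

  static Map<String, Object> enrich(Map<String, Object> event, String host) {
    event.putIfAbsent("host", host);
    event.putIfAbsent("level", "UNKNOWN");              // stand-in for LOG_LEVEL_UNKNOWN
    event.putIfAbsent("id", UUID.randomUUID().toString());
    event.putIfAbsent("event_count", 1);
    event.put("seq_num", docCounter++);                 // always overwritten, as in the patch
    return event;
  }

  public static void main(String[] args) {
    Map<String, Object> event = new HashMap<>();
    event.put("log_message", "something went wrong");
    System.out.println(enrich(event, "example-host"));
  }
}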

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputMgr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputMgr.java
deleted file mode 100644
index 0a6b7fa..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputMgr.java
+++ /dev/null
@@ -1,263 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder.output;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-
-import org.apache.ambari.logfeeder.input.Input;
-import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
-import org.apache.ambari.logfeeder.logconfig.filter.FilterLogData;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-
-public class OutputMgr {
-  private static final Logger logger = Logger.getLogger(OutputMgr.class);
-
-  private Collection<Output> outputList = new ArrayList<Output>();
-
-  private boolean addMessageMD5 = true;
-
-  private int MAX_OUTPUT_SIZE = 32765; // 32766-1
-  private static long doc_counter = 0;
-  private MetricCount messageTruncateMetric = new MetricCount();
-
-  
-  public Collection<Output> getOutputList() {
-    return outputList;
-  }
-
-  public void setOutputList(Collection<Output> outputList) {
-    this.outputList = outputList;
-  }
-
-  public void write(Map<String, Object> jsonObj, InputMarker inputMarker) {
-    Input input = inputMarker.input;
-
-    // Update the block with the context fields
-    for (Map.Entry<String, String> entry : input.getContextFields()
-      .entrySet()) {
-      if (jsonObj.get(entry.getKey()) == null) {
-        jsonObj.put(entry.getKey(), entry.getValue());
-      }
-    }
-
-    // TODO: Ideally most of the overrides should be configurable
-
-    // Add the input type
-    if (jsonObj.get("type") == null) {
-      jsonObj.put("type", input.getStringValue("type"));
-    }
-    if (jsonObj.get("path") == null && input.getFilePath() != null) {
-      jsonObj.put("path", input.getFilePath());
-    }
-    if (jsonObj.get("path") == null && input.getStringValue("path") != null) {
-      jsonObj.put("path", input.getStringValue("path"));
-    }
-
-    // Add host if required
-    if (jsonObj.get("host") == null && LogFeederUtil.hostName != null) {
-      jsonObj.put("host", LogFeederUtil.hostName);
-    }
-    // Add IP if required
-    if (jsonObj.get("ip") == null && LogFeederUtil.ipAddress != null) {
-      jsonObj.put("ip", LogFeederUtil.ipAddress);
-    }
-    
-    //Add level
-    if (jsonObj.get("level") == null) {
-      jsonObj.put("level", LogFeederConstants.LOG_LEVEL_UNKNOWN);
-    }
-    if (input.isUseEventMD5() || input.isGenEventMD5()) {
-      String prefix = "";
-      Object logtimeObj = jsonObj.get("logtime");
-      if (logtimeObj != null) {
-        if (logtimeObj instanceof Date) {
-          prefix = "" + ((Date) logtimeObj).getTime();
-        } else {
-          prefix = logtimeObj.toString();
-        }
-      }
-      Long eventMD5 = LogFeederUtil.genHash(LogFeederUtil.getGson()
-        .toJson(jsonObj));
-      if (input.isGenEventMD5()) {
-        jsonObj.put("event_md5", prefix + eventMD5.toString());
-      }
-      if (input.isUseEventMD5()) {
-        jsonObj.put("id", prefix + eventMD5.toString());
-      }
-    }
-
-    // jsonObj.put("@timestamp", new Date());
-    jsonObj.put("seq_num", new Long(doc_counter++));
-    if (jsonObj.get("id") == null) {
-      jsonObj.put("id", UUID.randomUUID().toString());
-    }
-    if (jsonObj.get("event_count") == null) {
-      jsonObj.put("event_count", new Integer(1));
-    }
-    if (inputMarker.lineNumber > 0) {
-      jsonObj.put("logfile_line_number", new Integer(
-        inputMarker.lineNumber));
-    }
-    if (jsonObj.containsKey("log_message")) {
-      // TODO: Let's check size only for log_message for now
-      String logMessage = (String) jsonObj.get("log_message");
-      if (logMessage != null
-        && logMessage.getBytes().length > MAX_OUTPUT_SIZE) {
-        messageTruncateMetric.count++;
-        final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-          + "_MESSAGESIZE";
-        LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
-          "Message is too big. size="
-            + logMessage.getBytes().length + ", input="
-            + input.getShortDescription()
-            + ". Truncating to " + MAX_OUTPUT_SIZE
-            + ", first upto 100 characters="
-            + LogFeederUtil.subString(logMessage, 100),
-          null, logger, Level.WARN);
-        logMessage = new String(logMessage.getBytes(), 0,
-          MAX_OUTPUT_SIZE);
-        jsonObj.put("log_message", logMessage);
-        // Add error tags
-        @SuppressWarnings("unchecked")
-        List<String> tagsList = (List<String>) jsonObj.get("tags");
-        if (tagsList == null) {
-          tagsList = new ArrayList<String>();
-          jsonObj.put("tags", tagsList);
-        }
-        tagsList.add("error_message_truncated");
-
-      }
-      if (addMessageMD5) {
-        jsonObj.put("message_md5",
-          "" + LogFeederUtil.genHash(logMessage));
-      }
-    }
-    //check log is allowed to send output
-    if (FilterLogData.INSTANCE.isAllowed(jsonObj)) {
-      for (Output output : input.getOutputList()) {
-        try {
-          output.write(jsonObj, inputMarker);
-        } catch (Exception e) {
-          logger.error("Error writing. to " + output.getShortDescription(), e);
-        }
-      }
-    }
-  }
-
-  public void write(String jsonBlock, InputMarker inputMarker) {
-    //check log is allowed to send output
-    if (FilterLogData.INSTANCE.isAllowed(jsonBlock)) {
-      for (Output output : inputMarker.input.getOutputList()) {
-        try {
-          output.write(jsonBlock, inputMarker);
-        } catch (Exception e) {
-          logger.error("Error writing. to " + output.getShortDescription(), e);
-        }
-      }
-    }
-  }
-
-  public void close() {
-    logger.info("Close called for outputs ...");
-    for (Output output : outputList) {
-      try {
-        output.setDrain(true);
-        output.close();
-      } catch (Exception e) {
-        // Ignore
-      }
-    }
-    // Need to get this value from property
-    int iterations = 30;
-    int waitTimeMS = 1000;
-    int i;
-    boolean allClosed = true;
-    for (i = 0; i < iterations; i++) {
-      allClosed = true;
-      for (Output output : outputList) {
-        if (!output.isClosed()) {
-          try {
-            allClosed = false;
-            logger.warn("Waiting for output to close. "
-              + output.getShortDescription() + ", "
-              + (iterations - i) + " more seconds");
-            Thread.sleep(waitTimeMS);
-          } catch (Throwable t) {
-            // Ignore
-          }
-        }
-      }
-      if (allClosed) {
-        break;
-      }
-    }
-
-    if (!allClosed) {
-      logger.warn("Some outpus were not closed. Iterations=" + i);
-      for (Output output : outputList) {
-        if (!output.isClosed()) {
-          logger.warn("Output not closed. Will ignore it."
-            + output.getShortDescription() + ", pendingCound="
-            + output.getPendingCount());
-        }
-      }
-    } else {
-      logger.info("All outputs are closed. Iterations=" + i);
-    }
-  }
-
-  public void logStats() {
-    for (Output output : outputList) {
-      output.logStat();
-    }
-    LogFeederUtil.logStatForMetric(messageTruncateMetric,
-      "Stat: Messages Truncated", null);
-  }
-
-  public void addMetricsContainers(List<MetricCount> metricsList) {
-    metricsList.add(messageTruncateMetric);
-    for (Output output : outputList) {
-      output.addMetricsContainers(metricsList);
-    }
-  }
-
-  
-  public void copyFile(File inputFile, InputMarker inputMarker) {
-    Input input = inputMarker.input;
-    for (Output output : input.getOutputList()) {
-      try {
-        output.copyFile(inputFile, inputMarker);
-      }catch (Exception e) {
-        logger.error("Error coyping file . to " + output.getShortDescription(),
-            e);
-      }
-    }
-  }
-}
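
The deleted OutputMgr and its OutputManager replacement share the same shutdown strategy: mark every output as draining, call close(), then poll for up to a fixed number of one-second iterations before giving up on whatever is still open. A hypothetical, self-contained sketch of that wait-for-drain pattern (a stand-in Sink interface is used instead of the real Output class):

import java.util.Arrays;
import java.util.List;

// Sketch of the bounded wait-for-drain loop used when closing outputs.
public class GracefulCloseSketch {
  interface Sink {
    boolean isClosed();
    void close();
  }

  static void closeAll(List<Sink> sinks) throws InterruptedException {
    sinks.forEach(Sink::close);
    int iterations = 30;                                // hard-coded, as in the patch
    for (int i = 0; i < iterations; i++) {
      if (sinks.stream().allMatch(Sink::isClosed)) {
        System.out.println("all outputs closed after " + i + " iterations");
        return;
      }
      Thread.sleep(1000);
    }
    System.out.println("some outputs did not close; ignoring them");
  }

  public static void main(String[] args) throws InterruptedException {
    Sink alreadyClosed = new Sink() {
      public boolean isClosed() { return true; }
      public void close() { }
    };
    closeAll(Arrays.asList(alreadyClosed));
  }
}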

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
index e95f8df..26f1ddb 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
@@ -22,6 +22,7 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
 import org.apache.ambari.logfeeder.LogFeeder;
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.output.spool.LogSpooler;
@@ -47,10 +48,10 @@ import java.util.Map.Entry;
  * </ul>
  */
 public class OutputS3File extends Output implements RolloverCondition, RolloverHandler {
+  private static final Logger LOG = Logger.getLogger(OutputS3File.class);
 
   public static final String INPUT_ATTRIBUTE_TYPE = "type";
   public static final String GLOBAL_CONFIG_S3_PATH_SUFFIX = "global.config.json";
-  static private Logger logger = Logger.getLogger(OutputS3File.class);
 
   private LogSpooler logSpooler;
   private S3OutputConfiguration s3OutputConfiguration;
@@ -72,23 +73,21 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
   @Override
   public void copyFile(File inputFile, InputMarker inputMarker) {
     String type = inputMarker.input.getStringValue(INPUT_ATTRIBUTE_TYPE);
-    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration,
-        S3Util.INSTANCE, false, type);
-    String resolvedPath = s3Uploader.uploadFile(inputFile,
-        inputMarker.input.getStringValue(INPUT_ATTRIBUTE_TYPE));
+    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, false, type);
+    String resolvedPath = s3Uploader.uploadFile(inputFile, inputMarker.input.getStringValue(INPUT_ATTRIBUTE_TYPE));
 
     uploadConfig(inputMarker, type, s3OutputConfiguration, resolvedPath);
   }
 
-  private void uploadConfig(InputMarker inputMarker, String type,
-                            S3OutputConfiguration s3OutputConfiguration, String resolvedPath) {
+  private void uploadConfig(InputMarker inputMarker, String type, S3OutputConfiguration s3OutputConfiguration,
+      String resolvedPath) {
 
     ArrayList<Map<String, Object>> filters = new ArrayList<>();
     addFilters(filters, inputMarker.input.getFirstFilter());
     Map<String, Object> inputConfig = new HashMap<>();
     inputConfig.putAll(inputMarker.input.getConfigs());
-    String s3CompletePath = S3Util.S3_PATH_START_WITH + s3OutputConfiguration.getS3BucketName()
-        + S3Util.S3_PATH_SEPARATOR + resolvedPath;
+    String s3CompletePath = LogFeederConstants.S3_PATH_START_WITH + s3OutputConfiguration.getS3BucketName() +
+        LogFeederConstants.S3_PATH_SEPARATOR + resolvedPath;
     inputConfig.put("path", s3CompletePath);
 
     ArrayList<Map<String, Object>> inputConfigList = new ArrayList<>();
@@ -117,17 +116,15 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
     }
   }
 
-  private void writeConfigToS3(Map<String, Object> configToWrite, String s3KeySuffix,
-                              S3OutputConfiguration s3OutputConfiguration) {
+  private void writeConfigToS3(Map<String, Object> configToWrite, String s3KeySuffix, S3OutputConfiguration s3OutputConfiguration) {
     Gson gson = new GsonBuilder().setPrettyPrinting().create();
     String configJson = gson.toJson(configToWrite);
 
-    String s3ResolvedKey = new S3LogPathResolver().
-        getResolvedPath(getStringValue("s3_config_dir"), s3KeySuffix, s3OutputConfiguration.getCluster());
+    String s3ResolvedKey = new S3LogPathResolver().getResolvedPath(getStringValue("s3_config_dir"), s3KeySuffix,
+        s3OutputConfiguration.getCluster());
 
-    S3Util.INSTANCE.writeIntoS3File(configJson, s3OutputConfiguration.getS3BucketName(),
-        s3ResolvedKey, s3OutputConfiguration.getS3AccessKey(),
-        s3OutputConfiguration.getS3SecretKey());
+    S3Util.writeIntoS3File(configJson, s3OutputConfiguration.getS3BucketName(), s3ResolvedKey,
+        s3OutputConfiguration.getS3AccessKey(), s3OutputConfiguration.getS3SecretKey());
   }
 
   private String getComponentConfigFileName(String componentName) {
@@ -136,7 +133,7 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
 
 
   private Map<String, Object> getGlobalConfig() {
-    Map<String, Object> globalConfig = LogFeeder.globalMap;
+    Map<String, Object> globalConfig = LogFeeder.globalConfigs;
     if (globalConfig == null) {
       globalConfig = new HashMap<>();
     }
@@ -173,8 +170,7 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
       globalConfig.put("copy_file", false);
       globalConfig.put("process_file", true);
       globalConfig.put("tail", false);
-      Map<String, Object> addFields = (Map<String, Object>) globalConfig
-          .get("add_fields");
+      Map<String, Object> addFields = (Map<String, Object>) globalConfig.get("add_fields");
       if (addFields == null) {
         addFields = new HashMap<>();
       }
@@ -216,7 +212,7 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
 
   @VisibleForTesting
   protected S3Uploader createUploader(String logType) {
-    S3Uploader uploader = new S3Uploader(s3OutputConfiguration, S3Util.INSTANCE, true, logType);
+    S3Uploader uploader = new S3Uploader(s3OutputConfiguration, true, logType);
     uploader.startUploaderThread();
     return uploader;
   }
@@ -224,8 +220,7 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
   @VisibleForTesting
   protected LogSpooler createSpooler(String filePath) {
     String spoolDirectory = LogFeederUtil.getLogfeederTempDir() + "/s3/service";
-    logger.info(String.format("Creating spooler with spoolDirectory=%s, filePath=%s",
-        spoolDirectory, filePath));
+    LOG.info(String.format("Creating spooler with spoolDirectory=%s, filePath=%s", spoolDirectory, filePath));
     return new LogSpooler(spoolDirectory, new File(filePath).getName()+"-", this, this,
         s3OutputConfiguration.getRolloverTimeThresholdSecs());
   }
@@ -244,7 +239,7 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
     long currentSize = spoolFile.length();
     boolean result = (currentSize >= s3OutputConfiguration.getRolloverSizeThresholdBytes());
     if (result) {
-      logger.info(String.format("Rolling over %s, current size %d, threshold size %d", spoolFile, currentSize,
+      LOG.info(String.format("Rolling over %s, current size %d, threshold size %d", spoolFile, currentSize,
           s3OutputConfiguration.getRolloverSizeThresholdBytes()));
     }
     return result;
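
For readers following the rollover logic above: OutputS3File acts as both the RolloverCondition and the RolloverHandler of its LogSpooler, so the spooler asks it when to roll a file and then hands the rolled file back for upload. A minimal standalone sketch of the size check is below (illustrative only, not part of this patch; the threshold value and file path are made up):

    import java.io.File;

    // Size-based rollover check, mirroring the shape of OutputS3File.shouldRollover(...)
    public class SizeBasedRolloverSketch {
      // hypothetical threshold: 16 MB
      private static final long ROLLOVER_SIZE_THRESHOLD_BYTES = 16L * 1024 * 1024;

      static boolean shouldRollover(File spoolFile) {
        // roll over once the spool file reaches the configured size threshold
        return spoolFile.length() >= ROLLOVER_SIZE_THRESHOLD_BYTES;
      }

      public static void main(String[] args) {
        File spoolFile = new File("/tmp/logfeeder/s3/service/spool.log"); // hypothetical path
        System.out.println("rollover needed: " + shouldRollover(spoolFile));
      }
    }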

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
index cd9ce4d..47f139d 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
@@ -34,7 +34,8 @@ import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.logconfig.FetchConfigFromSolr;
+import org.apache.ambari.logfeeder.logconfig.LogConfigHandler;
+import org.apache.ambari.logfeeder.util.DateUtil;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
@@ -76,7 +77,17 @@ public class OutputSolr extends Output {
 
   private BlockingQueue<OutputData> outgoingBuffer = null;
   private List<SolrWorkerThread> workerThreadList = new ArrayList<>();
-
+  
+  @Override
+  protected String getStatMetricName() {
+    return "output.solr.write_logs";
+  }
+  
+  @Override
+  protected String getWriteBytesMetricName() {
+    return "output.solr.write_bytes";
+  }
+  
   @Override
   public void init() throws Exception {
     super.init();
@@ -87,9 +98,6 @@ public class OutputSolr extends Output {
   }
 
   private void initParams() throws Exception {
-    statMetric.metricsName = "output.solr.write_logs";
-    writeBytesMetric.metricsName = "output.solr.write_bytes";
-
     splitMode = getStringValue("splits_interval_mins", "none");
     if (!splitMode.equalsIgnoreCase("none")) {
       splitInterval = getIntValue("split_interval_mins", DEFAULT_SPLIT_INTERVAL);
@@ -204,10 +212,8 @@ public class OutputSolr extends Output {
         LOG.info("Ping to Solr server is successful for worker=" + count);
       } else {
         LOG.warn(
-            String.format(
-                "Ping to Solr server failed. It would check again. worker=%d, "
-                    + "solrUrl=%s, zkConnectString=%s, collection=%s, response=%s",
-                count, solrUrl, zkConnectString, collection, response));
+            String.format("Ping to Solr server failed. It would check again. worker=%d, solrUrl=%s, zkConnectString=%s, " +
+                "collection=%s, response=%s", count, solrUrl, zkConnectString, collection, response));
       }
     } catch (Throwable t) {
       LOG.warn(String.format(
@@ -223,7 +229,7 @@ public class OutputSolr extends Output {
     
     while (true) {
       LOG.info("Checking if config is available");
-      if (FetchConfigFromSolr.isFilterAvailable()) {
+      if (LogConfigHandler.isFilterAvailable()) {
         LOG.info("Config is available");
         return;
       }
@@ -256,7 +262,7 @@ public class OutputSolr extends Output {
 
   private void useActualDateIfNeeded(Map<String, Object> jsonObj) {
     if (skipLogtime) {
-      jsonObj.put("logtime", LogFeederUtil.getActualDateStr());
+      jsonObj.put("logtime", DateUtil.getActualDateStr());
     }
   }
 
@@ -324,7 +330,7 @@ public class OutputSolr extends Output {
 
     private final SolrClient solrClient;
     private final Collection<SolrInputDocument> localBuffer = new ArrayList<>();
-    private final Map<String, InputMarker> latestInputMarkerList = new HashMap<>();
+    private final Map<String, InputMarker> latestInputMarkers = new HashMap<>();
 
     private long localBufferBytesSize = 0;
 
@@ -352,17 +358,16 @@ public class OutputSolr extends Output {
             }
           }
 
-          if (localBuffer.size() > 0 && ((outputData == null && isDrain())
-              || (nextDispatchDuration <= 0 || localBuffer.size() >= maxBufferSize))) {
+          if (localBuffer.size() > 0 && ((outputData == null && isDrain()) ||
+              (nextDispatchDuration <= 0 || localBuffer.size() >= maxBufferSize))) {
             boolean response = sendToSolr(outputData);
             if( isDrain() && !response) {
               //Since sending to Solr response failed and it is in draining mode, let's break;
-              LOG.warn("In drain mode and sending to Solr failed. So exiting. output=" 
-                  + getShortDescription());
+              LOG.warn("In drain mode and sending to Solr failed. So exiting. output=" + getShortDescription());
               break;
             }
           }
-          if( localBuffer.size() == 0 ) {
+          if (localBuffer.size() == 0) {
             //If localBuffer is empty, then reset the timer
             lastDispatchTime = currTimeMS;
           }
@@ -403,8 +408,7 @@ public class OutputSolr extends Output {
         } catch (IOException | SolrException exception) {
           // Transient error, lets block till it is available
           try {
-            LOG.warn("Solr is not reachable. Going to retry after "
-                + RETRY_INTERVAL + " seconds. " + "output="
+            LOG.warn("Solr is not reachable. Going to retry after " + RETRY_INTERVAL + " seconds. " + "output="
                 + getShortDescription(), exception);
             Thread.sleep(RETRY_INTERVAL * 1000);
           } catch (Throwable t) {
@@ -414,8 +418,8 @@ public class OutputSolr extends Output {
           // Something unknown happened. Let's not block because of this error. 
           // Clear the buffer
           String logMessageKey = this.getClass().getSimpleName() + "_SOLR_UPDATE_EXCEPTION";
-          LogFeederUtil.logErrorMessageByInterval(logMessageKey,
-              "Error sending log message to server. Dropping logs", serverException, LOG, Level.ERROR);
+          LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error sending log message to server. Dropping logs",
+              serverException, LOG, Level.ERROR);
           resetLocalBuffer();
           break;
         }
@@ -447,7 +451,7 @@ public class OutputSolr extends Output {
               Level.ERROR);
         }
       }
-      latestInputMarkerList.put(outputData.inputMarker.base64FileKey, outputData.inputMarker);
+      latestInputMarkers.put(outputData.inputMarker.base64FileKey, outputData.inputMarker);
       localBuffer.add(document);
     }
 
@@ -479,9 +483,9 @@ public class OutputSolr extends Output {
         LogFeederUtil.logErrorMessageByInterval(logMessageKey,
             String.format("Error writing to Solr. response=%s, log=%s", response, outputData), null, LOG, Level.ERROR);
       }
-      statMetric.count += localBuffer.size();
-      writeBytesMetric.count += localBufferBytesSize;
-      for (InputMarker inputMarker : latestInputMarkerList.values()) {
+      statMetric.value += localBuffer.size();
+      writeBytesMetric.value += localBufferBytesSize;
+      for (InputMarker inputMarker : latestInputMarkers.values()) {
         inputMarker.input.checkIn(inputMarker);
       }
     }
@@ -499,7 +503,7 @@ public class OutputSolr extends Output {
     public void resetLocalBuffer() {
       localBuffer.clear();
       localBufferBytesSize = 0;
-      latestInputMarkerList.clear();
+      latestInputMarkers.clear();
     }
 
     public boolean isDone() {
@@ -512,9 +516,7 @@ public class OutputSolr extends Output {
   }
 
   @Override
-  public void copyFile(File inputFile, InputMarker inputMarker)
-      throws UnsupportedOperationException {
-    throw new UnsupportedOperationException(
-        "copyFile method is not yet supported for output=solr");
+  public void copyFile(File inputFile, InputMarker inputMarker) throws UnsupportedOperationException {
+    throw new UnsupportedOperationException("copyFile method is not yet supported for output=solr");
   }
 }
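
The metric-name change above replaces field assignments in initParams() with overridable hooks. A self-contained template-method sketch of that pattern (the base class body here is a simplified assumption, not the real Output class):

    // Base class asks overridable hooks for its metric names; subclasses such as
    // OutputSolr only declare the names instead of mutating statMetric/writeBytesMetric.
    abstract class OutputMetricNamesSketch {
      protected String getStatMetricName() { return null; }        // default: no stat metric
      protected String getWriteBytesMetricName() { return null; }  // default: no bytes metric

      void initMetrics() {
        System.out.println("stat metric:  " + getStatMetricName());
        System.out.println("bytes metric: " + getWriteBytesMetricName());
      }
    }

    public class SolrMetricNamesExample extends OutputMetricNamesSketch {
      @Override protected String getStatMetricName() { return "output.solr.write_logs"; }
      @Override protected String getWriteBytesMetricName() { return "output.solr.write_bytes"; }

      public static void main(String[] args) {
        new SolrMetricNamesExample().initMetrics();
      }
    }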

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java
index 58282e0..8c544cf 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,9 +18,9 @@
 
 package org.apache.ambari.logfeeder.output;
 
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.ambari.logfeeder.util.PlaceholderUtil;
-import org.apache.ambari.logfeeder.util.S3Util;
 
 import java.util.HashMap;
 
@@ -40,7 +40,7 @@ public class S3LogPathResolver {
   public String getResolvedPath(String baseKeyPrefix, String keySuffix, String cluster) {
     HashMap<String, String> contextParam = buildContextParam(cluster);
     String resolvedKeyPrefix = PlaceholderUtil.replaceVariables(baseKeyPrefix, contextParam);
-    return resolvedKeyPrefix + S3Util.S3_PATH_SEPARATOR + keySuffix;
+    return resolvedKeyPrefix + LogFeederConstants.S3_PATH_SEPARATOR + keySuffix;
   }
 
   private HashMap<String, String> buildContextParam(String cluster) {
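
A usage sketch for the resolver (illustrative only; the "$cluster" placeholder syntax and the prefix/suffix values are guesses, check PlaceholderUtil for the exact variable format it substitutes):

    import org.apache.ambari.logfeeder.output.S3LogPathResolver;

    public class S3LogPathResolverSketch {
      public static void main(String[] args) {
        S3LogPathResolver resolver = new S3LogPathResolver();
        // hypothetical key prefix, key suffix and cluster name
        String resolved = resolver.getResolvedPath("logs/$cluster/service", "hdfs_namenode.log.gz", "cl1");
        // expected to print something like logs/cl1/service/hdfs_namenode.log.gz after substitution
        System.out.println(resolved);
      }
    }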

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java
index 485b0d4..e5974c5 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -103,8 +103,7 @@ public class S3OutputConfiguration {
     };
 
     for (int i = 0; i < longValuedKeysToCopy.length; i++) {
-      configs.put(longValuedKeysToCopy[i],
-          configBlock.getLongValue(longValuedKeysToCopy[i], defaultValuesForLongValuedKeys[i]));
+      configs.put(longValuedKeysToCopy[i], configBlock.getLongValue(longValuedKeysToCopy[i], defaultValuesForLongValuedKeys[i]));
     }
 
     configs.put(ADDITIONAL_FIELDS_KEY, configBlock.getNVList(ADDITIONAL_FIELDS_KEY));

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java
index fd59c51..e95a663 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,8 +18,12 @@
 
 package org.apache.ambari.logfeeder.output;
 
+import com.amazonaws.AmazonClientException;
+import com.amazonaws.services.s3.transfer.TransferManager;
+import com.amazonaws.services.s3.transfer.Upload;
 import com.google.common.annotations.VisibleForTesting;
 
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.ambari.logfeeder.util.CompressionUtil;
 import org.apache.ambari.logfeeder.util.S3Util;
 import org.apache.log4j.Logger;
@@ -39,20 +43,18 @@ import java.util.concurrent.atomic.AtomicBoolean;
  * {@link org.apache.ambari.logfeeder.input.InputFile}.
  */
 public class S3Uploader implements Runnable {
+  private static final Logger LOG = Logger.getLogger(S3Uploader.class);
+  
   public static final String POISON_PILL = "POISON-PILL";
-  private static Logger logger = Logger.getLogger(S3Uploader.class);
 
   private final S3OutputConfiguration s3OutputConfiguration;
-  private final S3Util s3UtilInstance;
   private final boolean deleteOnEnd;
-  private String logType;
+  private final String logType;
   private final BlockingQueue<String> fileContextsToUpload;
-  private AtomicBoolean stopRunningThread = new AtomicBoolean(false);
+  private final AtomicBoolean stopRunningThread = new AtomicBoolean(false);
 
-  public S3Uploader(S3OutputConfiguration s3OutputConfiguration, S3Util s3UtilInstance, boolean deleteOnEnd,
-                    String logType) {
+  public S3Uploader(S3OutputConfiguration s3OutputConfiguration, boolean deleteOnEnd, String logType) {
     this.s3OutputConfiguration = s3OutputConfiguration;
-    this.s3UtilInstance = s3UtilInstance;
     this.deleteOnEnd = deleteOnEnd;
     this.logType = logType;
     this.fileContextsToUpload = new LinkedBlockingQueue<>();
@@ -81,7 +83,7 @@ public class S3Uploader implements Runnable {
     stopRunningThread.set(true);
     boolean offerStatus = fileContextsToUpload.offer(POISON_PILL);
     if (!offerStatus) {
-      logger.warn("Could not add poison pill to interrupt uploader thread.");
+      LOG.warn("Could not add poison pill to interrupt uploader thread.");
     }
   }
 
@@ -92,7 +94,7 @@ public class S3Uploader implements Runnable {
   void addFileForUpload(String fileToUpload) {
     boolean offerStatus = fileContextsToUpload.offer(fileToUpload);
     if (!offerStatus) {
-      logger.error("Could not add file " + fileToUpload + " for upload.");
+      LOG.error("Could not add file " + fileToUpload + " for upload.");
     }
   }
 
@@ -102,12 +104,12 @@ public class S3Uploader implements Runnable {
       try {
         String fileNameToUpload = fileContextsToUpload.take();
         if (POISON_PILL.equals(fileNameToUpload)) {
-          logger.warn("Found poison pill while waiting for files to upload, exiting");
+          LOG.warn("Found poison pill while waiting for files to upload, exiting");
           return;
         }
         uploadFile(new File(fileNameToUpload), logType);
       } catch (InterruptedException e) {
-        logger.error("Interrupted while waiting for elements from fileContextsToUpload", e);
+        LOG.error("Interrupted while waiting for elements from fileContextsToUpload", e);
         return;
       }
     }
@@ -130,34 +132,44 @@ public class S3Uploader implements Runnable {
     String compressionAlgo = s3OutputConfiguration.getCompressionAlgo();
 
     String keySuffix = fileToUpload.getName() + "." + compressionAlgo;
-    String s3Path = new S3LogPathResolver().
-        getResolvedPath(s3OutputConfiguration.getS3Path()+S3Util.S3_PATH_SEPARATOR+logType,
-            keySuffix, s3OutputConfiguration.getCluster());
-    logger.info(String.format("keyPrefix=%s, keySuffix=%s, s3Path=%s",
-        s3OutputConfiguration.getS3Path(), keySuffix, s3Path));
+    String s3Path = new S3LogPathResolver().getResolvedPath(
+        s3OutputConfiguration.getS3Path() + LogFeederConstants.S3_PATH_SEPARATOR + logType, keySuffix,
+        s3OutputConfiguration.getCluster());
+    LOG.info(String.format("keyPrefix=%s, keySuffix=%s, s3Path=%s", s3OutputConfiguration.getS3Path(), keySuffix, s3Path));
     File sourceFile = createCompressedFileForUpload(fileToUpload, compressionAlgo);
 
-    logger.info("Starting S3 upload " + sourceFile + " -> " + bucketName + ", " + s3Path);
-    s3UtilInstance.uploadFileTos3(bucketName, s3Path, sourceFile, s3AccessKey,
-        s3SecretKey);
+    LOG.info("Starting S3 upload " + sourceFile + " -> " + bucketName + ", " + s3Path);
+    uploadFileToS3(bucketName, s3Path, sourceFile, s3AccessKey, s3SecretKey);
 
     // delete local compressed file
     sourceFile.delete();
     if (deleteOnEnd) {
-      logger.info("Deleting input file as required");
+      LOG.info("Deleting input file as required");
       if (!fileToUpload.delete()) {
-        logger.error("Could not delete file " + fileToUpload.getAbsolutePath() + " after upload to S3");
+        LOG.error("Could not delete file " + fileToUpload.getAbsolutePath() + " after upload to S3");
       }
     }
     return s3Path;
   }
 
   @VisibleForTesting
+  protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) {
+    TransferManager transferManager = S3Util.getTransferManager(accessKey, secretKey);
+    try {
+      Upload upload = transferManager.upload(bucketName, s3Key, localFile);
+      upload.waitForUploadResult();
+    } catch (AmazonClientException | InterruptedException e) {
+      LOG.error("s3 uploading failed for file :" + localFile.getAbsolutePath(), e);
+    } finally {
+      S3Util.shutdownTransferManager(transferManager);
+    }
+  }
+
+  @VisibleForTesting
   protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) {
-    File outputFile = new File(fileToUpload.getParent(), fileToUpload.getName() + "_"
-        + new Date().getTime() + "." + compressionAlgo);
-    outputFile = CompressionUtil.compressFile(fileToUpload, outputFile,
-        compressionAlgo);
+    File outputFile = new File(fileToUpload.getParent(), fileToUpload.getName() + "_" + new Date().getTime() +
+        "." + compressionAlgo);
+    outputFile = CompressionUtil.compressFile(fileToUpload, outputFile, compressionAlgo);
     return outputFile;
   }
 }
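
The new uploadFileToS3 method delegates to S3Util.getTransferManager/shutdownTransferManager, which are not shown in this patch; presumably they wrap the AWS SDK TransferManager roughly as in the standalone sketch below (the credentials, bucket, key and file are made up):

    import java.io.File;

    import com.amazonaws.AmazonClientException;
    import com.amazonaws.auth.BasicAWSCredentials;
    import com.amazonaws.services.s3.transfer.TransferManager;
    import com.amazonaws.services.s3.transfer.Upload;

    public class TransferManagerUploadSketch {
      public static void main(String[] args) throws Exception {
        TransferManager transferManager = new TransferManager(new BasicAWSCredentials("ACCESS_KEY", "SECRET_KEY"));
        try {
          Upload upload = transferManager.upload("my-log-bucket", "logs/cl1/service/hdfs_namenode.log.gz",
              new File("/tmp/hdfs_namenode.log.gz"));
          upload.waitForCompletion();  // block until the (possibly multipart) upload finishes
        } catch (AmazonClientException e) {
          System.err.println("Upload failed: " + e.getMessage());
        } finally {
          transferManager.shutdownNow();  // release the transfer threads
        }
      }
    }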

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpooler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpooler.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpooler.java
index fb263ba..1f13357 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpooler.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpooler.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -39,8 +39,9 @@ import java.util.concurrent.atomic.AtomicBoolean;
  * {@link RolloverHandler} to trigger the handling of the rolled over file.
  */
 public class LogSpooler {
+  
+  private static final Logger LOG = Logger.getLogger(LogSpooler.class);
   public static final long TIME_BASED_ROLLOVER_DISABLED_THRESHOLD = 0;
-  static private Logger logger = Logger.getLogger(LogSpooler.class);
   static final String fileDateFormat = "yyyy-MM-dd-HH-mm-ss";
 
   private String spoolDirectory;
@@ -98,7 +99,7 @@ public class LogSpooler {
   private void initializeSpoolDirectory() {
     File spoolDir = new File(spoolDirectory);
     if (!spoolDir.exists()) {
-      logger.info("Creating spool directory: " + spoolDir);
+      LOG.info("Creating spool directory: " + spoolDir);
       boolean result = spoolDir.mkdirs();
       if (!result) {
         throw new LogSpoolerException("Could not create spool directory: " + spoolDirectory);
@@ -116,7 +117,7 @@ public class LogSpooler {
           + ", error message: " + e.getLocalizedMessage(), e);
     }
     currentSpoolerContext = new LogSpoolerContext(currentSpoolFile);
-    logger.info("Initialized spool file at path: " + currentSpoolFile);
+    LOG.info("Initialized spool file at path: " + currentSpoolFile);
   }
 
   @VisibleForTesting
@@ -141,7 +142,7 @@ public class LogSpooler {
     currentSpoolBufferedWriter.println(logEvent);
     currentSpoolerContext.logEventSpooled();
     if (rolloverCondition.shouldRollover(currentSpoolerContext)) {
-      logger.info("Trying to rollover based on rollover condition");
+      LOG.info("Trying to rollover based on rollover condition");
       tryRollover();
     }
   }
@@ -154,19 +155,19 @@ public class LogSpooler {
    * rolled over file.
    */
   public void rollover() {
-    logger.info("Rollover condition detected, rolling over file: " + currentSpoolFile);
+    LOG.info("Rollover condition detected, rolling over file: " + currentSpoolFile);
     currentSpoolBufferedWriter.flush();
     if (currentSpoolFile.length()==0) {
-      logger.info("No data in file " + currentSpoolFile + ", not doing rollover");
+      LOG.info("No data in file " + currentSpoolFile + ", not doing rollover");
     } else {
       currentSpoolBufferedWriter.close();
       rolloverHandler.handleRollover(currentSpoolFile);
-      logger.info("Invoked rollover handler with file: " + currentSpoolFile);
+      LOG.info("Invoked rollover handler with file: " + currentSpoolFile);
       initializeSpoolState();
     }
     boolean status = rolloverInProgress.compareAndSet(true, false);
     if (!status) {
-      logger.error("Should have reset rollover flag!!");
+      LOG.error("Should have reset rollover flag!!");
     }
   }
 
@@ -174,7 +175,7 @@ public class LogSpooler {
     if (rolloverInProgress.compareAndSet(false, true)) {
       rollover();
     } else {
-      logger.warn("Ignoring rollover call as rollover already in progress for file " +
+      LOG.warn("Ignoring rollover call as rollover already in progress for file " +
           currentSpoolFile);
     }
   }
@@ -197,7 +198,7 @@ public class LogSpooler {
   private class LogSpoolerRolloverTimerTask extends TimerTask {
     @Override
     public void run() {
-      logger.info("Trying rollover based on time");
+      LOG.info("Trying rollover based on time");
       tryRollover();
     }
   }
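
The tryRollover path above keeps its compareAndSet guard, so a timer-driven rollover and an event-driven rollover can never run at the same time. A self-contained sketch of that guard (simplified; the real class resets the flag inside rollover() rather than in a finally block):

    import java.util.concurrent.atomic.AtomicBoolean;

    public class RolloverGuardSketch {
      private final AtomicBoolean rolloverInProgress = new AtomicBoolean(false);

      void tryRollover() {
        // only one caller wins the compareAndSet; concurrent attempts are skipped
        if (rolloverInProgress.compareAndSet(false, true)) {
          try {
            System.out.println("rolling over");   // rollover() work would happen here
          } finally {
            rolloverInProgress.set(false);        // simplified reset for the sketch
          }
        } else {
          System.out.println("rollover already in progress, skipping");
        }
      }

      public static void main(String[] args) {
        new RolloverGuardSketch().tryRollover();
      }
    }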

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerContext.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerContext.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerContext.java
index 084d6a2..616300f 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerContext.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerContext.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerException.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerException.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerException.java
index 1e12fb7..14bb139 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerException.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerException.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverCondition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverCondition.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverCondition.java
index 8279645..48ace11 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverCondition.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverCondition.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverHandler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverHandler.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverHandler.java
index 11308e4..2ec2708 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverHandler.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/spool/RolloverHandler.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AWSUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AWSUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AWSUtil.java
index 15f7594..f814a92 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AWSUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AWSUtil.java
@@ -20,62 +20,20 @@ package org.apache.ambari.logfeeder.util;
 
 import org.apache.log4j.Logger;
 
-import com.amazonaws.AmazonServiceException;
 import com.amazonaws.auth.AWSCredentials;
 import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.identitymanagement.AmazonIdentityManagementClient;
 
-public enum AWSUtil {
-  INSTANCE;
+public class AWSUtil {
   private static final Logger LOG = Logger.getLogger(AWSUtil.class);
 
-  public String getAwsUserName(String accessKey, String secretKey) {
-    String username = null;
-    AWSCredentials awsCredentials = createAWSCredentials(accessKey, secretKey);
-    AmazonIdentityManagementClient amazonIdentityManagementClient;
-    if (awsCredentials != null) {
-      amazonIdentityManagementClient = new AmazonIdentityManagementClient(
-          awsCredentials);
-    } else {
-      // create default client
-      amazonIdentityManagementClient = new AmazonIdentityManagementClient();
-    }
-    try {
-      username = amazonIdentityManagementClient.getUser().getUser()
-          .getUserName();
-    } catch (AmazonServiceException e) {
-      if (e.getErrorCode().compareTo("AccessDenied") == 0) {
-        String arn = null;
-        String msg = e.getMessage();
-        int arnIdx = msg.indexOf("arn:aws");
-        if (arnIdx != -1) {
-          int arnSpace = msg.indexOf(" ", arnIdx);
-          // should be similar to "arn:aws:iam::111111111111:user/username"
-          arn = msg.substring(arnIdx, arnSpace);
-        }
-        if (arn != null) {
-          String[] arnParts = arn.split(":");
-          if (arnParts != null && arnParts.length > 5) {
-            username = arnParts[5];
-            if (username != null) {
-              username = username.replace("user/", "");
-            }
-          }
-        }
-      }
-    } catch (Exception exception) {
-      LOG.error(
-          "Error in getting username :" + exception.getLocalizedMessage(),
-          exception.getCause());
-    }
-    return username;
+  private AWSUtil() {
+    throw new UnsupportedOperationException();
   }
 
-  public AWSCredentials createAWSCredentials(String accessKey, String secretKey) {
+  public static AWSCredentials createAWSCredentials(String accessKey, String secretKey) {
     if (accessKey != null && secretKey != null) {
       LOG.debug("Creating aws client as per new accesskey and secretkey");
-      AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey,
-          secretKey);
+      AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
       return awsCredentials;
     } else {
       return null;
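
With AWSUtil reduced to a static helper, callers obtain credentials as in the sketch below (the keys are placeholders; a null return presumably lets the caller fall back to the AWS default credential chain, as the removed getAwsUserName code did):

    import com.amazonaws.auth.AWSCredentials;
    import org.apache.ambari.logfeeder.util.AWSUtil;

    public class AWSUtilSketch {
      public static void main(String[] args) {
        // hypothetical keys read from the S3 output configuration
        AWSCredentials credentials = AWSUtil.createAWSCredentials("MY_ACCESS_KEY", "MY_SECRET_KEY");
        System.out.println(credentials != null
            ? "explicit credentials for " + credentials.getAWSAccessKeyId()
            : "no keys given, use the default provider chain");
      }
    }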

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AliasUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AliasUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AliasUtil.java
index a92ba29..5049b62 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AliasUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AliasUtil.java
@@ -21,69 +21,90 @@ package org.apache.ambari.logfeeder.util;
 import java.io.File;
 import java.util.HashMap;
 
+import org.apache.ambari.logfeeder.filter.Filter;
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.mapper.Mapper;
+import org.apache.ambari.logfeeder.output.Output;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Logger;
 
 public class AliasUtil {
 
-  private static Logger logger = Logger.getLogger(AliasUtil.class);
+  private static final Logger LOG = Logger.getLogger(AliasUtil.class);
 
-  private static AliasUtil instance = null;
+  private static final String ALIAS_CONFIG_JSON = "alias_config.json";
+  private static HashMap<String, Object> aliasMap = null;
 
-  private static String aliasConfigJson = "alias_config.json";
-
-  private HashMap<String, Object> aliasMap = null;
-
-  public static enum ALIAS_TYPE {
-    INPUT, FILTER, MAPPER, OUTPUT
+  static {
+    File jsonFile = FileUtil.getFileFromClasspath(ALIAS_CONFIG_JSON);
+    if (jsonFile != null) {
+      aliasMap = FileUtil.readJsonFromFile(jsonFile);
+    }
   }
 
-  public static enum ALIAS_PARAM {
-    KLASS
+  public static enum AliasType {
+    INPUT, FILTER, MAPPER, OUTPUT
   }
 
   private AliasUtil() {
-    init();
+    throw new UnsupportedOperationException();
   }
 
-  public static AliasUtil getInstance() {
-    if (instance == null) {
-      synchronized (AliasUtil.class) {
-        if (instance == null) {
-          instance = new AliasUtil();
-        }
-      }
+  public static Object getClassInstance(String key, AliasType aliasType) {
+    String classFullName = getClassFullName(key, aliasType);
+    
+    Object instance = null;
+    try {
+      instance = (Object) Class.forName(classFullName).getConstructor().newInstance();
+    } catch (Exception exception) {
+      LOG.error("Unsupported class = " + classFullName, exception.getCause());
     }
-    return instance;
-  }
 
-  /**
-   */
-  private void init() {
-    File jsonFile = LogFeederUtil.getFileFromClasspath(aliasConfigJson);
-    if (jsonFile != null) {
-      this.aliasMap = LogFeederUtil.readJsonFromFile(jsonFile);
+    if (instance != null) {
+      boolean isValid = false;
+      switch (aliasType) {
+        case FILTER:
+          isValid = Filter.class.isAssignableFrom(instance.getClass());
+          break;
+        case INPUT:
+          isValid = Input.class.isAssignableFrom(instance.getClass());
+          break;
+        case OUTPUT:
+          isValid = Output.class.isAssignableFrom(instance.getClass());
+          break;
+        case MAPPER:
+          isValid = Mapper.class.isAssignableFrom(instance.getClass());
+          break;
+        default:
+          LOG.warn("Unhandled aliasType: " + aliasType);
+          isValid = true;
+      }
+      if (!isValid) {
+        LOG.error("Not a valid class :" + classFullName + " AliasType :" + aliasType.name());
+      }
     }
-
+    return instance;
   }
 
-
-  public String readAlias(String key, ALIAS_TYPE aliastype, ALIAS_PARAM aliasParam) {
-    String result = key;// key as a default value;
+  private static String getClassFullName(String key, AliasType aliastype) {
+    String className = null; // null when no class is registered for this alias
+    
     HashMap<String, String> aliasInfo = getAliasInfo(key, aliastype);
-    String value = aliasInfo.get(aliasParam.name().toLowerCase());
-    if (value != null && !value.isEmpty()) {
-      result = value;
-      logger.debug("Alias found for key :" + key + ",  param :" + aliasParam.name().toLowerCase() + ", value :"
-        + value + " aliastype:" + aliastype.name());
+    String value = aliasInfo.get("klass");
+    if (!StringUtils.isEmpty(value)) {
+      className = value;
+      LOG.debug("Class name found for key :" + key + ", class name :" + className + " aliastype:" + aliastype.name());
     } else {
-      logger.debug("Alias not found for key :" + key + ", param :" + aliasParam.name().toLowerCase());
+      LOG.debug("Class name not found for key :" + key + " aliastype:" + aliastype.name());
     }
-    return result;
+    
+    return className;
   }
 
   @SuppressWarnings("unchecked")
-  private HashMap<String, String> getAliasInfo(String key, ALIAS_TYPE aliastype) {
-    HashMap<String, String> aliasInfo = null;
+  private static HashMap<String, String> getAliasInfo(String key, AliasType aliastype) {
+    HashMap<String, String> aliasInfo = new HashMap<String, String>();
+    
     if (aliasMap != null) {
       String typeKey = aliastype.name().toLowerCase();
       HashMap<String, Object> typeJson = (HashMap<String, Object>) aliasMap.get(typeKey);
@@ -91,9 +112,7 @@ public class AliasUtil {
         aliasInfo = (HashMap<String, String>) typeJson.get(key);
       }
     }
-    if (aliasInfo == null) {
-      aliasInfo = new HashMap<String, String>();
-    }
+    
     return aliasInfo;
   }
 }
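
A usage sketch of the reworked static API (the "solr" alias key is an assumption about what alias_config.json registers, not something taken from this patch):

    import org.apache.ambari.logfeeder.output.Output;
    import org.apache.ambari.logfeeder.util.AliasUtil;
    import org.apache.ambari.logfeeder.util.AliasUtil.AliasType;

    public class AliasUtilSketch {
      public static void main(String[] args) {
        // look up and instantiate the class registered for the "solr" output alias;
        // getClassInstance also verifies that the instance really is an Output
        Object instance = AliasUtil.getClassInstance("solr", AliasType.OUTPUT);
        if (instance instanceof Output) {
          System.out.println("loaded output implementation: " + instance.getClass().getName());
        }
      }
    }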

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/CompressionUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/CompressionUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/CompressionUtil.java
index c2addbd..c460ab3 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/CompressionUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/CompressionUtil.java
@@ -37,25 +37,20 @@ public class CompressionUtil {
     FileInputStream ios = null;
     try {
       if (!inputFile.exists()) {
-        throw new IllegalArgumentException("Input File:"
-            + inputFile.getAbsolutePath() + " is not exist.");
+        throw new IllegalArgumentException("Input File:" + inputFile.getAbsolutePath() + " is not exist.");
       }
       if (inputFile.isDirectory()) {
-        throw new IllegalArgumentException("Input File:"
-            + inputFile.getAbsolutePath() + " is a directory.");
+        throw new IllegalArgumentException("Input File:" + inputFile.getAbsolutePath() + " is a directory.");
       }
       File parent = outputFile.getParentFile();
       if (parent != null && !parent.exists()) {
         boolean isParentCreated = parent.mkdirs();
         if (!isParentCreated) {
-          throw new IllegalAccessException(
-              "User does not have permission to create parent directory :"
-                  + parent.getAbsolutePath());
+          throw new IllegalAccessException( "User does not have permission to create parent directory :" + parent.getAbsolutePath());
         }
       }
-      final OutputStream out = new FileOutputStream(outputFile);
-      cos = new CompressorStreamFactory().createCompressorOutputStream(
-          algoName, out);
+      OutputStream out = new FileOutputStream(outputFile);
+      cos = new CompressorStreamFactory().createCompressorOutputStream(algoName, out);
       ios = new FileInputStream(inputFile);
       IOUtils.copy(ios, cos);
     } catch (Exception e) {
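
For reference, the compressFile call used by S3Uploader.createCompressedFileForUpload boils down to Commons Compress stream copying. A minimal standalone version is below (file paths are made up, and the real method takes the algorithm name as a parameter rather than hard-coding gzip):

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.InputStream;
    import java.io.OutputStream;

    import org.apache.commons.compress.compressors.CompressorOutputStream;
    import org.apache.commons.compress.compressors.CompressorStreamFactory;
    import org.apache.commons.compress.utils.IOUtils;

    public class CompressFileSketch {
      public static void main(String[] args) throws Exception {
        File input = new File("/tmp/hdfs_namenode.log");       // hypothetical input
        File output = new File("/tmp/hdfs_namenode.log.gz");   // hypothetical output
        try (InputStream in = new FileInputStream(input);
             OutputStream out = new FileOutputStream(output);
             CompressorOutputStream cos =
                 new CompressorStreamFactory().createCompressorOutputStream(CompressorStreamFactory.GZIP, out)) {
          IOUtils.copy(in, cos);  // stream the raw log into the gzip-compressed file
        }
      }
    }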

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java
index 2ca9353..6321e17 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java
@@ -20,12 +20,17 @@ package org.apache.ambari.logfeeder.util;
 
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.TimeZone;
 
 import org.apache.log4j.Logger;
 
 public class DateUtil {
-  private static final Logger logger = Logger.getLogger(DateUtil.class);
-
+  private static final Logger LOG = Logger.getLogger(DateUtil.class);
+  
+  private DateUtil() {
+    throw new UnsupportedOperationException();
+  }
+  
   public static String dateToString(Date date, String dateFormat) {
     if (date == null || dateFormat == null || dateFormat.isEmpty()) {
       return "";
@@ -34,8 +39,36 @@ public class DateUtil {
       SimpleDateFormat formatter = new SimpleDateFormat(dateFormat);
       return formatter.format(date).toString();
     } catch (Exception e) {
-      logger.error("Error in coverting dateToString  format :" + dateFormat, e);
+      LOG.error("Error in coverting dateToString  format :" + dateFormat, e);
     }
     return "";
   }
+
+  private final static String SOLR_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
+  private static ThreadLocal<SimpleDateFormat> dateFormatter = new ThreadLocal<SimpleDateFormat>() {
+    @Override
+    protected SimpleDateFormat initialValue() {
+      SimpleDateFormat sdf = new SimpleDateFormat(SOLR_DATE_FORMAT);
+      sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
+      return sdf;
+    }
+  };
+
+  public static String getDate(String timeStampStr) {
+    try {
+      return dateFormatter.get().format(new Date(Long.parseLong(timeStampStr)));
+    } catch (Exception ex) {
+      LOG.error(ex);
+      return null;
+    }
+  }
+
+  public static String getActualDateStr() {
+    try {
+      return dateFormatter.get().format(new Date());
+    } catch (Exception ex) {
+      LOG.error(ex);
+      return null;
+    }
+  }
 }
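
The new thread-local SimpleDateFormat keeps formatting thread safe across Log Feeder worker threads without locking. A small usage sketch (the epoch value is made up):

    import org.apache.ambari.logfeeder.util.DateUtil;

    public class DateUtilSketch {
      public static void main(String[] args) {
        // epoch milliseconds as a string, e.g. from a parsed log field
        System.out.println(DateUtil.getDate("1473290097000"));  // 2016-09-07T23:14:57.000Z, Solr's UTC format
        System.out.println(DateUtil.getActualDateStr());        // current time in the same format
      }
    }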


[44/50] [abbrv] ambari git commit: AMBARI-18095. Update zookeeper server/client tests (oleewere)

Posted by ol...@apache.org.
AMBARI-18095. Update zookeeper server/client tests (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4c19f4a2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4c19f4a2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4c19f4a2

Branch: refs/heads/branch-dev-logsearch
Commit: 4c19f4a26b6c9ab07efd0bdc6300596983ea3786
Parents: d15e0b2
Author: oleewere <ol...@gmail.com>
Authored: Thu Sep 8 01:15:01 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:34:01 2016 +0200

----------------------------------------------------------------------
 .../python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py   | 4 ++--
 .../python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py   | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4c19f4a2/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
index 8e028a1..e4c6fbd 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
@@ -79,7 +79,7 @@ class TestZookeeperClient(RMFTestCase):
     )
     self.assertResourceCalled('File',
                               '/etc/zookeeper/conf/log4j.properties',
-                              content='log4jproperties\nline2',
+                              content=InlineTemplate(self.getConfig()['configurations']['zookeeper-log4j']['content']),
                               mode=0644,
                               group='hadoop',
                               owner='zookeeper'
@@ -142,7 +142,7 @@ class TestZookeeperClient(RMFTestCase):
     )
     self.assertResourceCalled('File',
                               '/etc/zookeeper/conf/log4j.properties',
-                              content='log4jproperties\nline2',
+                              content=InlineTemplate(self.getConfig()['configurations']['zookeeper-log4j']['content']),
                               mode=0644,
                               group='hadoop',
                               owner='zookeeper'

http://git-wip-us.apache.org/repos/asf/ambari/blob/4c19f4a2/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
index 7757fd9..b9ebea2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
@@ -167,7 +167,7 @@ class TestZookeeperServer(RMFTestCase):
     )
     self.assertResourceCalled('File',
                               '/etc/zookeeper/conf/log4j.properties',
-                              content='log4jproperties\nline2',
+                              content=InlineTemplate(self.getConfig()['configurations']['zookeeper-log4j']['content']),
                               mode=0644,
                               group='hadoop',
                               owner='zookeeper'
@@ -225,10 +225,10 @@ class TestZookeeperServer(RMFTestCase):
     )
     self.assertResourceCalled('File',
                               '/etc/zookeeper/conf/log4j.properties',
-                              content='log4jproperties\nline2',
                               mode=0644,
-                              group='hadoop',
-                              owner='zookeeper'
+                              owner='zookeeper',
+                              content=InlineTemplate(self.getConfig()['configurations']['zookeeper-log4j']['content']),
+                              group='hadoop'
     )
     self.assertResourceCalled('File', '/etc/zookeeper/conf/zookeeper_jaas.conf',
       owner = 'zookeeper',


[24/50] [abbrv] ambari git commit: AMBARI-18253. Fix LogSearch utility classes (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
AMBARI-18253. Fix LogSearch utility classes (Miklos Gergely via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/42ad4024
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/42ad4024
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/42ad4024

Branch: refs/heads/branch-dev-logsearch
Commit: 42ad4024d56f5562f733144414ec9fb2ee7313e6
Parents: 9c29308
Author: oleewere <ol...@gmail.com>
Authored: Thu Aug 25 13:02:45 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:34:00 2016 +0200

----------------------------------------------------------------------
 .../org/apache/ambari/logsearch/LogSearch.java  |   8 +-
 .../ambari/logsearch/common/ConfigHelper.java   | 136 ++++++++++++
 .../logsearch/common/ExternalServerClient.java  |  96 +++++++++
 .../logsearch/common/PropertiesHelper.java      | 181 ++++++++++++++++
 .../ambari/logsearch/common/SearchCriteria.java |   5 +-
 .../logsearch/common/XMLPropertiesHelper.java   |  79 +++++++
 .../ambari/logsearch/dao/AuditSolrDao.java      |  20 +-
 .../logsearch/dao/ServiceLogsSolrDao.java       |  16 +-
 .../ambari/logsearch/dao/SolrDaoBase.java       |  21 +-
 .../ambari/logsearch/dao/UserConfigSolrDao.java |  27 +--
 .../apache/ambari/logsearch/dao/UserDao.java    |  17 +-
 .../logsearch/graph/GraphDataGenerator.java     |  36 ++--
 .../logsearch/graph/GraphDataGeneratorBase.java |   6 +-
 .../ambari/logsearch/manager/AuditMgr.java      |  93 ++++----
 .../ambari/logsearch/manager/LogFileMgr.java    |  22 +-
 .../ambari/logsearch/manager/LogsMgr.java       | 211 +++++++++----------
 .../ambari/logsearch/manager/MgrBase.java       |  30 +--
 .../ambari/logsearch/manager/UserConfigMgr.java |  47 ++---
 .../ambari/logsearch/query/QueryGeneration.java |  36 ++--
 .../logsearch/query/QueryGenerationBase.java    |  34 ++-
 .../ambari/logsearch/rest/ServiceLogsREST.java  |   4 -
 .../ambari/logsearch/rest/UserConfigREST.java   |   4 -
 .../solr/metrics/SolrMetricsLoader.java         |   8 +-
 .../apache/ambari/logsearch/util/BizUtil.java   |  27 +--
 .../ambari/logsearch/util/CommonUtil.java       |   4 +
 .../ambari/logsearch/util/ConfigUtil.java       | 140 ------------
 .../apache/ambari/logsearch/util/DateUtil.java  |  32 +--
 .../logsearch/util/ExternalServerClient.java    |  96 ---------
 .../apache/ambari/logsearch/util/FileUtil.java  |  17 +-
 .../apache/ambari/logsearch/util/JSONUtil.java  |  41 ++--
 .../ambari/logsearch/util/PropertiesUtil.java   | 182 ----------------
 .../apache/ambari/logsearch/util/QueryBase.java | 132 ------------
 .../ambari/logsearch/util/RESTErrorUtil.java    |  16 +-
 .../apache/ambari/logsearch/util/SolrUtil.java  | 164 +++++++++++---
 .../logsearch/util/XMLPropertiesUtil.java       |  85 --------
 .../LogsearchAuthFailureHandler.java            |  11 +-
 .../LogsearchAuthenticationEntryPoint.java      |   8 +-
 .../LogsearchKRBAuthenticationFilter.java       |  26 +--
 .../ambari/logsearch/web/security/LdapUtil.java |   8 +-
 ...LogsearchAbstractAuthenticationProvider.java |   4 +-
 .../LogsearchAuthenticationProvider.java        |  13 +-
 ...rchExternalServerAuthenticationProvider.java |  13 +-
 .../main/webapp/META-INF/applicationContext.xml |   4 +-
 .../ambari/logsearch/dao/SolrDaoBaseTest.java   |   1 -
 .../logsearch/dao/UserConfigSolrDaoTest.java    |  13 +-
 .../src/test/resources/applicationContext.xml   |   4 +-
 .../applicationContext_testManagers.xml         |  53 -----
 47 files changed, 1013 insertions(+), 1218 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
index fcebcea..15355a1 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
@@ -25,10 +25,10 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 
+import org.apache.ambari.logsearch.common.ConfigHelper;
 import org.apache.ambari.logsearch.common.ManageStartEndTime;
+import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.solr.metrics.SolrMetricsLoader;
-import org.apache.ambari.logsearch.util.ConfigUtil;
-import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.eclipse.jetty.server.Connector;
@@ -92,7 +92,7 @@ public class LogSearch {
     logger.debug(server.dump());
     logger
         .debug("==============================================================================");
-    ConfigUtil.initializeApplicationConfig();
+    ConfigHelper.initializeApplicationConfig();
     server.join();
   }
 
@@ -100,7 +100,7 @@ public class LogSearch {
     Server server = new Server();
     ServerConnector connector = new ServerConnector(server);
     boolean portSpecified = argv.length > 0;
-    String protcolProperty = PropertiesUtil.getProperty(LOGSEARCH_PROTOCOL_PROP,HTTP_PROTOCOL);
+    String protcolProperty = PropertiesHelper.getProperty(LOGSEARCH_PROTOCOL_PROP,HTTP_PROTOCOL);
     if (StringUtils.isEmpty(protcolProperty)) {
       protcolProperty = HTTP_PROTOCOL;
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ConfigHelper.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ConfigHelper.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ConfigHelper.java
new file mode 100644
index 0000000..edb4ffa
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ConfigHelper.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.common;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logsearch.dao.SolrDaoBase;
+import org.apache.ambari.logsearch.manager.MgrBase;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.log4j.Logger;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONObject;
+
+public class ConfigHelper {
+  private static final Logger logger = Logger.getLogger(MgrBase.class);
+
+  public static HashMap<String, String> serviceLogsColumnMapping = new HashMap<String, String>();
+  public static HashMap<String, String> auditLogsColumnMapping = new HashMap<String, String>();
+
+  private ConfigHelper() {
+    throw new UnsupportedOperationException();
+  }
+  
+  public static void initializeApplicationConfig() {
+    String serviceLogsColumnMappingArray[] = PropertiesHelper.getPropertyStringList("logsearch.solr.service.logs.column.mapping");
+    String auditLogsColumnMappingArray[] = PropertiesHelper.getPropertyStringList("logsearch.solr.audit.logs.column.mapping");
+
+    // Initializing column mapping for Service Logs
+    intializeUISolrColumnMapping(serviceLogsColumnMappingArray, serviceLogsColumnMapping);
+
+    // Initializing column mapping for Audit Logs
+    intializeUISolrColumnMapping(auditLogsColumnMappingArray, auditLogsColumnMapping);
+  }
+
+  private static void intializeUISolrColumnMapping(String columnMappingArray[], HashMap<String, String> columnMappingMap) {
+
+    if (columnMappingArray != null && columnMappingArray.length > 0) {
+      for (String columnMapping : columnMappingArray) {
+        String mapping[] = columnMapping.split(":");
+        if (mapping.length > 1) {
+          String solrField = mapping[0];
+          String uiField = mapping[1];
+          
+          columnMappingMap.put(solrField + LogSearchConstants.SOLR_SUFFIX, uiField);
+          columnMappingMap.put(uiField + LogSearchConstants.UI_SUFFIX, solrField);
+        }
+      }
+    }
+  }
+
+  public static void extractSchemaFieldsName(String responseString, HashMap<String, String> schemaFieldsNameMap,
+      HashMap<String, String> schemaFieldTypeMap) {
+    try {
+      JSONObject jsonObject = new JSONObject(responseString);
+      JSONObject schemajsonObject = jsonObject.getJSONObject("schema");
+      JSONArray jsonArrayList = schemajsonObject.getJSONArray("fields");
+      JSONArray fieldTypeJsonArray = schemajsonObject
+          .getJSONArray("fieldTypes");
+      if (jsonArrayList == null) {
+        return;
+      }
+      if (fieldTypeJsonArray == null) {
+        return;
+      }
+      HashMap<String, String> _schemaFieldTypeMap = new HashMap<String, String>();
+      HashMap<String, String> _schemaFieldsNameMap = new HashMap<String, String>();
+      for (int i = 0; i < fieldTypeJsonArray.length(); i++) {
+        JSONObject typeObject = fieldTypeJsonArray.getJSONObject(i);
+        String name = typeObject.getString("name");
+        String fieldTypeJson = typeObject.toString();
+        _schemaFieldTypeMap.put(name, fieldTypeJson);
+      }
+
+      for (int i = 0; i < jsonArrayList.length(); i++) {
+        JSONObject explrObject = jsonArrayList.getJSONObject(i);
+        String name = explrObject.getString("name");
+        String type = explrObject.getString("type");
+        if (!name.contains("@") && !name.startsWith("_") && !name.contains("_md5") && !name.contains("_ms") &&
+            !name.contains(LogSearchConstants.NGRAM_SUFFIX) && !name.contains("tags") && !name.contains("_str")) {
+          _schemaFieldsNameMap.put(name, type);
+        }
+      }
+      schemaFieldsNameMap.clear();
+      schemaFieldTypeMap.clear();
+      schemaFieldsNameMap.putAll(_schemaFieldsNameMap);
+      schemaFieldTypeMap.putAll(_schemaFieldTypeMap);
+    } catch (Exception e) {
+      logger.error(e + "Credentials not specified in logsearch.properties " + MessageEnums.ERROR_SYSTEM);
+    }
+  }
+
+  @SuppressWarnings("rawtypes")
+  public static void getSchemaFieldsName(String excludeArray[], List<String> fieldNames, SolrDaoBase solrDaoBase) {
+    if (!solrDaoBase.schemaFieldsNameMap.isEmpty()) {
+      Iterator iteratorSchemaFieldsName = solrDaoBase.schemaFieldsNameMap.entrySet().iterator();
+      while (iteratorSchemaFieldsName.hasNext()) {
+        Map.Entry fieldName = (Map.Entry) iteratorSchemaFieldsName.next();
+        String field = "" + fieldName.getKey();
+        if (!isExclude(field, excludeArray)) {
+          fieldNames.add(field);
+        }
+      }
+    }
+  }
+
+  private static boolean isExclude(String name, String excludeArray[]) {
+    if (!ArrayUtils.isEmpty(excludeArray)) {
+      for (String exclude : excludeArray) {
+        if (name.equals(exclude)){
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+}
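
Note on the new ConfigHelper above: the two column-mapping properties it reads are comma-separated solrField:uiField pairs, and each pair is stored in both directions, keyed with LogSearchConstants.SOLR_SUFFIX and UI_SUFFIX. A minimal usage sketch follows; the wrapping class, the example property value, and the field names in it are illustrative only, while the property key, methods, and constants come from the code above.

  import org.apache.ambari.logsearch.common.ConfigHelper;
  import org.apache.ambari.logsearch.common.LogSearchConstants;

  public class ColumnMappingSketch {
    public static void main(String[] args) {
      // Assumes logsearch.properties contains something like (field names invented):
      //   logsearch.solr.service.logs.column.mapping=log_message:Message,host:Host
      ConfigHelper.initializeApplicationConfig();
      // Solr field name -> UI column label
      String uiLabel = ConfigHelper.serviceLogsColumnMapping.get("log_message" + LogSearchConstants.SOLR_SUFFIX);
      // UI column label -> Solr field name
      String solrField = ConfigHelper.serviceLogsColumnMapping.get("Message" + LogSearchConstants.UI_SUFFIX);
      System.out.println(uiLabel + " / " + solrField);
    }
  }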

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
new file mode 100644
index 0000000..5235fab
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.common;
+
+import java.util.List;
+import java.util.Map;
+
+import javax.annotation.PostConstruct;
+import javax.ws.rs.client.Invocation;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+
+import org.apache.ambari.logsearch.web.security.LogsearchAbstractAuthenticationProvider;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import org.glassfish.jersey.client.JerseyClient;
+import org.glassfish.jersey.client.JerseyClientBuilder;
+import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;
+import org.glassfish.jersey.filter.LoggingFilter;
+import org.springframework.stereotype.Component;
+
+/**
+ * Layer for sending REST requests to an external server using the Jersey client.
+ */
+@Component
+public class ExternalServerClient {
+  private static Logger LOG = Logger.getLogger(ExternalServerClient.class);
+  private static final ThreadLocal<JerseyClient> localJerseyClient = new ThreadLocal<JerseyClient>(){
+    @Override
+    protected JerseyClient initialValue() {
+      return JerseyClientBuilder.createClient();
+    }
+  };
+  private String hostURL = "http://host:ip";// default
+  private boolean enableLog = false;// default
+
+  @PostConstruct
+  public void initialization() {
+    hostURL = PropertiesHelper.getProperty(
+        LogsearchAbstractAuthenticationProvider.AUTH_METHOD_PROP_START_WITH
+            + "external_auth.host_url", hostURL);
+  }
+
+  /**
+   * Sends a GET request to the external server and returns the response body.
+   */
+  @SuppressWarnings({ "unchecked", "rawtypes" })
+  public Object sendGETRequest(String url, Class klass, MultivaluedMap<String, String> queryParam,
+                               String username, String password)
+      throws Exception {
+    url = hostURL + url;
+    JerseyClient client = localJerseyClient.get();
+    HttpAuthenticationFeature authFeature = HttpAuthenticationFeature.basicBuilder().build();
+
+    client.register(authFeature);
+    if (enableLog) {
+      client.register(LoggingFilter.class);
+    }
+
+    WebTarget target = client.target(url);
+    LOG.debug("URL: " + url);
+    for (Map.Entry<String, List<String>> entry : queryParam.entrySet()) {
+      target = target.queryParam(entry.getKey(), entry.getValue());
+      LOG.debug(
+        String.format("Query parameter: name - %s  ; value - %s ;" + entry.getKey(), StringUtils.join(entry.getValue(),',')));
+    }
+    target
+      .property(HttpAuthenticationFeature.HTTP_AUTHENTICATION_BASIC_USERNAME, username)
+      .property(HttpAuthenticationFeature.HTTP_AUTHENTICATION_BASIC_PASSWORD, password);
+    Invocation.Builder invocationBuilder =  target.request(MediaType.APPLICATION_JSON_TYPE);
+    try {
+      return invocationBuilder.get().readEntity(klass);
+    } catch (Exception e) {
+      throw new Exception(e.getCause());
+    } finally {
+      localJerseyClient.remove();
+    }
+  }
+}
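
A rough usage sketch for the ExternalServerClient added above. The client is assumed to be obtained as a Spring bean; the wrapping class, the request path, the "fields" query parameter, and the credentials are placeholders, not values from this commit. The method sends the GET to <external_auth.host_url> plus the given path with HTTP Basic authentication, as shown in the code above.

  import javax.ws.rs.core.MultivaluedHashMap;
  import javax.ws.rs.core.MultivaluedMap;
  import org.apache.ambari.logsearch.common.ExternalServerClient;

  public class ExternalAuthSketch {
    // client is assumed to be Spring-injected (@Autowired) in real code
    public static Object fetch(ExternalServerClient client) throws Exception {
      MultivaluedMap<String, String> params = new MultivaluedHashMap<String, String>();
      params.putSingle("fields", "*");  // hypothetical query parameter
      // Path and credentials are placeholders; sendGETRequest declares "throws Exception"
      return client.sendGETRequest("/api/v1/clusters", String.class, params, "admin", "admin");
    }
  }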

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/PropertiesHelper.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/PropertiesHelper.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/PropertiesHelper.java
new file mode 100644
index 0000000..adb0699
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/PropertiesHelper.java
@@ -0,0 +1,181 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.common;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.log4j.Logger;
+import org.springframework.beans.BeansException;
+import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
+import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
+
+public class PropertiesHelper extends PropertyPlaceholderConfigurer {
+  private static final Logger logger = Logger.getLogger(PropertiesHelper.class);
+  
+  private static final String LOGSEARCH_PROP_FILE="logsearch.properties";
+  
+  private static Map<String, String> propertiesMap;
+
+  private PropertiesHelper() {
+  }
+  
+  static {
+    propertiesMap = new HashMap<String, String>();
+    Properties properties = new Properties();
+    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader().getResource(LOGSEARCH_PROP_FILE);
+    FileInputStream fileInputStream = null;
+    try {
+      File file = new File(fileCompleteUrl.toURI());
+      fileInputStream = new FileInputStream(file.getAbsoluteFile());
+      properties.load(fileInputStream);
+    } catch (IOException | URISyntaxException e) {
+      logger.error("error loading prop for protocol config",e);
+    } finally {
+      if (fileInputStream != null) {
+        try {
+          fileInputStream.close();
+        } catch (IOException e) {
+        }
+      }
+    }
+    for (String key : properties.stringPropertyNames()) {
+      String value = properties.getProperty(key);
+      propertiesMap.put(key, value);
+    }
+  }
+
+  @Override
+  protected void processProperties(ConfigurableListableBeanFactory beanFactory, Properties props) throws BeansException {
+    super.processProperties(beanFactory, props);
+
+    propertiesMap = new HashMap<String, String>();
+
+    // First add the system properties
+    Set<Object> keySet = System.getProperties().keySet();
+    for (Object key : keySet) {
+      String keyStr = key.toString();
+      propertiesMap.put(keyStr, System.getProperties().getProperty(keyStr).trim());
+    }
+
+    // add our properties now
+    keySet = props.keySet();
+    for (Object key : keySet) {
+      String keyStr = key.toString();
+      propertiesMap.put(keyStr, props.getProperty(keyStr).trim());
+    }
+  }
+
+  public static String getProperty(String key, String defaultValue) {
+    if (key == null) {
+      return null;
+    }
+    String rtrnVal = propertiesMap.get(key);
+    if (rtrnVal == null) {
+      rtrnVal = defaultValue;
+    }
+    return rtrnVal;
+  }
+
+  public static String getProperty(String key) {
+    if (key == null) {
+      return null;
+    }
+    return propertiesMap.get(key);
+  }
+
+  public static String[] getPropertyStringList(String key) {
+    if (key == null) {
+      return null;
+    }
+    String value = propertiesMap.get(key);
+    if (value == null || value.trim().equals("")) {
+      return new String[0];
+    } else {
+      String[] splitValues = value.split(",");
+      String[] returnValues = new String[splitValues.length];
+      for (int i = 0; i < splitValues.length; i++) {
+        returnValues[i] = splitValues[i].trim();
+      }
+      return returnValues;
+    }
+  }
+
+  public static Integer getIntProperty(String key, int defaultValue) {
+    if (key == null) {
+      return null;
+    }
+    String rtrnVal = propertiesMap.get(key);
+    if (rtrnVal == null) {
+      return defaultValue;
+    }
+    return Integer.valueOf(rtrnVal);
+  }
+
+  public static Integer getIntProperty(String key) {
+    if (key == null) {
+      return null;
+    }
+    String rtrnVal = propertiesMap.get(key);
+    if (rtrnVal == null) {
+      return null;
+    }
+    return Integer.valueOf(rtrnVal);
+  }
+
+  public static Long getLongProperty(String key, long defaultValue) {
+    if (key == null) {
+      return null;
+    }
+    String rtrnVal = propertiesMap.get(key);
+    if (rtrnVal == null) {
+      return defaultValue;
+    }
+    return Long.valueOf(rtrnVal);
+  }
+
+  public static Long getLongProperty(String key) {
+    if (key == null) {
+      return null;
+    }
+    String rtrnVal = propertiesMap.get(key);
+    if (rtrnVal == null) {
+      return null;
+    }
+    return Long.valueOf(rtrnVal);
+  }
+
+  public static boolean getBooleanProperty(String key, boolean defaultValue) {
+    if (key == null) {
+      return defaultValue;
+    }
+    String value = getProperty(key);
+    if (value == null) {
+      return defaultValue;
+    }
+    return Boolean.parseBoolean(value);
+  }
+}
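
The static accessors above back the PropertiesUtil-to-PropertiesHelper changes throughout the rest of this patch. A brief sketch of how they resolve values from logsearch.properties on the classpath; the wrapping class is illustrative, but the three keys all appear elsewhere in this diff.

  import org.apache.ambari.logsearch.common.PropertiesHelper;

  public class PropertiesSketch {
    public static void main(String[] args) {
      // Values come from the logsearch.properties file loaded in the static block above
      int maxRows = PropertiesHelper.getIntProperty("db.maxResults", 50);                     // 50 if the key is absent
      boolean kerberos = PropertiesHelper.getBooleanProperty("logsearch.solr.kerberos.enable", false);
      String[] mappings = PropertiesHelper.getPropertyStringList("logsearch.solr.service.logs.column.mapping"); // empty array if unset
      System.out.println(maxRows + " " + kerberos + " " + mappings.length);
    }
  }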

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
index 2cf2139..6b74144 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
@@ -26,7 +26,6 @@ import java.util.Set;
 
 import javax.servlet.http.HttpServletRequest;
 
-import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.commons.lang.StringEscapeUtils;
 
 public class SearchCriteria {
@@ -58,7 +57,7 @@ public class SearchCriteria {
       if (request.getParameter("pageSize") != null && (!request.getParameter("pageSize").isEmpty())) {
         this.maxRows = new Integer(request.getParameter("pageSize"));
       } else {
-        this.maxRows = PropertiesUtil.getIntProperty("db.maxResults", 50);
+        this.maxRows = PropertiesHelper.getIntProperty("db.maxResults", 50);
       }
     } catch (NumberFormatException e) {
       // do nothing
@@ -197,7 +196,7 @@ public class SearchCriteria {
    * @param caId
    */
   public void addParam(String name, Object value) {
-    String solrValue = PropertiesUtil.getProperty(name);
+    String solrValue = PropertiesHelper.getProperty(name);
     if (solrValue == null || solrValue.isEmpty()) {
       paramList.put(name, value);
     } else {

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/XMLPropertiesHelper.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/XMLPropertiesHelper.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/XMLPropertiesHelper.java
new file mode 100644
index 0000000..690a60f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/XMLPropertiesHelper.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.common;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.log4j.Logger;
+import org.springframework.util.DefaultPropertiesPersister;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+
+public class XMLPropertiesHelper extends DefaultPropertiesPersister {
+  private static Logger logger = Logger.getLogger(XMLPropertiesHelper.class);
+
+  public XMLPropertiesHelper() {
+  }
+
+  @Override
+  public void loadFromXml(Properties properties, InputStream inputStream)
+      throws IOException {
+    try {
+      DocumentBuilderFactory xmlDocumentBuilderFactory = DocumentBuilderFactory.newInstance();
+      xmlDocumentBuilderFactory.setIgnoringComments(true);
+      xmlDocumentBuilderFactory.setNamespaceAware(true);
+      DocumentBuilder xmlDocumentBuilder = xmlDocumentBuilderFactory.newDocumentBuilder();
+      Document xmlDocument = xmlDocumentBuilder.parse(inputStream);
+      if (xmlDocument != null) {
+        xmlDocument.getDocumentElement().normalize();
+        NodeList nList = xmlDocument.getElementsByTagName("property");
+        if (nList != null) {
+          for (int temp = 0; temp < nList.getLength(); temp++) {
+            Node nNode = nList.item(temp);
+            if (nNode.getNodeType() == Node.ELEMENT_NODE) {
+              Element eElement = (Element) nNode;
+              String propertyName = "";
+              String propertyValue = "";
+              if (eElement.getElementsByTagName("name") != null && eElement.getElementsByTagName("name").item(0) != null) {
+                propertyName = eElement.getElementsByTagName("name").item(0).getTextContent().trim();
+              }
+              if (eElement.getElementsByTagName("value") != null && eElement.getElementsByTagName("value").item(0) != null) {
+                propertyValue = eElement.getElementsByTagName("value").item(0).getTextContent().trim();
+              }
+              if (propertyName != null && !propertyName.isEmpty()) {
+                properties.put(propertyName, propertyValue);
+              }
+            }
+          }
+        }
+      }
+    } catch (Exception e) {
+      logger.error("Error loading xml properties ", e);
+    }
+  }
+
+}
\ No newline at end of file
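
A small sketch of what XMLPropertiesHelper.loadFromXml() consumes: Ambari-style property XML with <property><name/><value/> elements, as in the logsearch configuration files elsewhere in this repository. The wrapping class and the file name are placeholders; the property key queried at the end is taken from this patch's configuration usage.

  import java.io.FileInputStream;
  import java.io.IOException;
  import java.io.InputStream;
  import java.util.Properties;
  import org.apache.ambari.logsearch.common.XMLPropertiesHelper;

  public class XmlPropertiesSketch {
    public static void main(String[] args) throws IOException {
      Properties props = new Properties();
      // "logsearch-example.xml" is a placeholder file laid out as
      // <configuration><property><name>...</name><value>...</value></property>...</configuration>
      try (InputStream in = new FileInputStream("logsearch-example.xml")) {
        new XMLPropertiesHelper().loadFromXml(props, in);
      }
      System.out.println(props.getProperty("logsearch.logfeeder.include.default.level"));
    }
  }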

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
index a6f77e9..64aa776 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
@@ -24,8 +24,8 @@ import java.util.Collection;
 
 import javax.annotation.PostConstruct;
 
+import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.manager.MgrBase.LogType;
-import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.springframework.stereotype.Component;
@@ -41,15 +41,15 @@ public class AuditSolrDao extends SolrDaoBase {
 
   @PostConstruct
   public void postConstructor() {
-    String solrUrl = PropertiesUtil.getProperty("logsearch.solr.audit.logs.url");
-    String zkConnectString = PropertiesUtil.getProperty("logsearch.solr.audit.logs.zk_connect_string");
-    String collection = PropertiesUtil.getProperty("logsearch.solr.collection.audit.logs", "audit_logs");
-    String aliasNameIn = PropertiesUtil.getProperty("logsearch.solr.audit.logs.alias.name", "audit_logs_alias");
-    String rangerAuditCollection = PropertiesUtil.getProperty("logsearch.ranger.audit.logs.collection.name");
-    String splitInterval = PropertiesUtil.getProperty("logsearch.audit.logs.split.interval.mins", "none");
-    String configName = PropertiesUtil.getProperty("logsearch.solr.audit.logs.config.name", "audit_logs");
-    int numberOfShards = PropertiesUtil.getIntProperty("logsearch.collection.audit.logs.numshards", 1);
-    int replicationFactor = PropertiesUtil.getIntProperty("logsearch.collection.audit.logs.replication.factor", 1);
+    String solrUrl = PropertiesHelper.getProperty("logsearch.solr.audit.logs.url");
+    String zkConnectString = PropertiesHelper.getProperty("logsearch.solr.audit.logs.zk_connect_string");
+    String collection = PropertiesHelper.getProperty("logsearch.solr.collection.audit.logs", "audit_logs");
+    String aliasNameIn = PropertiesHelper.getProperty("logsearch.solr.audit.logs.alias.name", "audit_logs_alias");
+    String rangerAuditCollection = PropertiesHelper.getProperty("logsearch.ranger.audit.logs.collection.name");
+    String splitInterval = PropertiesHelper.getProperty("logsearch.audit.logs.split.interval.mins", "none");
+    String configName = PropertiesHelper.getProperty("logsearch.solr.audit.logs.config.name", "audit_logs");
+    int numberOfShards = PropertiesHelper.getIntProperty("logsearch.collection.audit.logs.numshards", 1);
+    int replicationFactor = PropertiesHelper.getIntProperty("logsearch.collection.audit.logs.replication.factor", 1);
 
     try {
       connectToSolr(solrUrl, zkConnectString, collection);

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
index af6d62d..6e2bb4b 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
@@ -21,8 +21,8 @@ package org.apache.ambari.logsearch.dao;
 
 import javax.annotation.PostConstruct;
 
+import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.manager.MgrBase.LogType;
-import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.log4j.Logger;
 import org.springframework.stereotype.Component;
 
@@ -38,13 +38,13 @@ public class ServiceLogsSolrDao extends SolrDaoBase {
   @PostConstruct
   public void postConstructor() {
     logger.info("postConstructor() called.");
-    String solrUrl = PropertiesUtil.getProperty("logsearch.solr.url");
-    String zkConnectString = PropertiesUtil.getProperty("logsearch.solr.zk_connect_string");
-    String collection = PropertiesUtil.getProperty("logsearch.solr.collection.service.logs", "hadoop_logs");
-    String splitInterval = PropertiesUtil.getProperty("logsearch.service.logs.split.interval.mins", "none");
-    String configName = PropertiesUtil.getProperty("logsearch.solr.service.logs.config.name", "hadoop_logs");
-    int numberOfShards = PropertiesUtil.getIntProperty("logsearch.collection.service.logs.numshards", 1);
-    int replicationFactor = PropertiesUtil.getIntProperty("logsearch.collection.service.logs.replication.factor", 1);
+    String solrUrl = PropertiesHelper.getProperty("logsearch.solr.url");
+    String zkConnectString = PropertiesHelper.getProperty("logsearch.solr.zk_connect_string");
+    String collection = PropertiesHelper.getProperty("logsearch.solr.collection.service.logs", "hadoop_logs");
+    String splitInterval = PropertiesHelper.getProperty("logsearch.service.logs.split.interval.mins", "none");
+    String configName = PropertiesHelper.getProperty("logsearch.solr.service.logs.config.name", "hadoop_logs");
+    int numberOfShards = PropertiesHelper.getIntProperty("logsearch.collection.service.logs.numshards", 1);
+    int replicationFactor = PropertiesHelper.getIntProperty("logsearch.collection.service.logs.replication.factor", 1);
 
     try {
       connectToSolr(solrUrl, zkConnectString, collection);

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
index 2129f9e..8cdb6eb 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
@@ -26,12 +26,11 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 
+import org.apache.ambari.logsearch.common.ConfigHelper;
 import org.apache.ambari.logsearch.common.LogSearchContext;
 import org.apache.ambari.logsearch.common.MessageEnums;
+import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.manager.MgrBase.LogType;
-import org.apache.ambari.logsearch.util.ConfigUtil;
-import org.apache.ambari.logsearch.util.JSONUtil;
-import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
@@ -56,7 +55,6 @@ import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.NamedList;
-import org.springframework.beans.factory.annotation.Autowired;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -75,11 +73,6 @@ public abstract class SolrDaoBase {
 
   private LogType logType;
 
-  @Autowired
-  protected JSONUtil jsonUtil;
-  @Autowired
-  protected RESTErrorUtil restErrorUtil;
-
   @VisibleForTesting
   protected String collectionName = null;
   @VisibleForTesting
@@ -446,7 +439,7 @@ public abstract class SolrDaoBase {
       }
       return queryResponse;
     } else {
-      throw restErrorUtil.createRESTException("Solr configuration improper for " + logType.getLabel() +" logs",
+      throw RESTErrorUtil.createRESTException("Solr configuration improper for " + logType.getLabel() +" logs",
           MessageEnums.ERROR_SYSTEM);
     }
   }
@@ -468,8 +461,8 @@ public abstract class SolrDaoBase {
   }
 
   private void setupSecurity() {
-    String jaasFile = PropertiesUtil.getProperty("logsearch.solr.jaas.file", "/etc/security/keytabs/logsearch_solr.service.keytab");
-    boolean securityEnabled = PropertiesUtil.getBooleanProperty("logsearch.solr.kerberos.enable", false);
+    String jaasFile = PropertiesHelper.getProperty("logsearch.solr.jaas.file", "/etc/security/keytabs/logsearch_solr.service.keytab");
+    boolean securityEnabled = PropertiesHelper.getBooleanProperty("logsearch.solr.kerberos.enable", false);
     if (securityEnabled) {
       System.setProperty("java.security.auth.login.config", jaasFile);
       HttpClientUtil.setConfigurer(new Krb5HttpClientConfigurer());
@@ -519,7 +512,7 @@ public abstract class SolrDaoBase {
     SolrRequest<SchemaResponse> request = new SchemaRequest();
     request.setMethod(METHOD.GET);
     request.setPath("/schema");
-    String historyCollection = PropertiesUtil.getProperty("logsearch.solr.collection.history","history");
+    String historyCollection = PropertiesHelper.getProperty("logsearch.solr.collection.history","history");
     if (solrClient != null && !collectionName.equals(historyCollection)) {
       NamedList<Object> namedList = null;
       try {
@@ -530,7 +523,7 @@ public abstract class SolrDaoBase {
       }
       
       if (namedList != null) {
-        ConfigUtil.extractSchemaFieldsName(namedList.toString(), schemaFieldsNameMap,schemaFieldTypeMap);
+        ConfigHelper.extractSchemaFieldsName(namedList.toString(), schemaFieldsNameMap,schemaFieldTypeMap);
         return true;
       }
     }
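
The SolrDaoBase hunk above is representative of the wider change in this patch: helper objects that used to be @Autowired (RESTErrorUtil, JSONUtil, SolrUtil, DateUtil, BizUtil) are now invoked through static methods, so the DAO and manager classes drop those fields. A schematic before/after; the wrapping class and method are illustrative, and createRESTException is assumed, as at the existing call sites, to return an unchecked exception.

  import org.apache.ambari.logsearch.common.MessageEnums;
  import org.apache.ambari.logsearch.util.RESTErrorUtil;

  public class StaticUtilSketch {
    // Before: @Autowired private RESTErrorUtil restErrorUtil;  ...  throw restErrorUtil.createRESTException(...);
    // After: no injected field, a plain static call
    public void failForImproperSolrConfig(String label) {
      throw RESTErrorUtil.createRESTException("Solr configuration improper for " + label + " logs",
          MessageEnums.ERROR_SYSTEM);
    }
  }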

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
index a0c1134..026c78f 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
@@ -29,6 +29,7 @@ import java.util.Scanner;
 import javax.annotation.PostConstruct;
 import org.apache.ambari.logsearch.view.VLogfeederFilterWrapper;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.response.QueryResponse;
@@ -42,7 +43,7 @@ import org.codehaus.jettison.json.JSONObject;
 import com.google.gson.JsonParseException;
 
 import org.apache.ambari.logsearch.manager.MgrBase.LogType;
-import org.apache.ambari.logsearch.util.PropertiesUtil;
+import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.log4j.Logger;
 import org.springframework.stereotype.Component;
 import org.springframework.util.CollectionUtils;
@@ -59,11 +60,11 @@ public class UserConfigSolrDao extends SolrDaoBase {
 
   @PostConstruct
   public void postConstructor() {
-    String solrUrl = PropertiesUtil.getProperty("logsearch.solr.url");
-    String zkConnectString = PropertiesUtil.getProperty("logsearch.solr.zk_connect_string");
-    String collection = PropertiesUtil.getProperty("logsearch.solr.collection.history", "history");
-    String configName = PropertiesUtil.getProperty("logsearch.solr.history.config.name", "history");
-    int replicationFactor = PropertiesUtil.getIntProperty("logsearch.collection.history.replication.factor", 2);
+    String solrUrl = PropertiesHelper.getProperty("logsearch.solr.url");
+    String zkConnectString = PropertiesHelper.getProperty("logsearch.solr.zk_connect_string");
+    String collection = PropertiesHelper.getProperty("logsearch.solr.collection.history", "history");
+    String configName = PropertiesHelper.getProperty("logsearch.solr.history.config.name", "history");
+    int replicationFactor = PropertiesHelper.getIntProperty("logsearch.collection.history.replication.factor", 2);
     String splitInterval = "none";
     int numberOfShards = 1;
 
@@ -88,7 +89,7 @@ public class UserConfigSolrDao extends SolrDaoBase {
 
   public void saveUserFilter(VLogfeederFilterWrapper logfeederFilterWrapper) throws SolrException, SolrServerException, IOException {
     String filterName = LogSearchConstants.LOGFEEDER_FILTER_NAME;
-    String json = jsonUtil.objToJson(logfeederFilterWrapper);
+    String json = JSONUtil.objToJson(logfeederFilterWrapper);
     SolrInputDocument configDocument = new SolrInputDocument();
     configDocument.addField(LogSearchConstants.ID, logfeederFilterWrapper.getId());
     configDocument.addField(LogSearchConstants.ROW_TYPE, filterName);
@@ -115,14 +116,14 @@ public class UserConfigSolrDao extends SolrDaoBase {
     VLogfeederFilterWrapper logfeederFilterWrapper = null;
     if (!CollectionUtils.isEmpty(documentList)) {
       SolrDocument configDoc = documentList.get(0);
-      String configJson = jsonUtil.objToJson(configDoc);
-      HashMap<String, Object> configMap = (HashMap<String, Object>) jsonUtil.jsonToMapObject(configJson);
+      String configJson = JSONUtil.objToJson(configDoc);
+      HashMap<String, Object> configMap = (HashMap<String, Object>) JSONUtil.jsonToMapObject(configJson);
       String json = (String) configMap.get(LogSearchConstants.VALUES);
-      logfeederFilterWrapper = (VLogfeederFilterWrapper) jsonUtil.jsonToObj(json, VLogfeederFilterWrapper.class);
+      logfeederFilterWrapper = (VLogfeederFilterWrapper) JSONUtil.jsonToObj(json, VLogfeederFilterWrapper.class);
       logfeederFilterWrapper.setId("" + configDoc.get(LogSearchConstants.ID));
 
     } else {
-      String logfeederDefaultLevels = PropertiesUtil.getProperty("logsearch.logfeeder.include.default.level", DEFAULT_LEVELS);
+      String logfeederDefaultLevels = PropertiesHelper.getProperty("logsearch.logfeeder.include.default.level", DEFAULT_LEVELS);
       JSONArray levelJsonArray = new JSONArray(Arrays.asList(logfeederDefaultLevels.split(",")));
 
       String hadoopServiceString = getHadoopServiceConfigJSON();
@@ -148,7 +149,7 @@ public class UserConfigSolrDao extends SolrDaoBase {
           }
         }
         jsonValue.put("filter", componentList);
-        logfeederFilterWrapper = (VLogfeederFilterWrapper) jsonUtil.jsonToObj(jsonValue.toString(), VLogfeederFilterWrapper.class);
+        logfeederFilterWrapper = (VLogfeederFilterWrapper) JSONUtil.jsonToObj(jsonValue.toString(), VLogfeederFilterWrapper.class);
         logfeederFilterWrapper.setId(""+new Date().getTime());
         saveUserFilter(logfeederFilterWrapper);
 
@@ -181,7 +182,7 @@ public class UserConfigSolrDao extends SolrDaoBase {
     }
 
     String hadoopServiceConfig = result.toString();
-    if (jsonUtil.isJSONValid(hadoopServiceConfig)) {
+    if (JSONUtil.isJSONValid(hadoopServiceConfig)) {
       return hadoopServiceConfig;
     }
     return null;

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
index b7853df..39f0e25 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
@@ -28,11 +28,10 @@ import javax.annotation.PostConstruct;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.core.GrantedAuthority;
 import org.springframework.stereotype.Repository;
-
+import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.util.CommonUtil;
 import org.apache.ambari.logsearch.util.FileUtil;
 import org.apache.ambari.logsearch.util.JSONUtil;
-import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.ambari.logsearch.web.model.Privilege;
 import org.apache.ambari.logsearch.web.model.Role;
 import org.apache.ambari.logsearch.web.model.User;
@@ -52,10 +51,6 @@ public class UserDao {
   private static final String NAME = "name";
 
   @Autowired
-  private JSONUtil jsonUtil;
-  @Autowired
-  private FileUtil fileUtil;
-  @Autowired
   private LogsearchFileAuthenticationProvider fileAuthenticationProvider;
 
   private ArrayList<HashMap<String, String>> userList = null;
@@ -65,21 +60,21 @@ public class UserDao {
   public void initialization() {
     if (fileAuthenticationProvider.isEnable()) {
       try {
-        String userPassJsonFileName = PropertiesUtil.getProperty("logsearch.login.credentials.file");
+        String userPassJsonFileName = PropertiesHelper.getProperty("logsearch.login.credentials.file");
         logger.info("USER PASS JSON  file NAME:" + userPassJsonFileName);
-        File jsonFile = fileUtil.getFileFromClasspath(userPassJsonFileName);
+        File jsonFile = FileUtil.getFileFromClasspath(userPassJsonFileName);
         if (jsonFile == null || !jsonFile.exists()) {
           logger.fatal("user_pass json file not found in classpath :" + userPassJsonFileName);
           System.exit(1);
         }
-        HashMap<String, Object> userInfos = jsonUtil.readJsonFromFile(jsonFile);
+        HashMap<String, Object> userInfos = JSONUtil.readJsonFromFile(jsonFile);
         userList = (ArrayList<HashMap<String, String>>) userInfos.get("users");
         if (userList != null) {
           boolean isUpdated = this.encryptAllPassword();
           userInfos.put("users", userList);
           if (isUpdated) {
-            String jsonStr = jsonUtil.mapToJSON(userInfos);
-            jsonUtil.writeJSONInFile(jsonStr, jsonFile, true);
+            String jsonStr = JSONUtil.mapToJSON(userInfos);
+            JSONUtil.writeJSONInFile(jsonStr, jsonFile, true);
           }
         } else {
           userList = new ArrayList<HashMap<String, String>>();

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
index 3793f50..d84b7b9 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
@@ -47,8 +47,6 @@ import org.apache.solr.common.util.SimpleOrderedMap;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
-
-
 @Component
 public class GraphDataGenerator extends GraphDataGeneratorBase {
 
@@ -56,10 +54,6 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
 
   @Autowired
   private QueryGeneration queryGenerator;
-  @Autowired
-  private RESTErrorUtil restErrorUtil;
-  @Autowired
-  private SolrUtil solrUtil;
 
   public VBarDataList getAnyGraphData(SearchCriteria searchCriteria, SolrDaoBase solrDaoBase, SolrQuery solrQuery) {
     // X axis credentials
@@ -131,10 +125,10 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
     Collection<VBarGraphData> vBarGraphDatas = new ArrayList<VBarGraphData>();
     VBarGraphData vBarGraphData = new VBarGraphData();
     Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
-    queryGenerator.setMainQuery(solrQuery, null);
+    SolrUtil.setMainQuery(solrQuery, null);
     queryGenerator.setSingleIncludeFilter(solrQuery, fieldTime, "[" + from + " TO " + to + "]");
     if (typeXAxis.contains("string") || typeXAxis.contains("key_lower_case") || typeXAxis.contains("text")) {
-      queryGenerator.setFacetField(solrQuery, xAxisField);
+      SolrUtil.setFacetField(solrQuery, xAxisField);
       try {
         QueryResponse response = solrDaoBase.process(solrQuery);
         if (response != null && response.getResults() != null) {
@@ -184,10 +178,10 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
         logger.error("Got exception for solr query :" + query, e.getCause());
       }
     } else {
-      queryGenerator.setRowCount(solrQuery, 0);
+      SolrUtil.setRowCount(solrQuery, 0);
       String yAxis = yAxisField.contains("count") ? "sum" : yAxisField;
       String jsonQuery = queryGenerator.buildJSONFacetAggregatedFuncitonQuery(yAxis, xAxisField);
-      queryGenerator.setJSONFacet(solrQuery, jsonQuery);
+      SolrUtil.setJSONFacet(solrQuery, jsonQuery);
       try {
         QueryResponse response = solrDaoBase.process(solrQuery);
         SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response.getResponse().get("facets");
@@ -218,17 +212,17 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
     VBarDataList dataList = new VBarDataList();
     Collection<VBarGraphData> vGraphData = new ArrayList<VBarGraphData>();
     String mainQuery = queryGenerator.buildInclusiveRangeFilterQuery(fieldTime, from, to);
-    queryGenerator.setMainQuery(solrQuery, mainQuery);
-    queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
+    SolrUtil.setMainQuery(solrQuery, mainQuery);
+    SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
     String jsonQuery = "";
-    if (solrUtil.isSolrFieldNumber(typeXAxis,solrDaoBase)) {
+    if (SolrUtil.isSolrFieldNumber(typeXAxis,solrDaoBase)) {
       String function = (yAxisField.contains("count")) ? "sum" : yAxisField;
       jsonQuery = queryGenerator.buidlJSONFacetRangeQueryForNumber(stackField, xAxisField, function);
     } else {
       jsonQuery = queryGenerator.buildJsonFacetTermsRangeQuery(stackField, xAxisField);
     }
     try {
-      queryGenerator.setJSONFacet(solrQuery, jsonQuery);
+      SolrUtil.setJSONFacet(solrQuery, jsonQuery);
       dataList.setGraphData(vGraphData);
       QueryResponse response = solrDaoBase.process(solrQuery);
       if (response == null) {
@@ -268,7 +262,7 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
     } catch (SolrException | IOException | SolrServerException e) {
       String query = solrQuery != null ? solrQuery.toQueryString() : "";
       logger.error("Got exception for solr query :" + query, e.getCause());
-      throw restErrorUtil.createRESTException(MessageEnums.DATA_NOT_FOUND.getMessage().getMessage(), MessageEnums.DATA_NOT_FOUND);
+      throw RESTErrorUtil.createRESTException(MessageEnums.DATA_NOT_FOUND.getMessage().getMessage(), MessageEnums.DATA_NOT_FOUND);
     }
   }
 
@@ -279,13 +273,13 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
     Collection<VBarGraphData> vBarGraphDatas = new ArrayList<VBarGraphData>();
     VBarGraphData vBarGraphData = new VBarGraphData();
     Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
-    queryGenerator.setMainQuery(solrQuery, null);
-    if (solrUtil.isSolrFieldNumber(typeXAxis,solrDaoBase)) {
+    SolrUtil.setMainQuery(solrQuery, null);
+    if (SolrUtil.isSolrFieldNumber(typeXAxis,solrDaoBase)) {
       queryGenerator.setSingleRangeFilter(solrQuery, fieldTime, from, to);
       return normalGraph(xAxisField, yAxisField, from, to, solrDaoBase, typeXAxis, fieldTime, solrQuery);
     } else {
       try {
-        queryGenerator.setFacetRange(solrQuery, xAxisField, from, to, unit);
+        SolrUtil.setFacetRange(solrQuery, xAxisField, from, to, unit);
         QueryResponse response = solrDaoBase.process(solrQuery);
         if (response != null) {
           Long count = response.getResults().getNumFound();
@@ -322,13 +316,13 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
       SolrDaoBase solrDaoBase, SolrQuery solrQuery) {
     VBarDataList dataList = new VBarDataList();
     List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();
-    queryGenerator.setMainQuery(solrQuery, null);
-    queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
+    SolrUtil.setMainQuery(solrQuery, null);
+    SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
     String jsonHistogramQuery =
         queryGenerator.buildJSONFacetTermTimeRangeQuery(stackField, xAxisField, from, to, unit).replace("\\", "");
     try {
       solrQuery.set("json.facet", jsonHistogramQuery);
-      queryGenerator.setRowCount(solrQuery, 0);
+      SolrUtil.setRowCount(solrQuery, 0);
       QueryResponse response = solrDaoBase.process(solrQuery);
       if (response != null) {
         SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response.getResponse().get("facets");

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
index 49006e2..e7fab9a 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
@@ -30,13 +30,9 @@ import org.apache.ambari.logsearch.view.VNameValue;
 import org.apache.commons.lang.StringUtils;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.springframework.beans.factory.annotation.Autowired;
 
 class GraphDataGeneratorBase extends MgrBase {
 
-  @Autowired
-  private DateUtil dateUtil;
-
   private static final String BUCKETS = "buckets";
   
   private static enum DataType {
@@ -86,7 +82,7 @@ class GraphDataGeneratorBase extends MgrBase {
                   for (Object levelBucket : levelBuckets) {
                     SimpleOrderedMap<Object> countValue = (SimpleOrderedMap<Object>) levelBucket;
                     if (countValue != null) {
-                      String innerName = dateUtil.convertDateWithMillisecondsToSolrDate((Date) countValue.getVal(0));
+                      String innerName = DateUtil.convertDateWithMillisecondsToSolrDate((Date) countValue.getVal(0));
                       String innerValue = countValue.getVal(1) != null ? countValue.getVal(1).toString() : "";
                       VNameValue vNameValue = new VNameValue(innerName, innerValue);
                       vNameValues.add(vNameValue);

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
index 947fdbb..172ec81 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
@@ -32,17 +32,18 @@ import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 
+import org.apache.ambari.logsearch.common.ConfigHelper;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.ManageStartEndTime;
 import org.apache.ambari.logsearch.common.MessageEnums;
+import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.dao.AuditSolrDao;
 import org.apache.ambari.logsearch.graph.GraphDataGenerator;
 import org.apache.ambari.logsearch.util.BizUtil;
-import org.apache.ambari.logsearch.util.ConfigUtil;
 import org.apache.ambari.logsearch.util.DateUtil;
-import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.ambari.logsearch.util.SolrUtil;
 import org.apache.ambari.logsearch.view.VBarDataList;
 import org.apache.ambari.logsearch.view.VBarGraphData;
 import org.apache.ambari.logsearch.view.VGroupList;
@@ -72,12 +73,6 @@ public class AuditMgr extends MgrBase {
   @Autowired
   private AuditSolrDao auditSolrDao;
   @Autowired
-  private RESTErrorUtil restErrorUtil;
-  @Autowired
-  private BizUtil bizUtil;
-  @Autowired
-  private DateUtil dateUtil;
-  @Autowired
   private GraphDataGenerator graphDataGenerator;
 
   public String getLogs(SearchCriteria searchCriteria) {
@@ -101,8 +96,8 @@ public class AuditMgr extends MgrBase {
     SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
     SolrDocumentList docList = new SolrDocumentList();
     try {
-      queryGenerator.setFacetField(solrQuery, LogSearchConstants.AUDIT_COMPONENT);
-      queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
+      SolrUtil.setFacetField(solrQuery, LogSearchConstants.AUDIT_COMPONENT);
+      SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
       List<FacetField> facetFields = null;
       List<Count> componentsCount = new ArrayList<Count>();
       FacetField facetField = null;
@@ -131,7 +126,7 @@ public class AuditMgr extends MgrBase {
       return docList;
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -157,8 +152,8 @@ public class AuditMgr extends MgrBase {
       LogSearchConstants.AUDIT_EVTTIME, from, to, unit).replace("\\", "");
 
     try {
-      queryGenerator.setJSONFacet(solrQuery, jsonHistogramQuery);
-      queryGenerator.setRowCount(solrQuery, 0);
+      SolrUtil.setJSONFacet(solrQuery, jsonHistogramQuery);
+      SolrUtil.setRowCount(solrQuery, 0);
       QueryResponse response = auditSolrDao.process(solrQuery);
       if (response == null){
         return convertObjToString(dataList);
@@ -176,7 +171,7 @@ public class AuditMgr extends MgrBase {
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error(e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
 
     }
   }
@@ -195,9 +190,9 @@ public class AuditMgr extends MgrBase {
 
       VNameValueList nameValueList = new VNameValueList(nameValues);
 
-      queryGenerator.setFacetField(solrQuery, facetField);
-      queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_COUNT);
-      queryGenerator.setFacetLimit(solrQuery, top.intValue());
+      SolrUtil.setFacetField(solrQuery, facetField);
+      SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_COUNT);
+      SolrUtil.setFacetLimit(solrQuery, top.intValue());
 
       List<Count> countList = new ArrayList<FacetField.Count>();
       QueryResponse queryResponse = auditSolrDao.process(solrQuery);
@@ -223,7 +218,7 @@ public class AuditMgr extends MgrBase {
 
     } catch (SolrException | IOException | SolrServerException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -234,11 +229,11 @@ public class AuditMgr extends MgrBase {
     solrQuery.setParam("event", "/audit/logs/live/count");
     try {
       Date[] timeRange = ManageStartEndTime.getStartEndTime();
-      String startDate = dateUtil.convertGivenDateFormatToSolrDateFormat(timeRange[0]);
-      String endDate = dateUtil.convertGivenDateFormatToSolrDateFormat(timeRange[1]);
+      String startDate = DateUtil.convertGivenDateFormatToSolrDateFormat(timeRange[0]);
+      String endDate = DateUtil.convertGivenDateFormatToSolrDateFormat(timeRange[1]);
 
-      queryGenerator.setMainQuery(solrQuery, null);
-      queryGenerator.setFacetRange(solrQuery, LogSearchConstants.AUDIT_EVTTIME, startDate, endDate, "+2MINUTE");
+      SolrUtil.setMainQuery(solrQuery, null);
+      SolrUtil.setFacetRange(solrQuery, LogSearchConstants.AUDIT_EVTTIME, startDate, endDate, "+2MINUTE");
       List<RangeFacet.Count> listCount;
 
       QueryResponse response = auditSolrDao.process(solrQuery);
@@ -270,7 +265,7 @@ public class AuditMgr extends MgrBase {
     } catch (SolrException | SolrServerException | ParseException
       | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -279,8 +274,8 @@ public class AuditMgr extends MgrBase {
     String jsonUserQuery =
         "{Users:{type:terms, field:reqUser, facet:{ Repo:{ type:terms, field:repo, facet:{eventCount:\"sum(event_count)\"}}}}}";
     SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-    queryGenerator.setJSONFacet(solrQuery, jsonUserQuery);
-    queryGenerator.setRowCount(solrQuery, 0);
+    SolrUtil.setJSONFacet(solrQuery, jsonUserQuery);
+    SolrUtil.setRowCount(solrQuery, 0);
     try {
       VBarDataList vBarDataList = new VBarDataList();
       QueryResponse queryResponse = auditSolrDao.process(solrQuery);
@@ -302,12 +297,12 @@ public class AuditMgr extends MgrBase {
       if (jsonFacetResponse.toString().equals("{count=0}")) {
         return convertObjToString(vBarDataList);
       }
-      vBarDataList = bizUtil.buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
+      vBarDataList = BizUtil.buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
       return convertObjToString(vBarDataList);
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error("Error during solrQuery=" + e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -316,8 +311,8 @@ public class AuditMgr extends MgrBase {
     String jsonUserQuery =
         "{Users:{type:terms,field:resource,facet:{Repo:{type:terms,field:repo,facet:{eventCount:\"sum(event_count)\"}}}}}";
     SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-    queryGenerator.setJSONFacet(solrQuery, jsonUserQuery);
-    queryGenerator.setRowCount(solrQuery, 0);
+    SolrUtil.setJSONFacet(solrQuery, jsonUserQuery);
+    SolrUtil.setRowCount(solrQuery, 0);
     try {
       VBarDataList vBarDataList = new VBarDataList();
       QueryResponse queryResponse = auditSolrDao.process(solrQuery);
@@ -333,12 +328,12 @@ public class AuditMgr extends MgrBase {
       @SuppressWarnings("unchecked")
       SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList.get("facets");
 
-      vBarDataList = bizUtil.buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
+      vBarDataList = BizUtil.buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
       return convertObjToString(vBarDataList);
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -354,14 +349,14 @@ public class AuditMgr extends MgrBase {
     VBarDataList dataList = new VBarDataList();
     List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();
 
-    queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
+    SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
 
     String jsonHistogramQuery = queryGenerator.buildJSONFacetTermTimeRangeQuery(LogSearchConstants.AUDIT_REQUEST_USER,
         LogSearchConstants.AUDIT_EVTTIME, from, to, unit).replace("\\", "");
 
     try {
-      queryGenerator.setJSONFacet(solrQuery, jsonHistogramQuery);
-      queryGenerator.setRowCount(solrQuery, 0);
+      SolrUtil.setJSONFacet(solrQuery, jsonHistogramQuery);
+      SolrUtil.setRowCount(solrQuery, 0);
       QueryResponse response = auditSolrDao.process(solrQuery);
       if (response == null){
         return convertObjToString(dataList);
@@ -378,19 +373,19 @@ public class AuditMgr extends MgrBase {
 
     } catch (SolrException | IOException | SolrServerException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
 
   }
 
   public String getAuditLogsSchemaFieldsName() {
-    String excludeArray[] = PropertiesUtil.getPropertyStringList("logsearch.solr.audit.logs.exclude.columnlist");
+    String excludeArray[] = PropertiesHelper.getPropertyStringList("logsearch.solr.audit.logs.exclude.columnlist");
     List<String> fieldNames = new ArrayList<String>();
     HashMap<String, String> uiFieldColumnMapping = new HashMap<String, String>();
-    ConfigUtil.getSchemaFieldsName(excludeArray, fieldNames,auditSolrDao);
+    ConfigHelper.getSchemaFieldsName(excludeArray, fieldNames,auditSolrDao);
 
     for (String fieldName : fieldNames) {
-      String uiField = ConfigUtil.auditLogsColumnMapping.get(fieldName + LogSearchConstants.SOLR_SUFFIX);
+      String uiField = ConfigHelper.auditLogsColumnMapping.get(fieldName + LogSearchConstants.SOLR_SUFFIX);
       if (uiField == null) {
         uiFieldColumnMapping.put(fieldName, fieldName);
       } else {
@@ -398,7 +393,7 @@ public class AuditMgr extends MgrBase {
       }
     }
 
-    uiFieldColumnMapping = bizUtil.sortHashMapByValues(uiFieldColumnMapping);
+    uiFieldColumnMapping = BizUtil.sortHashMapByValues(uiFieldColumnMapping);
     return convertObjToString(uiFieldColumnMapping);
 
   }
@@ -431,7 +426,7 @@ public class AuditMgr extends MgrBase {
       ArrayList<Object> levelBuckets = (ArrayList<Object>) ((NamedList<Object>) level.get(innerField)).get("buckets");
       for (Object temp1 : levelBuckets) {
         SimpleOrderedMap<Object> countValue = (SimpleOrderedMap<Object>) temp1;
-        String value = dateUtil.convertDateWithMillisecondsToSolrDate((Date) countValue.getVal(0));
+        String value = DateUtil.convertDateWithMillisecondsToSolrDate((Date) countValue.getVal(0));
 
         String count = "" + countValue.getVal(1);
         VNameValue vNameValue = new VNameValue();
@@ -455,8 +450,8 @@ public class AuditMgr extends MgrBase {
     startTime = startTime == null ? "" : startTime;
     endTime = endTime == null ? "" : "_" + endTime;
 
-    queryGenerator.setJSONFacet(solrQuery, jsonUserQuery);
-    queryGenerator.setRowCount(solrQuery, 0);
+    SolrUtil.setJSONFacet(solrQuery, jsonUserQuery);
+    SolrUtil.setRowCount(solrQuery, 0);
 
     String dataFormat = (String) searchCriteria.getParamValue("format");
     FileOutputStream fis = null;
@@ -465,21 +460,21 @@ public class AuditMgr extends MgrBase {
       if(queryResponse == null){
         VResponse response = new VResponse();
         response.setMsgDesc("Query was not able to execute "+solrQuery);
-        throw restErrorUtil.createRESTException(response);
+        throw RESTErrorUtil.createRESTException(response);
       }
 
       NamedList<Object> namedList = queryResponse.getResponse();
       if (namedList == null) {
         VResponse response = new VResponse();
         response.setMsgDesc("Query was not able to execute "+solrQuery);
-        throw restErrorUtil.createRESTException(response);
+        throw RESTErrorUtil.createRESTException(response);
       }
       VBarDataList vBarUserDataList = new VBarDataList();
       VBarDataList vBarResourceDataList = new VBarDataList();
 
       SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList.get("facets");
-      vBarUserDataList = bizUtil.buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
-      vBarResourceDataList = bizUtil.buildSummaryForTopCounts(jsonFacetResponse,"y","x");
+      vBarUserDataList = BizUtil.buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
+      vBarResourceDataList = BizUtil.buildSummaryForTopCounts(jsonFacetResponse,"y","x");
       String data = "";
       String summary = "";
       if ("text".equals(dataFormat)) {
@@ -570,7 +565,7 @@ public class AuditMgr extends MgrBase {
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error("Error during solrQuery=" + e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     } finally {
       if (fis != null) {
         try {
@@ -599,7 +594,7 @@ public class AuditMgr extends MgrBase {
     SolrQuery serivceLoadQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
 
     try {
-      queryGenerator.setFacetField(serivceLoadQuery, LogSearchConstants.AUDIT_COMPONENT);
+      SolrUtil.setFacetField(serivceLoadQuery, LogSearchConstants.AUDIT_COMPONENT);
       QueryResponse serviceLoadResponse = auditSolrDao.process(serivceLoadQuery);
       if (serviceLoadResponse == null){
         return convertObjToString(dataList);
@@ -629,7 +624,7 @@ public class AuditMgr extends MgrBase {
 
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 }
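
A note on the pattern above: the top-user and top-resource summaries hand a JSON Facet definition (the jsonUserQuery strings) to Solr via SolrUtil.setJSONFacet and set the row count to 0, since only the facet buckets are consumed. The helper bodies are not part of this patch; the snippet below is a rough sketch of an assumed plain-SolrJ equivalent, with a hypothetical class name and method bodies that are my assumption rather than the project's code.

  import org.apache.solr.client.solrj.SolrQuery;

  // Hypothetical sketch only -- assumed equivalents of SolrUtil.setJSONFacet
  // and SolrUtil.setRowCount, not code taken from this patch.
  public final class JsonFacetSketch {
    private JsonFacetSketch() {
    }

    // Attach a JSON Facet definition as the json.facet request parameter.
    public static void setJsonFacet(SolrQuery query, String facetJson) {
      query.set("json.facet", facetJson);
    }

    // rows=0 returns facet buckets only, with no documents in the response.
    public static void setRowCount(SolrQuery query, int rows) {
      query.setRows(rows);
    }
  }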

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
index 8badb61..e227c6c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
@@ -28,6 +28,8 @@ import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.dao.AuditSolrDao;
 import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao;
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
+import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.ambari.logsearch.util.SolrUtil;
 import org.apache.ambari.logsearch.view.VLogFile;
 import org.apache.ambari.logsearch.view.VLogFileList;
 import org.apache.ambari.logsearch.view.VSolrLogList;
@@ -61,8 +63,8 @@ public class LogFileMgr extends MgrBase {
     String host = (String) searchCriteria.getParamValue("host");
     int minCount = 1;// to remove zero count facet
     SolrQuery solrQuery = new SolrQuery();
-    queryGenerator.setMainQuery(solrQuery, null);
-    queryGenerator.setFacetFieldWithMincount(solrQuery, LogSearchConstants.SOLR_PATH, minCount);
+    SolrUtil.setMainQuery(solrQuery, null);
+    SolrUtil.setFacetFieldWithMincount(solrQuery, LogSearchConstants.SOLR_PATH, minCount);
     // adding filter
     queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_COMPONENT, componentName);
     queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_HOST, host);
@@ -77,7 +79,7 @@ public class LogFileMgr extends MgrBase {
       } else if (logType.equalsIgnoreCase(LogType.AUDIT.name())) {
         daoMgr = auditSolrDao;
       } else {
-        throw restErrorUtil.createRESTException(logType + " is not a valid logType", MessageEnums.INVALID_INPUT_DATA);
+        throw RESTErrorUtil.createRESTException(logType + " is not a valid logType", MessageEnums.INVALID_INPUT_DATA);
       }
       QueryResponse queryResponse = daoMgr.process(solrQuery);
       if (queryResponse.getFacetField(LogSearchConstants.SOLR_PATH) != null) {
@@ -96,7 +98,7 @@ public class LogFileMgr extends MgrBase {
       }
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error in solr query  :" + e.getLocalizedMessage() + "\n Query :" + solrQuery.toQueryString(), e.getCause());
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
     logFileList.setLogFiles(logFiles);
     String jsonStr = "";
@@ -111,31 +113,31 @@ public class LogFileMgr extends MgrBase {
     String component = (String) searchCriteria.getParamValue("component");
     String tailSize = (String) searchCriteria.getParamValue("tailSize");
     if (StringUtils.isBlank(host)) {
-      throw restErrorUtil.createRESTException("missing Host Name", MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException("missing Host Name", MessageEnums.ERROR_SYSTEM);
     }
     tailSize = (StringUtils.isBlank(tailSize)) ? "10" : tailSize;
     SolrQuery logFileTailQuery = new SolrQuery();
     try {
       int tail = Integer.parseInt(tailSize);
       tail = tail > 100 ? 100 : tail;
-      queryGenerator.setMainQuery(logFileTailQuery, null);
+      SolrUtil.setMainQuery(logFileTailQuery, null);
       queryGenerator.setSingleIncludeFilter(logFileTailQuery, LogSearchConstants.SOLR_HOST, host);
       if (!StringUtils.isBlank(logFile)) {
-        queryGenerator.setSingleIncludeFilter(logFileTailQuery, LogSearchConstants.SOLR_PATH, solrUtil.makeSolrSearchString(logFile));
+        queryGenerator.setSingleIncludeFilter(logFileTailQuery, LogSearchConstants.SOLR_PATH, SolrUtil.makeSolrSearchString(logFile));
       } else if (!StringUtils.isBlank(component)) {
         queryGenerator.setSingleIncludeFilter(logFileTailQuery, LogSearchConstants.SOLR_COMPONENT, component);
       } else {
-        throw restErrorUtil.createRESTException("component or logfile parameter must be present", MessageEnums.ERROR_SYSTEM);
+        throw RESTErrorUtil.createRESTException("component or logfile parameter must be present", MessageEnums.ERROR_SYSTEM);
       }
 
-      queryGenerator.setRowCount(logFileTailQuery, tail);
+      SolrUtil.setRowCount(logFileTailQuery, tail);
       queryGenerator.setSortOrderDefaultServiceLog(logFileTailQuery, new SearchCriteria());
       VSolrLogList solrLogList = getLogAsPaginationProvided(logFileTailQuery, serviceLogsSolrDao);
       return convertObjToString(solrLogList);
 
     } catch (NumberFormatException ne) {
 
-      throw restErrorUtil.createRESTException(ne.getMessage(),
+      throw RESTErrorUtil.createRESTException(ne.getMessage(),
         MessageEnums.ERROR_SYSTEM);
 
     }
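
The file-list and tail queries above likewise switch to static SolrUtil helpers (setMainQuery, setFacetFieldWithMincount, setRowCount). Their implementations are not shown in this patch; the snippet below is a minimal sketch assuming they are thin wrappers over the standard SolrJ setters, so the method bodies are illustrative assumptions.

  import org.apache.solr.client.solrj.SolrQuery;

  // Assumed behaviour only -- illustrative stand-ins for the SolrUtil helpers
  // used by LogFileMgr; not taken from the patch.
  public final class LogFileQuerySketch {
    private LogFileQuerySketch() {
    }

    // A null query string is assumed to mean "match everything".
    public static void setMainQuery(SolrQuery query, String queryString) {
      query.setQuery(queryString == null ? "*:*" : queryString);
    }

    // Facet on a single field and drop zero-count buckets.
    public static void setFacetFieldWithMincount(SolrQuery query, String field, int minCount) {
      query.setFacet(true);
      query.addFacetField(field);
      query.setFacetMinCount(minCount);
    }
  }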


[04/50] [abbrv] ambari git commit: AMBARI-18193. Zeppelin logsearch configuration typo (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
AMBARI-18193. Zeppelin logsearch configuration typo (Miklos Gergely via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/93fefb4b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/93fefb4b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/93fefb4b

Branch: refs/heads/branch-dev-logsearch
Commit: 93fefb4b422e69f8fad86baf5c07c27f82701564
Parents: f8cf23e
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Mon Aug 22 10:59:02 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:33:58 2016 +0200

----------------------------------------------------------------------
 .../common-services/LOGSEARCH/0.5.0/package/scripts/params.py      | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/93fefb4b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
index cb7b79f..5c3954f 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
@@ -205,7 +205,7 @@ ranger_kms_log_dir = default('/configurations/kms-env/kms_log_dir', '/var/log/ra
 storm_log_dir = default('/configurations/storm-env/storm_log_dir', '/var/log/storm')
 yarn_log_dir_prefix = default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')
 mapred_log_dir_prefix = default('/configurations/mapred-env/mapred_log_dir_prefix', '/var/log/hadoop')
-zeppelin_log_dir = default('/configuration/zeppelin-env/zeppelin_log_dir', '/var/log/zeppelin')
+zeppelin_log_dir = default('/configurations/zeppelin-env/zeppelin_log_dir', '/var/log/zeppelin')
 zk_log_dir = default('/configurations/zookeeper-env/zk_log_dir', '/var/log/zookeeper')
 spark_log_dir = default('/configurations/spark-env/spark_log_dir', '/var/log/spark')
 livy_log_dir = default('/configurations/livy-env/livy_log_dir', '/var/log/livy')


[20/50] [abbrv] ambari git commit: AMBARI-18236. Fix package structure in Logfeeder (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogfeederException.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogfeederException.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogfeederException.java
new file mode 100644
index 0000000..8a07602
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogfeederException.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.common;
+
+public class LogfeederException extends Exception {
+
+  public LogfeederException(String message, Throwable throwable) {
+    super(message, throwable);
+  }
+
+  public LogfeederException(String message) {
+    super(message);
+  }
+}
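
LogfeederException moves from the exception package to common with an unchanged body. A short, purely illustrative usage sketch of wrapping a lower-level failure in the project's checked exception; the class and method below are hypothetical and not part of the patch.

  import org.apache.ambari.logfeeder.common.LogfeederException;

  // Hypothetical caller -- exercises both constructors of LogfeederException.
  public class LogfeederExceptionUsageSketch {
    public static int parseRetryCount(String raw) throws LogfeederException {
      if (raw == null) {
        throw new LogfeederException("retry count is missing");
      }
      try {
        return Integer.parseInt(raw.trim());
      } catch (NumberFormatException e) {
        throw new LogfeederException("retry count is not a number: " + raw, e);
      }
    }
  }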

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/exception/LogfeederException.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/exception/LogfeederException.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/exception/LogfeederException.java
deleted file mode 100644
index c22b512..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/exception/LogfeederException.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder.exception;
-
-public class LogfeederException extends Exception {
-
-  public LogfeederException(String message, Throwable throwable) {
-    super(message, throwable);
-  }
-
-  public LogfeederException(String message) {
-    super(message);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
index 01d4f79..ab371f1 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
@@ -24,17 +24,17 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.AliasUtil;
-import org.apache.ambari.logfeeder.ConfigBlock;
-import org.apache.ambari.logfeeder.LogFeederUtil;
-import org.apache.ambari.logfeeder.MetricCount;
-import org.apache.ambari.logfeeder.OutputMgr;
-import org.apache.ambari.logfeeder.AliasUtil.ALIAS_PARAM;
-import org.apache.ambari.logfeeder.AliasUtil.ALIAS_TYPE;
-import org.apache.ambari.logfeeder.exception.LogfeederException;
+import org.apache.ambari.logfeeder.common.ConfigBlock;
+import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.mapper.Mapper;
+import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.output.OutputMgr;
+import org.apache.ambari.logfeeder.util.AliasUtil;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.AliasUtil.ALIAS_PARAM;
+import org.apache.ambari.logfeeder.util.AliasUtil.ALIAS_TYPE;
 import org.apache.log4j.Logger;
 import org.apache.log4j.Priority;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java
index 7aa649d..372c208 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java
@@ -34,10 +34,10 @@ import java.util.regex.Pattern;
 import oi.thekraken.grok.api.Grok;
 import oi.thekraken.grok.api.exception.GrokException;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
-import org.apache.ambari.logfeeder.MetricCount;
-import org.apache.ambari.logfeeder.exception.LogfeederException;
+import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java
index f375374..2954106 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java
@@ -20,9 +20,9 @@ package org.apache.ambari.logfeeder.filter;
 
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
-import org.apache.ambari.logfeeder.exception.LogfeederException;
+import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.log4j.Logger;
 
 public class FilterJSON extends Filter {

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
index 1b8b3a3..7adb468 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
@@ -23,10 +23,10 @@ import java.util.List;
 import java.util.Map;
 import java.util.StringTokenizer;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
-import org.apache.ambari.logfeeder.MetricCount;
-import org.apache.ambari.logfeeder.exception.LogfeederException;
+import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java
index 76af16c..5feb9c4 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java
@@ -26,13 +26,12 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.ConfigBlock;
-import org.apache.ambari.logfeeder.InputMgr;
-import org.apache.ambari.logfeeder.MetricCount;
-import org.apache.ambari.logfeeder.OutputMgr;
-import org.apache.ambari.logfeeder.exception.LogfeederException;
+import org.apache.ambari.logfeeder.common.ConfigBlock;
+import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.filter.Filter;
+import org.apache.ambari.logfeeder.metrics.MetricCount;
 import org.apache.ambari.logfeeder.output.Output;
+import org.apache.ambari.logfeeder.output.OutputMgr;
 import org.apache.log4j.Logger;
 
 public abstract class Input extends ConfigBlock implements Runnable {

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java
index 9d3545e..c9f5ded 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java
@@ -33,8 +33,8 @@ import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
 import org.apache.ambari.logfeeder.input.reader.LogsearchReaderFactory;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.io.filefilter.WildcardFileFilter;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMgr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMgr.java
new file mode 100644
index 0000000..b18c9b0
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMgr.java
@@ -0,0 +1,451 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.input;
+
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+
+import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.io.filefilter.WildcardFileFilter;
+import org.apache.log4j.Logger;
+import org.apache.solr.common.util.Base64;
+
+public class InputMgr {
+  private static final Logger logger = Logger.getLogger(InputMgr.class);
+
+  private List<Input> inputList = new ArrayList<Input>();
+  private Set<Input> notReadyList = new HashSet<Input>();
+
+  private boolean isDrain = false;
+  private boolean isAnyInputTail = false;
+
+  private String checkPointSubFolderName = "logfeeder_checkpoints";
+  private File checkPointFolderFile = null;
+
+  private MetricCount filesCountMetric = new MetricCount();
+
+  private String checkPointExtension = ".cp";
+  
+  private Thread inputIsReadyMonitor = null;
+
+  public List<Input> getInputList() {
+    return inputList;
+  }
+
+  public void add(Input input) {
+    inputList.add(input);
+  }
+
+  public void removeInput(Input input) {
+    logger.info("Trying to remove from inputList. "
+      + input.getShortDescription());
+    Iterator<Input> iter = inputList.iterator();
+    while (iter.hasNext()) {
+      Input iterInput = iter.next();
+      if (iterInput.equals(input)) {
+        logger.info("Removing Input from inputList. "
+          + input.getShortDescription());
+        iter.remove();
+      }
+    }
+  }
+
+  public int getActiveFilesCount() {
+    int count = 0;
+    for (Input input : inputList) {
+      if (input.isReady()) {
+        count++;
+      }
+    }
+    return count;
+  }
+
+  public void init() {
+    filesCountMetric.metricsName = "input.files.count";
+    filesCountMetric.isPointInTime = true;
+
+    checkPointExtension = LogFeederUtil.getStringProperty(
+      "logfeeder.checkpoint.extension", checkPointExtension);
+    for (Input input : inputList) {
+      try {
+        input.init();
+        if (input.isTail()) {
+          isAnyInputTail = true;
+        }
+      } catch (Exception e) {
+        logger.error(
+          "Error initializing input. "
+            + input.getShortDescription(), e);
+      }
+    }
+
+    if (isAnyInputTail) {
+      logger.info("Determining valid checkpoint folder");
+      boolean isCheckPointFolderValid = false;
+      // We need to keep track of the files we are reading.
+      String checkPointFolder = LogFeederUtil
+        .getStringProperty("logfeeder.checkpoint.folder");
+      if (checkPointFolder != null && !checkPointFolder.isEmpty()) {
+        checkPointFolderFile = new File(checkPointFolder);
+        isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
+      }
+      if (!isCheckPointFolderValid) {
+        // Let's try home folder
+        String userHome = LogFeederUtil.getStringProperty("user.home");
+        if (userHome != null) {
+          checkPointFolderFile = new File(userHome,
+            checkPointSubFolderName);
+          logger.info("Checking if home folder can be used for checkpoints. Folder="
+            + checkPointFolderFile);
+          isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
+        }
+      }
+      if (!isCheckPointFolderValid) {
+        // Let's use tmp folder
+        String tmpFolder = LogFeederUtil
+          .getStringProperty("java.io.tmpdir");
+        if (tmpFolder == null) {
+          tmpFolder = "/tmp";
+        }
+        checkPointFolderFile = new File(tmpFolder,
+          checkPointSubFolderName);
+        logger.info("Checking if tmps folder can be used for checkpoints. Folder="
+          + checkPointFolderFile);
+        isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
+        if (isCheckPointFolderValid) {
+          logger.warn("Using tmp folder "
+            + checkPointFolderFile
+            + " to store check points. This is not recommended."
+            + "Please set logfeeder.checkpoint.folder property");
+        }
+      }
+
+      if (isCheckPointFolderValid) {
+        logger.info("Using folder " + checkPointFolderFile
+          + " for storing checkpoints");
+      }
+    }
+
+  }
+
+  public File getCheckPointFolderFile() {
+    return checkPointFolderFile;
+  }
+
+  private boolean verifyCheckPointFolder(File folderPathFile) {
+    if (!folderPathFile.exists()) {
+      // Create the folder
+      try {
+        if (!folderPathFile.mkdir()) {
+          logger.warn("Error creating folder for check point. folder="
+            + folderPathFile);
+        }
+      } catch (Throwable t) {
+        logger.warn("Error creating folder for check point. folder="
+          + folderPathFile, t);
+      }
+    }
+
+    if (folderPathFile.exists() && folderPathFile.isDirectory()) {
+      // Let's check whether we can create a file
+      File testFile = new File(folderPathFile, UUID.randomUUID()
+        .toString());
+      try {
+        testFile.createNewFile();
+        return testFile.delete();
+      } catch (IOException e) {
+        logger.warn(
+          "Couldn't create test file in "
+            + folderPathFile.getAbsolutePath()
+            + " for checkPoint", e);
+      }
+    }
+    return false;
+  }
+
+  public void monitor() {
+    for (Input input : inputList) {
+      if (input.isReady()) {
+        input.monitor();
+      } else {
+        if (input.isTail()) {
+          logger.info("Adding input to not ready list. Note, it is possible this component is not run on this host. So it might not be an issue. "
+            + input.getShortDescription());
+          notReadyList.add(input);
+        } else {
+          logger.info("Input is not ready, so going to ignore it "
+            + input.getShortDescription());
+        }
+      }
+    }
+    // Start the monitoring thread if any file is in tail mode
+    if (isAnyInputTail) {
+       inputIsReadyMonitor = new Thread("InputIsReadyMonitor") {
+        @Override
+        public void run() {
+          logger.info("Going to monitor for these missing files: "
+            + notReadyList.toString());
+          while (true) {
+            if (isDrain) {
+              logger.info("Exiting missing file monitor.");
+              break;
+            }
+            try {
+              Iterator<Input> iter = notReadyList.iterator();
+              while (iter.hasNext()) {
+                Input input = iter.next();
+                try {
+                  if (input.isReady()) {
+                    input.monitor();
+                    iter.remove();
+                  }
+                } catch (Throwable t) {
+                  logger.error("Error while enabling monitoring for input. "
+                    + input.getShortDescription());
+                }
+              }
+              Thread.sleep(30 * 1000);
+            } catch (Throwable t) {
+              // Ignore
+            }
+          }
+        }
+      };
+      inputIsReadyMonitor.start();
+    }
+  }
+
+  public void addToNotReady(Input notReadyInput) {
+    notReadyList.add(notReadyInput);
+  }
+
+  public void addMetricsContainers(List<MetricCount> metricsList) {
+    for (Input input : inputList) {
+      input.addMetricsContainers(metricsList);
+    }
+    filesCountMetric.count = getActiveFilesCount();
+    metricsList.add(filesCountMetric);
+  }
+
+  public void logStats() {
+    for (Input input : inputList) {
+      input.logStat();
+    }
+
+    filesCountMetric.count = getActiveFilesCount();
+    LogFeederUtil.logStatForMetric(filesCountMetric,
+      "Stat: Files Monitored Count", null);
+  }
+
+  public void close() {
+    for (Input input : inputList) {
+      try {
+        input.setDrain(true);
+      } catch (Throwable t) {
+        logger.error(
+          "Error while draining. input="
+            + input.getShortDescription(), t);
+      }
+    }
+    isDrain = true;
+
+    // Need to get this value from property
+    int iterations = 30;
+    int waitTimeMS = 1000;
+    int i = 0;
+    boolean allClosed = true;
+    for (i = 0; i < iterations; i++) {
+      allClosed = true;
+      for (Input input : inputList) {
+        if (!input.isClosed()) {
+          try {
+            allClosed = false;
+            logger.warn("Waiting for input to close. "
+              + input.getShortDescription() + ", "
+              + (iterations - i) + " more seconds");
+            Thread.sleep(waitTimeMS);
+          } catch (Throwable t) {
+            // Ignore
+          }
+        }
+      }
+      if (allClosed) {
+        break;
+      }
+    }
+    if (!allClosed) {
+      logger.warn("Some inputs were not closed. Iterations=" + i);
+      for (Input input : inputList) {
+        if (!input.isClosed()) {
+          logger.warn("Input not closed. Will ignore it."
+            + input.getShortDescription());
+        }
+      }
+    } else {
+      logger.info("All inputs are closed. Iterations=" + i);
+    }
+
+  }
+
+  public void checkInAll() {
+    for (Input input : inputList) {
+      input.checkIn();
+    }
+  }
+
+  public void cleanCheckPointFiles() {
+
+    if (checkPointFolderFile == null) {
+      logger.info("Will not clean checkPoint files. checkPointFolderFile="
+        + checkPointFolderFile);
+      return;
+    }
+    logger.info("Cleaning checkPoint files. checkPointFolderFile="
+      + checkPointFolderFile.getAbsolutePath());
+    try {
+      // Loop over the check point files and if filePath is not present, then move to closed
+      String searchPath = "*" + checkPointExtension;
+      FileFilter fileFilter = new WildcardFileFilter(searchPath);
+      File[] checkPointFiles = checkPointFolderFile.listFiles(fileFilter);
+      int totalCheckFilesDeleted = 0;
+      for (File checkPointFile : checkPointFiles) {
+        RandomAccessFile checkPointReader = null;
+        try {
+          checkPointReader = new RandomAccessFile(checkPointFile, "r");
+
+          int contentSize = checkPointReader.readInt();
+          byte b[] = new byte[contentSize];
+          int readSize = checkPointReader.read(b, 0, contentSize);
+          if (readSize != contentSize) {
+            logger.error("Couldn't read expected number of bytes from checkpoint file. expected="
+              + contentSize
+              + ", read="
+              + readSize
+              + ", checkPointFile=" + checkPointFile);
+          } else {
+            // Create JSON string
+            String jsonCheckPointStr = new String(b, 0, readSize);
+            Map<String, Object> jsonCheckPoint = LogFeederUtil
+              .toJSONObject(jsonCheckPointStr);
+
+            String logFilePath = (String) jsonCheckPoint
+              .get("file_path");
+            String logFileKey = (String) jsonCheckPoint
+              .get("file_key");
+            if (logFilePath != null && logFileKey != null) {
+              boolean deleteCheckPointFile = false;
+              File logFile = new File(logFilePath);
+              if (logFile.exists()) {
+                Object fileKeyObj = InputFile
+                  .getFileKey(logFile);
+                String fileBase64 = Base64
+                  .byteArrayToBase64(fileKeyObj
+                    .toString().getBytes());
+                if (!logFileKey.equals(fileBase64)) {
+                  deleteCheckPointFile = true;
+                  logger.info("CheckPoint clean: File key has changed. old="
+                    + logFileKey
+                    + ", new="
+                    + fileBase64
+                    + ", filePath="
+                    + logFilePath
+                    + ", checkPointFile="
+                    + checkPointFile.getAbsolutePath());
+                }
+              } else {
+                logger.info("CheckPoint clean: Log file doesn't exist. filePath="
+                  + logFilePath
+                  + ", checkPointFile="
+                  + checkPointFile.getAbsolutePath());
+                deleteCheckPointFile = true;
+              }
+              if (deleteCheckPointFile) {
+                logger.info("Deleting CheckPoint file="
+                  + checkPointFile.getAbsolutePath()
+                  + ", logFile=" + logFilePath);
+                checkPointFile.delete();
+                totalCheckFilesDeleted++;
+              }
+            }
+          }
+        } catch (EOFException eof) {
+          logger.warn("Caught EOFException. Ignoring reading existing checkPoint file. "
+            + checkPointFile);
+        } catch (Throwable t) {
+          logger.error("Error while checking checkPoint file. "
+            + checkPointFile, t);
+        } finally {
+          if (checkPointReader != null) {
+            try {
+              checkPointReader.close();
+            } catch (Throwable t) {
+              logger.error("Error closing checkPoint file. "
+                + checkPointFile, t);
+            }
+          }
+        }
+      }
+      logger.info("Deleted " + totalCheckFilesDeleted
+        + " checkPoint file(s). checkPointFolderFile="
+        + checkPointFolderFile.getAbsolutePath());
+
+    } catch (Throwable t) {
+      logger.error("Error while cleaning checkPointFiles", t);
+    }
+  }
+
+  public void waitOnAllInputs() {
+    //wait on inputs
+    if (inputList != null) {
+      for (Input input : inputList) {
+        if (input != null) {
+          Thread inputThread = input.getThread();
+          if (inputThread != null) {
+            try {
+              inputThread.join();
+            } catch (InterruptedException e) {
+              // ignore
+            }
+          }
+        }
+      }
+    }
+    // wait on monitor
+    if (inputIsReadyMonitor != null) {
+      try {
+        this.close();
+        inputIsReadyMonitor.join();
+      } catch (InterruptedException e) {
+        // ignore
+      }
+    }
+  }
+}
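
For orientation, the new InputMgr owns the whole input lifecycle: checkpoint-folder resolution in init(), a background retry thread for not-yet-ready tail inputs in monitor(), checkpoint cleanup, and a draining close. The driver below is a minimal sketch of how a caller might exercise that lifecycle; wiring up concrete, configured Input instances is assumed to happen elsewhere and is not part of this patch.

  import org.apache.ambari.logfeeder.input.Input;
  import org.apache.ambari.logfeeder.input.InputMgr;

  // Illustrative lifecycle sketch only; obtaining configured Input instances
  // (e.g. an InputFile built from the logfeeder config) is not shown here.
  public class InputMgrDriverSketch {
    public static void run(Input... configuredInputs) {
      InputMgr inputMgr = new InputMgr();
      for (Input input : configuredInputs) {
        inputMgr.add(input);
      }
      inputMgr.init();                 // resolve checkpoint folder, init each input
      inputMgr.monitor();              // start tailing; not-ready inputs retried in background
      inputMgr.cleanCheckPointFiles(); // drop checkpoints for deleted or rotated log files
      inputMgr.logStats();             // log the "files monitored" point-in-time metric
      inputMgr.waitOnAllInputs();      // join input threads, then drain and close
    }
  }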

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java
index 12a512f..c9d28bd 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java
@@ -28,8 +28,8 @@ import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
-import org.apache.ambari.logfeeder.s3.S3Util;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.S3Util;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
index 48ad7ac..5ba56a5 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
@@ -27,9 +27,9 @@ import java.util.List;
 import java.util.Map;
 import java.util.Random;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
 import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.filter.FilterJSON;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.solr.common.util.Base64;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
index 872460b..ae0cfc0 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
@@ -26,7 +26,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.TimeZone;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.ambari.logfeeder.util.SolrUtil;
 import org.apache.ambari.logfeeder.view.VLogfeederFilter;
 import org.apache.ambari.logfeeder.view.VLogfeederFilterWrapper;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java
index 128c5c4..bc807193 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java
@@ -22,7 +22,7 @@ package org.apache.ambari.logfeeder.logconfig;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.log4j.Logger;
 
 public enum LogfeederScheduler {

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java
index 8691a19..b5e4eb3 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java
@@ -22,9 +22,9 @@ package org.apache.ambari.logfeeder.logconfig.filter;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
 import org.apache.ambari.logfeeder.logconfig.FetchConfigFromSolr;
 import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.ambari.logfeeder.view.VLogfeederFilter;
 import org.apache.log4j.Logger;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java
index bf33f93..3a8eae9 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java
@@ -21,8 +21,8 @@ package org.apache.ambari.logfeeder.logconfig.filter;
 
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
 import org.apache.ambari.logfeeder.logconfig.filter.ApplyLogFilter;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.log4j.Logger;
 
 /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java
index 45ccc70..9aa0b23 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java
@@ -23,7 +23,7 @@ import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java
index e1f8f97..c692a9d 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java
@@ -21,7 +21,7 @@ package org.apache.ambari.logfeeder.mapper;
 
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java
index 7e530f5..e618261 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java
@@ -21,7 +21,7 @@ package org.apache.ambari.logfeeder.mapper;
 
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/LogFeederAMSClient.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/LogFeederAMSClient.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/LogFeederAMSClient.java
new file mode 100644
index 0000000..0a0f4e9
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/LogFeederAMSClient.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.metrics;
+
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.apache.log4j.Logger;
+
+// TODO: Refactor for failover
+public class LogFeederAMSClient extends AbstractTimelineMetricsSink {
+  private static final Logger logger = Logger.getLogger(LogFeederAMSClient.class);
+
+  private String collectorHosts = null;
+
+  public LogFeederAMSClient() {
+    collectorHosts = LogFeederUtil
+      .getStringProperty("logfeeder.metrics.collector.hosts");
+    if (collectorHosts != null && collectorHosts.trim().length() == 0) {
+      collectorHosts = null;
+    }
+    if (collectorHosts != null) {
+      collectorHosts = collectorHosts.trim();
+    }
+    logger.info("AMS collector URL=" + collectorHosts);
+  }
+
+  @Override
+  public String getCollectorUri(String host) {
+    return collectorHosts;
+  }
+
+  @Override
+  protected int getTimeoutSeconds() {
+    // TODO: Hard coded timeout
+    return 10;
+  }
+
+  @Override
+  protected String getZookeeperQuorum() {
+    return null;
+  }
+
+  @Override
+  protected String getConfiguredCollectors() {
+    return null;
+  }
+
+  @Override
+  protected String getHostname() {
+    return null;
+  }
+
+  @Override
+  protected boolean emitMetrics(TimelineMetrics metrics) {
+    return super.emitMetrics(metrics);
+  }
+
+  @Override
+  protected String getCollectorProtocol() {
+    return null;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricCount.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricCount.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricCount.java
new file mode 100644
index 0000000..abb84c7
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricCount.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.metrics;
+
+public class MetricCount {
+  public String metricsName = null;
+  public boolean isPointInTime = false;
+
+  public long count = 0;
+  public long prevLogCount = 0;
+  public long prevLogMS = System.currentTimeMillis();
+  public long prevPublishCount = 0;
+  public int publishCount = 0; // Count of published metrics. Used for first time sending metrics
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsMgr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsMgr.java
new file mode 100644
index 0000000..33397c7
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsMgr.java
@@ -0,0 +1,178 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.metrics;
+
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.TreeMap;
+
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.apache.log4j.Logger;
+
+public class MetricsMgr {
+  private static final Logger logger = Logger.getLogger(MetricsMgr.class);
+
+  private boolean isMetricsEnabled = false;
+  private String nodeHostName = null;
+  private String appId = "logfeeder";
+
+  private long lastPublishTimeMS = 0; // Let's do the first publish immediately
+  private long lastFailedPublishTimeMS = System.currentTimeMillis(); // Reset the clock
+
+  private int publishIntervalMS = 60 * 1000;
+  // If AMS is down, don't keep the metrics in memory forever
+  private int maxMetricsBuffer = 60 * 60 * 1000;
+  private HashMap<String, TimelineMetric> metricsMap = new HashMap<String, TimelineMetric>();
+  private LogFeederAMSClient amsClient = null;
+
+  public void init() {
+    logger.info("Initializing MetricsMgr()");
+    amsClient = new LogFeederAMSClient();
+
+    if (amsClient.getCollectorUri(null) != null) {
+      nodeHostName = LogFeederUtil.getStringProperty("node.hostname");
+      if (nodeHostName == null) {
+        try {
+          nodeHostName = InetAddress.getLocalHost().getHostName();
+        } catch (Throwable e) {
+          logger.warn(
+            "Error getting hostname using InetAddress.getLocalHost().getHostName()",
+            e);
+        }
+        if (nodeHostName == null) {
+          try {
+            nodeHostName = InetAddress.getLocalHost()
+              .getCanonicalHostName();
+          } catch (Throwable e) {
+            logger.warn(
+              "Error getting hostname using InetAddress.getLocalHost().getCanonicalHostName()",
+              e);
+          }
+        }
+      }
+      if (nodeHostName == null) {
+        isMetricsEnabled = false;
+        logger.error("Failed getting hostname for node. Disabling publishing LogFeeder metrics");
+      } else {
+        isMetricsEnabled = true;
+        logger.info("LogFeeder Metrics is enabled. Metrics host="
+          + amsClient.getCollectorUri(null));
+      }
+    } else {
+      logger.info("LogFeeder Metrics publish is disabled");
+    }
+  }
+
+  public boolean isMetricsEnabled() {
+    return isMetricsEnabled;
+  }
+
+  synchronized public void useMetrics(List<MetricCount> metricsList) {
+    if (!isMetricsEnabled) {
+      return;
+    }
+    logger.info("useMetrics() metrics.size=" + metricsList.size());
+    long currMS = System.currentTimeMillis();
+    Long currMSLong = new Long(currMS);
+    for (MetricCount metric : metricsList) {
+      if (metric.metricsName == null) {
+        logger.debug("metric.metricsName is null");
+        // Metric is not meant to be published
+        continue;
+      }
+      long currCount = metric.count;
+      if (!metric.isPointInTime && metric.publishCount > 0
+        && currCount <= metric.prevPublishCount) {
+        // No new data added, so let's ignore it
+        logger.debug("Nothing changed. " + metric.metricsName
+          + ", currCount=" + currCount + ", prevPublishCount="
+          + metric.prevPublishCount);
+        continue;
+      }
+      metric.publishCount++;
+
+      TimelineMetric timelineMetric = metricsMap.get(metric.metricsName);
+      if (timelineMetric == null) {
+        logger.debug("Creating new metric obbject for "
+          + metric.metricsName);
+        // First time for this metric
+        timelineMetric = new TimelineMetric();
+        timelineMetric.setMetricName(metric.metricsName);
+        timelineMetric.setHostName(nodeHostName);
+        timelineMetric.setAppId(appId);
+        timelineMetric.setStartTime(currMS);
+        timelineMetric.setType("Long");
+        timelineMetric.setMetricValues(new TreeMap<Long, Double>());
+
+        metricsMap.put(metric.metricsName, timelineMetric);
+      }
+      logger.debug("Adding metrics=" + metric.metricsName);
+      if (metric.isPointInTime) {
+        timelineMetric.getMetricValues().put(currMSLong,
+          new Double(currCount));
+      } else {
+        Double value = timelineMetric.getMetricValues().get(currMSLong);
+        if (value == null) {
+          value = new Double(0);
+        }
+        value += (currCount - metric.prevPublishCount);
+        timelineMetric.getMetricValues().put(currMSLong, value);
+        metric.prevPublishCount = currCount;
+      }
+    }
+
+    if (metricsMap.size() > 0
+      && currMS - lastPublishTimeMS > publishIntervalMS) {
+      try {
+        // Time to publish
+        TimelineMetrics timelineMetrics = new TimelineMetrics();
+        List<TimelineMetric> timeLineMetricList = new ArrayList<TimelineMetric>();
+        timeLineMetricList.addAll(metricsMap.values());
+        timelineMetrics.setMetrics(timeLineMetricList);
+        amsClient.emitMetrics(timelineMetrics);
+        logger.info("Published " + timeLineMetricList.size()
+          + " metrics to AMS");
+        metricsMap.clear();
+        timeLineMetricList.clear();
+        lastPublishTimeMS = currMS;
+      } catch (Throwable t) {
+        logger.warn("Error sending metrics to AMS.", t);
+        if (currMS - lastFailedPublishTimeMS > maxMetricsBuffer) {
+          logger.error("AMS was not sent for last "
+            + maxMetricsBuffer
+            / 1000
+            + " seconds. Purging it and will start rebuilding it again");
+          metricsMap.clear();
+          lastFailedPublishTimeMS = currMS;
+        }
+      }
+    } else {
+      logger.info("Not publishing metrics. metrics.size()="
+        + metricsMap.size() + ", lastPublished="
+        + (currMS - lastPublishTimeMS) / 1000
+        + " seconds ago, intervalConfigured=" + publishIntervalMS
+        / 1000);
+    }
+  }
+}
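
For orientation, a hypothetical caller of the two classes above could look like the sketch below; only the API calls come from MetricCount and MetricsMgr, while the metric name and count are invented for illustration (java.util imports assumed).

  MetricsMgr metricsMgr = new MetricsMgr();
  metricsMgr.init();                              // enabled only if a collector URI and a hostname are resolvable

  MetricCount readLines = new MetricCount();
  readLines.metricsName = "logfeeder.example.read_lines";  // hypothetical metric name
  readLines.count = 42;                                    // cumulative counter maintained by the caller

  List<MetricCount> containers = new ArrayList<MetricCount>();
  containers.add(readLines);
  if (metricsMgr.isMetricsEnabled()) {
    metricsMgr.useMetrics(containers);            // buffers the delta; publishes once publishIntervalMS has elapsed
  }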

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java
index a4e0eda..6f84251 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java
@@ -24,10 +24,10 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
-import org.apache.ambari.logfeeder.ConfigBlock;
-import org.apache.ambari.logfeeder.LogFeederUtil;
-import org.apache.ambari.logfeeder.MetricCount;
+import org.apache.ambari.logfeeder.common.ConfigBlock;
 import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.log4j.Logger;
 
 public abstract class Output extends ConfigBlock {

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java
index aef8dc5..18a5a54 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputFile.java
@@ -26,8 +26,8 @@ import java.io.IOException;
 import java.io.PrintWriter;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
 import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.csv.CSVFormat;
 import org.apache.commons.csv.CSVPrinter;
 import org.apache.log4j.Logger;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java
index f711a5f..a360215 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputHDFSFile.java
@@ -19,12 +19,12 @@
 
 package org.apache.ambari.logfeeder.output;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.output.spool.LogSpooler;
 import org.apache.ambari.logfeeder.output.spool.LogSpoolerContext;
 import org.apache.ambari.logfeeder.output.spool.RolloverCondition;
 import org.apache.ambari.logfeeder.output.spool.RolloverHandler;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.ambari.logfeeder.util.LogfeederHDFSUtil;
 import org.apache.ambari.logfeeder.util.PlaceholderUtil;
 import org.apache.commons.lang3.StringUtils;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java
index a7f2321..2595d87 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputKafka.java
@@ -25,8 +25,8 @@ import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.LinkedTransferQueue;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
 import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kafka.clients.producer.Callback;
 import org.apache.kafka.clients.producer.KafkaProducer;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputMgr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputMgr.java
new file mode 100644
index 0000000..0a6b7fa
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputMgr.java
@@ -0,0 +1,263 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.output;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
+import org.apache.ambari.logfeeder.logconfig.filter.FilterLogData;
+import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+public class OutputMgr {
+  private static final Logger logger = Logger.getLogger(OutputMgr.class);
+
+  private Collection<Output> outputList = new ArrayList<Output>();
+
+  private boolean addMessageMD5 = true;
+
+  private int MAX_OUTPUT_SIZE = 32765; // 32766-1
+  private static long doc_counter = 0;
+  private MetricCount messageTruncateMetric = new MetricCount();
+
+  
+  public Collection<Output> getOutputList() {
+    return outputList;
+  }
+
+  public void setOutputList(Collection<Output> outputList) {
+    this.outputList = outputList;
+  }
+
+  public void write(Map<String, Object> jsonObj, InputMarker inputMarker) {
+    Input input = inputMarker.input;
+
+    // Update the block with the context fields
+    for (Map.Entry<String, String> entry : input.getContextFields()
+      .entrySet()) {
+      if (jsonObj.get(entry.getKey()) == null) {
+        jsonObj.put(entry.getKey(), entry.getValue());
+      }
+    }
+
+    // TODO: Ideally most of the overrides should be configurable
+
+    // Add the input type
+    if (jsonObj.get("type") == null) {
+      jsonObj.put("type", input.getStringValue("type"));
+    }
+    if (jsonObj.get("path") == null && input.getFilePath() != null) {
+      jsonObj.put("path", input.getFilePath());
+    }
+    if (jsonObj.get("path") == null && input.getStringValue("path") != null) {
+      jsonObj.put("path", input.getStringValue("path"));
+    }
+
+    // Add host if required
+    if (jsonObj.get("host") == null && LogFeederUtil.hostName != null) {
+      jsonObj.put("host", LogFeederUtil.hostName);
+    }
+    // Add IP if required
+    if (jsonObj.get("ip") == null && LogFeederUtil.ipAddress != null) {
+      jsonObj.put("ip", LogFeederUtil.ipAddress);
+    }
+    
+    //Add level
+    if (jsonObj.get("level") == null) {
+      jsonObj.put("level", LogFeederConstants.LOG_LEVEL_UNKNOWN);
+    }
+    if (input.isUseEventMD5() || input.isGenEventMD5()) {
+      String prefix = "";
+      Object logtimeObj = jsonObj.get("logtime");
+      if (logtimeObj != null) {
+        if (logtimeObj instanceof Date) {
+          prefix = "" + ((Date) logtimeObj).getTime();
+        } else {
+          prefix = logtimeObj.toString();
+        }
+      }
+      Long eventMD5 = LogFeederUtil.genHash(LogFeederUtil.getGson()
+        .toJson(jsonObj));
+      if (input.isGenEventMD5()) {
+        jsonObj.put("event_md5", prefix + eventMD5.toString());
+      }
+      if (input.isUseEventMD5()) {
+        jsonObj.put("id", prefix + eventMD5.toString());
+      }
+    }
+
+    // jsonObj.put("@timestamp", new Date());
+    jsonObj.put("seq_num", new Long(doc_counter++));
+    if (jsonObj.get("id") == null) {
+      jsonObj.put("id", UUID.randomUUID().toString());
+    }
+    if (jsonObj.get("event_count") == null) {
+      jsonObj.put("event_count", new Integer(1));
+    }
+    if (inputMarker.lineNumber > 0) {
+      jsonObj.put("logfile_line_number", new Integer(
+        inputMarker.lineNumber));
+    }
+    if (jsonObj.containsKey("log_message")) {
+      // TODO: Let's check size only for log_message for now
+      String logMessage = (String) jsonObj.get("log_message");
+      if (logMessage != null
+        && logMessage.getBytes().length > MAX_OUTPUT_SIZE) {
+        messageTruncateMetric.count++;
+        final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
+          + "_MESSAGESIZE";
+        LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
+          "Message is too big. size="
+            + logMessage.getBytes().length + ", input="
+            + input.getShortDescription()
+            + ". Truncating to " + MAX_OUTPUT_SIZE
+            + ", first upto 100 characters="
+            + LogFeederUtil.subString(logMessage, 100),
+          null, logger, Level.WARN);
+        logMessage = new String(logMessage.getBytes(), 0,
+          MAX_OUTPUT_SIZE);
+        jsonObj.put("log_message", logMessage);
+        // Add error tags
+        @SuppressWarnings("unchecked")
+        List<String> tagsList = (List<String>) jsonObj.get("tags");
+        if (tagsList == null) {
+          tagsList = new ArrayList<String>();
+          jsonObj.put("tags", tagsList);
+        }
+        tagsList.add("error_message_truncated");
+
+      }
+      if (addMessageMD5) {
+        jsonObj.put("message_md5",
+          "" + LogFeederUtil.genHash(logMessage));
+      }
+    }
+    //check log is allowed to send output
+    if (FilterLogData.INSTANCE.isAllowed(jsonObj)) {
+      for (Output output : input.getOutputList()) {
+        try {
+          output.write(jsonObj, inputMarker);
+        } catch (Exception e) {
+          logger.error("Error writing. to " + output.getShortDescription(), e);
+        }
+      }
+    }
+  }
+
+  public void write(String jsonBlock, InputMarker inputMarker) {
+    //check log is allowed to send output
+    if (FilterLogData.INSTANCE.isAllowed(jsonBlock)) {
+      for (Output output : inputMarker.input.getOutputList()) {
+        try {
+          output.write(jsonBlock, inputMarker);
+        } catch (Exception e) {
+          logger.error("Error writing. to " + output.getShortDescription(), e);
+        }
+      }
+    }
+  }
+
+  public void close() {
+    logger.info("Close called for outputs ...");
+    for (Output output : outputList) {
+      try {
+        output.setDrain(true);
+        output.close();
+      } catch (Exception e) {
+        // Ignore
+      }
+    }
+    // Need to get this value from property
+    int iterations = 30;
+    int waitTimeMS = 1000;
+    int i;
+    boolean allClosed = true;
+    for (i = 0; i < iterations; i++) {
+      allClosed = true;
+      for (Output output : outputList) {
+        if (!output.isClosed()) {
+          try {
+            allClosed = false;
+            logger.warn("Waiting for output to close. "
+              + output.getShortDescription() + ", "
+              + (iterations - i) + " more seconds");
+            Thread.sleep(waitTimeMS);
+          } catch (Throwable t) {
+            // Ignore
+          }
+        }
+      }
+      if (allClosed) {
+        break;
+      }
+    }
+
+    if (!allClosed) {
+      logger.warn("Some outpus were not closed. Iterations=" + i);
+      for (Output output : outputList) {
+        if (!output.isClosed()) {
+          logger.warn("Output not closed. Will ignore it."
+            + output.getShortDescription() + ", pendingCound="
+            + output.getPendingCount());
+        }
+      }
+    } else {
+      logger.info("All outputs are closed. Iterations=" + i);
+    }
+  }
+
+  public void logStats() {
+    for (Output output : outputList) {
+      output.logStat();
+    }
+    LogFeederUtil.logStatForMetric(messageTruncateMetric,
+      "Stat: Messages Truncated", null);
+  }
+
+  public void addMetricsContainers(List<MetricCount> metricsList) {
+    metricsList.add(messageTruncateMetric);
+    for (Output output : outputList) {
+      output.addMetricsContainers(metricsList);
+    }
+  }
+
+  
+  public void copyFile(File inputFile, InputMarker inputMarker) {
+    Input input = inputMarker.input;
+    for (Output output : input.getOutputList()) {
+      try {
+        output.copyFile(inputFile, inputMarker);
+      } catch (Exception e) {
+        logger.error("Error copying file to " + output.getShortDescription(),
+            e);
+      }
+    }
+  }
+}
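
A sketch of how the metrics plumbing above fits together, assuming an existing OutputMgr instance named outputMgr and the MetricsMgr introduced earlier in this patch; the wiring shown is illustrative, not lifted from the caller.

  List<MetricCount> metricsList = new ArrayList<MetricCount>();
  outputMgr.addMetricsContainers(metricsList);  // collects messageTruncateMetric plus each output's containers
  outputMgr.logStats();                         // logs per-output stats and the truncation counter
  metricsMgr.useMetrics(metricsList);           // hands the containers to the publisher (see MetricsMgr above)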

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
index cbc1045..e95f8df 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
@@ -22,14 +22,14 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
 import org.apache.ambari.logfeeder.LogFeeder;
-import org.apache.ambari.logfeeder.LogFeederUtil;
 import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.output.spool.LogSpooler;
 import org.apache.ambari.logfeeder.output.spool.LogSpoolerContext;
 import org.apache.ambari.logfeeder.output.spool.RolloverCondition;
 import org.apache.ambari.logfeeder.output.spool.RolloverHandler;
-import org.apache.ambari.logfeeder.s3.S3Util;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.S3Util;
 import org.apache.log4j.Logger;
 
 import java.io.File;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
index b4dac72..cd9ce4d 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
@@ -33,9 +33,9 @@ import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.logconfig.FetchConfigFromSolr;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java
index 1bbf33e..58282e0 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3LogPathResolver.java
@@ -18,9 +18,9 @@
 
 package org.apache.ambari.logfeeder.output;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
-import org.apache.ambari.logfeeder.s3.S3Util;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.ambari.logfeeder.util.PlaceholderUtil;
+import org.apache.ambari.logfeeder.util.S3Util;
 
 import java.util.HashMap;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java
index fb597d3..485b0d4 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3OutputConfiguration.java
@@ -18,11 +18,11 @@
 
 package org.apache.ambari.logfeeder.output;
 
-import org.apache.ambari.logfeeder.ConfigBlock;
-
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.ambari.logfeeder.common.ConfigBlock;
+
 /**
  * Holds all configuration relevant for S3 upload.
  */

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java
index dec685f..fd59c51 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/S3Uploader.java
@@ -19,9 +19,9 @@
 package org.apache.ambari.logfeeder.output;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.ambari.logfeeder.LogFeederUtil;
-import org.apache.ambari.logfeeder.s3.S3Util;
+
 import org.apache.ambari.logfeeder.util.CompressionUtil;
+import org.apache.ambari.logfeeder.util.S3Util;
 import org.apache.log4j.Logger;
 
 import java.io.File;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/s3/AWSUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/s3/AWSUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/s3/AWSUtil.java
deleted file mode 100644
index d0fbb6c..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/s3/AWSUtil.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder.s3;
-
-import org.apache.log4j.Logger;
-
-import com.amazonaws.AmazonServiceException;
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.services.identitymanagement.AmazonIdentityManagementClient;
-
-public enum AWSUtil {
-  INSTANCE;
-  private static final Logger LOG = Logger.getLogger(AWSUtil.class);
-
-  public String getAwsUserName(String accessKey, String secretKey) {
-    String username = null;
-    AWSCredentials awsCredentials = createAWSCredentials(accessKey, secretKey);
-    AmazonIdentityManagementClient amazonIdentityManagementClient;
-    if (awsCredentials != null) {
-      amazonIdentityManagementClient = new AmazonIdentityManagementClient(
-          awsCredentials);
-    } else {
-      // create default client
-      amazonIdentityManagementClient = new AmazonIdentityManagementClient();
-    }
-    try {
-      username = amazonIdentityManagementClient.getUser().getUser()
-          .getUserName();
-    } catch (AmazonServiceException e) {
-      if (e.getErrorCode().compareTo("AccessDenied") == 0) {
-        String arn = null;
-        String msg = e.getMessage();
-        int arnIdx = msg.indexOf("arn:aws");
-        if (arnIdx != -1) {
-          int arnSpace = msg.indexOf(" ", arnIdx);
-          // should be similar to "arn:aws:iam::111111111111:user/username"
-          arn = msg.substring(arnIdx, arnSpace);
-        }
-        if (arn != null) {
-          String[] arnParts = arn.split(":");
-          if (arnParts != null && arnParts.length > 5) {
-            username = arnParts[5];
-            if (username != null) {
-              username = username.replace("user/", "");
-            }
-          }
-        }
-      }
-    } catch (Exception exception) {
-      LOG.error(
-          "Error in getting username :" + exception.getLocalizedMessage(),
-          exception.getCause());
-    }
-    return username;
-  }
-
-  public AWSCredentials createAWSCredentials(String accessKey, String secretKey) {
-    if (accessKey != null && secretKey != null) {
-      LOG.debug("Creating aws client as per new accesskey and secretkey");
-      AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey,
-          secretKey);
-      return awsCredentials;
-    } else {
-      return null;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/s3/S3Util.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/s3/S3Util.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/s3/S3Util.java
deleted file mode 100644
index db187be..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/s3/S3Util.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder.s3;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.zip.GZIPInputStream;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.log4j.Logger;
-
-import com.amazonaws.AmazonClientException;
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.services.s3.AmazonS3;
-import com.amazonaws.services.s3.AmazonS3Client;
-import com.amazonaws.services.s3.model.GetObjectRequest;
-import com.amazonaws.services.s3.model.ObjectMetadata;
-import com.amazonaws.services.s3.model.PutObjectRequest;
-import com.amazonaws.services.s3.model.S3Object;
-import com.amazonaws.services.s3.transfer.TransferManager;
-import com.amazonaws.services.s3.transfer.Upload;
-
-/**
- * Utility to connect to s3
- */
-public class S3Util {
-  public static final S3Util INSTANCE = new S3Util();
-
-  private static final Logger LOG = Logger.getLogger(S3Util.class);
-
-  public static final String S3_PATH_START_WITH = "s3://";
-  public static final String S3_PATH_SEPARATOR = "/";
-
-  public AmazonS3 getS3Client(String accessKey, String secretKey) {
-    AWSCredentials awsCredentials = AWSUtil.INSTANCE.createAWSCredentials(
-        accessKey, secretKey);
-    AmazonS3 s3client;
-    if (awsCredentials != null) {
-      s3client = new AmazonS3Client(awsCredentials);
-    } else {
-      s3client = new AmazonS3Client();
-    }
-    return s3client;
-  }
-
-  public TransferManager getTransferManager(String accessKey, String secretKey) {
-    AWSCredentials awsCredentials = AWSUtil.INSTANCE.createAWSCredentials(
-        accessKey, secretKey);
-    TransferManager transferManager;
-    if (awsCredentials != null) {
-      transferManager = new TransferManager(awsCredentials);
-    } else {
-      transferManager = new TransferManager();
-    }
-    return transferManager;
-  }
-
-  public void shutdownTransferManager(TransferManager transferManager) {
-    if (transferManager != null) {
-      transferManager.shutdownNow();
-    }
-  }
-
-  public String getBucketName(String s3Path) {
-    String bucketName = null;
-    // s3path
-    if (s3Path != null) {
-      String[] s3PathParts = s3Path.replace(S3_PATH_START_WITH, "").split(
-          S3_PATH_SEPARATOR);
-      bucketName = s3PathParts[0];
-    }
-    return bucketName;
-  }
-
-  public String getS3Key(String s3Path) {
-    StringBuilder s3Key = new StringBuilder();
-    // s3path
-    if (s3Path != null) {
-      String[] s3PathParts = s3Path.replace(S3_PATH_START_WITH, "").split(
-          S3_PATH_SEPARATOR);
-      ArrayList<String> s3PathList = new ArrayList<String>(
-          Arrays.asList(s3PathParts));
-      s3PathList.remove(0);// remove bucketName
-      for (int index = 0; index < s3PathList.size(); index++) {
-        if (index > 0) {
-          s3Key.append(S3_PATH_SEPARATOR);
-        }
-        s3Key.append(s3PathList.get(index));
-      }
-    }
-    return s3Key.toString();
-  }
-
-  public void uploadFileTos3(String bucketName, String s3Key, File localFile,
-      String accessKey, String secretKey) {
-    TransferManager transferManager = getTransferManager(accessKey, secretKey);
-    try {
-      Upload upload = transferManager.upload(bucketName, s3Key, localFile);
-      upload.waitForUploadResult();
-    } catch (AmazonClientException | InterruptedException e) {
-      LOG.error("s3 uploading failed for file :" + localFile.getAbsolutePath(),
-          e);
-    } finally {
-      shutdownTransferManager(transferManager);
-    }
-  }
-
-  /**
-   * Get the buffer reader to read s3 file as a stream
-   */
-  public BufferedReader getReader(String s3Path, String accessKey,
-      String secretKey) throws IOException {
-    // TODO error handling
-    // Compression support
-    // read header and decide the compression(auto detection)
-    // For now hard-code GZIP compression
-    String s3Bucket = getBucketName(s3Path);
-    String s3Key = getS3Key(s3Path);
-    S3Object fileObj = getS3Client(accessKey, secretKey).getObject(
-        new GetObjectRequest(s3Bucket, s3Key));
-    GZIPInputStream objectInputStream;
-    try {
-      objectInputStream = new GZIPInputStream(fileObj.getObjectContent());
-      BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(
-          objectInputStream));
-      return bufferedReader;
-    } catch (IOException e) {
-      LOG.error("Error in creating stream reader for s3 file :" + s3Path,
-          e.getCause());
-      throw e;
-    }
-  }
-
-  public void writeIntoS3File(String data, String bucketName, String s3Key,
-      String accessKey, String secretKey) {
-    InputStream in = null;
-    try {
-      in = IOUtils.toInputStream(data, "UTF-8");
-    } catch (IOException e) {
-      LOG.error(e);
-    }
-    if (in != null) {
-      TransferManager transferManager = getTransferManager(accessKey, secretKey);
-      try {
-        if (transferManager != null) {
-          transferManager.upload(
-                  new PutObjectRequest(bucketName, s3Key, in,
-                  new ObjectMetadata())).waitForUploadResult();
-          LOG.debug("Data Uploaded to s3 file :" + s3Key + " in bucket :"
-              + bucketName);
-        }
-      } catch (AmazonClientException | InterruptedException e) {
-        LOG.error(e);
-      } finally {
-        try {
-          shutdownTransferManager(transferManager);
-          in.close();
-        } catch (IOException e) {
-          // ignore
-        }
-      }
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AWSUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AWSUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AWSUtil.java
new file mode 100644
index 0000000..15f7594
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AWSUtil.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.util;
+
+import org.apache.log4j.Logger;
+
+import com.amazonaws.AmazonServiceException;
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.services.identitymanagement.AmazonIdentityManagementClient;
+
+public enum AWSUtil {
+  INSTANCE;
+  private static final Logger LOG = Logger.getLogger(AWSUtil.class);
+
+  public String getAwsUserName(String accessKey, String secretKey) {
+    String username = null;
+    AWSCredentials awsCredentials = createAWSCredentials(accessKey, secretKey);
+    AmazonIdentityManagementClient amazonIdentityManagementClient;
+    if (awsCredentials != null) {
+      amazonIdentityManagementClient = new AmazonIdentityManagementClient(
+          awsCredentials);
+    } else {
+      // create default client
+      amazonIdentityManagementClient = new AmazonIdentityManagementClient();
+    }
+    try {
+      username = amazonIdentityManagementClient.getUser().getUser()
+          .getUserName();
+    } catch (AmazonServiceException e) {
+      if (e.getErrorCode().compareTo("AccessDenied") == 0) {
+        String arn = null;
+        String msg = e.getMessage();
+        int arnIdx = msg.indexOf("arn:aws");
+        if (arnIdx != -1) {
+          int arnSpace = msg.indexOf(" ", arnIdx);
+          // should be similar to "arn:aws:iam::111111111111:user/username"
+          arn = msg.substring(arnIdx, arnSpace);
+        }
+        if (arn != null) {
+          String[] arnParts = arn.split(":");
+          if (arnParts != null && arnParts.length > 5) {
+            username = arnParts[5];
+            if (username != null) {
+              username = username.replace("user/", "");
+            }
+          }
+        }
+      }
+    } catch (Exception exception) {
+      LOG.error(
+          "Error in getting username :" + exception.getLocalizedMessage(),
+          exception.getCause());
+    }
+    return username;
+  }
+
+  public AWSCredentials createAWSCredentials(String accessKey, String secretKey) {
+    if (accessKey != null && secretKey != null) {
+      LOG.debug("Creating aws client as per new accesskey and secretkey");
+      AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey,
+          secretKey);
+      return awsCredentials;
+    } else {
+      return null;
+    }
+  }
+}
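
A minimal sketch of the relocated utility, with accessKey and secretKey standing in for values read from configuration: createAWSCredentials() returns null when either key is missing, and getAwsUserName() then falls back to the SDK's default client, as in the code above.

  AWSCredentials credentials = AWSUtil.INSTANCE.createAWSCredentials(accessKey, secretKey);
  String owner = AWSUtil.INSTANCE.getAwsUserName(accessKey, secretKey);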

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AliasUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AliasUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AliasUtil.java
new file mode 100644
index 0000000..a92ba29
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/AliasUtil.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.util;
+
+import java.io.File;
+import java.util.HashMap;
+
+import org.apache.log4j.Logger;
+
+public class AliasUtil {
+
+  private static Logger logger = Logger.getLogger(AliasUtil.class);
+
+  private static AliasUtil instance = null;
+
+  private static String aliasConfigJson = "alias_config.json";
+
+  private HashMap<String, Object> aliasMap = null;
+
+  public static enum ALIAS_TYPE {
+    INPUT, FILTER, MAPPER, OUTPUT
+  }
+
+  public static enum ALIAS_PARAM {
+    KLASS
+  }
+
+  private AliasUtil() {
+    init();
+  }
+
+  public static AliasUtil getInstance() {
+    if (instance == null) {
+      synchronized (AliasUtil.class) {
+        if (instance == null) {
+          instance = new AliasUtil();
+        }
+      }
+    }
+    return instance;
+  }
+
+  /**
+   * Loads the alias configuration (alias_config.json) from the classpath.
+   */
+  private void init() {
+    File jsonFile = LogFeederUtil.getFileFromClasspath(aliasConfigJson);
+    if (jsonFile != null) {
+      this.aliasMap = LogFeederUtil.readJsonFromFile(jsonFile);
+    }
+
+  }
+
+
+  public String readAlias(String key, ALIAS_TYPE aliastype, ALIAS_PARAM aliasParam) {
+    String result = key; // the key itself is the default value
+    HashMap<String, String> aliasInfo = getAliasInfo(key, aliastype);
+    String value = aliasInfo.get(aliasParam.name().toLowerCase());
+    if (value != null && !value.isEmpty()) {
+      result = value;
+      logger.debug("Alias found for key :" + key + ",  param :" + aliasParam.name().toLowerCase() + ", value :"
+        + value + " aliastype:" + aliastype.name());
+    } else {
+      logger.debug("Alias not found for key :" + key + ", param :" + aliasParam.name().toLowerCase());
+    }
+    return result;
+  }
+
+  @SuppressWarnings("unchecked")
+  private HashMap<String, String> getAliasInfo(String key, ALIAS_TYPE aliastype) {
+    HashMap<String, String> aliasInfo = null;
+    if (aliasMap != null) {
+      String typeKey = aliastype.name().toLowerCase();
+      HashMap<String, Object> typeJson = (HashMap<String, Object>) aliasMap.get(typeKey);
+      if (typeJson != null) {
+        aliasInfo = (HashMap<String, String>) typeJson.get(key);
+      }
+    }
+    if (aliasInfo == null) {
+      aliasInfo = new HashMap<String, String>();
+    }
+    return aliasInfo;
+  }
+}
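
A short sketch of the lookup the new utility provides; "file" is an illustrative key, the real keys live in alias_config.json on the classpath, and readAlias() returns the key unchanged when no alias is configured.

  String klass = AliasUtil.getInstance().readAlias("file",
      AliasUtil.ALIAS_TYPE.INPUT, AliasUtil.ALIAS_PARAM.KLASS);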


[06/50] [abbrv] ambari git commit: AMBARI-18161. Support UNKNOWN log level for Log Search (Dharmesh Makwana via oleewere)

Posted by ol...@apache.org.
AMBARI-18161. Support UNKNOWN log level for Log Search (Dharmesh Makwana via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/29c66f7f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/29c66f7f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/29c66f7f

Branch: refs/heads/branch-dev-logsearch
Commit: 29c66f7ff4f9929e4d3afacbf19f887daefdc786
Parents: 00b1ac7
Author: oleewere <ol...@gmail.com>
Authored: Mon Aug 22 11:01:36 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:33:58 2016 +0200

----------------------------------------------------------------------
 .../org/apache/ambari/logfeeder/OutputMgr.java  |  7 +-
 .../logfeeder/logconfig/LogFeederConstants.java |  1 +
 .../logsearch/common/LogSearchConstants.java    |  3 +
 .../logsearch/graph/GraphDataGenerator.java     | 18 +----
 .../ambari/logsearch/manager/LogsMgr.java       | 84 +++-----------------
 .../ambari/logsearch/util/ConfigUtil.java       | 14 ----
 .../main/webapp/scripts/views/common/Header.js  |  2 +-
 .../scripts/views/dashboard/LogLevelBoxView.js  |  3 +-
 .../scripts/views/dashboard/LogLevelView.js     |  1 +
 .../views/filter/CreateLogfeederFilterView.js   |  2 +-
 .../scripts/views/graphs/GraphLayoutView.js     |  3 +-
 .../scripts/views/graphs/GridGraphLayoutView.js |  3 +-
 .../webapp/scripts/views/tabs/LogFileView.js    |  2 +-
 .../troubleshoot/TroubleShootLayoutView.js      |  3 +-
 .../src/main/webapp/styles/style.css            | 46 +++++++++--
 .../dashboard/LogLevelBoxView_tmpl.html         | 17 ++--
 .../filter/CreateLogfeederFilter_tmpl.html      |  1 +
 17 files changed, 87 insertions(+), 123 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/OutputMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/OutputMgr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/OutputMgr.java
index c65b352..41b005b 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/OutputMgr.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/OutputMgr.java
@@ -29,6 +29,7 @@ import java.util.UUID;
 
 import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
 import org.apache.ambari.logfeeder.logconfig.filter.FilterLogData;
 import org.apache.ambari.logfeeder.output.Output;
 import org.apache.log4j.Level;
@@ -86,7 +87,11 @@ public class OutputMgr {
     if (jsonObj.get("ip") == null && LogFeederUtil.ipAddress != null) {
       jsonObj.put("ip", LogFeederUtil.ipAddress);
     }
-
+    
+    //Add level
+    if (jsonObj.get("level") == null) {
+      jsonObj.put("level", LogFeederConstants.LOG_LEVEL_UNKNOWN);
+    }
     if (input.isUseEventMD5() || input.isGenEventMD5()) {
       String prefix = "";
       Object logtimeObj = jsonObj.get("logtime");

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederConstants.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederConstants.java
index b069029..09673a0 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederConstants.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederConstants.java
@@ -22,6 +22,7 @@ public class LogFeederConstants {
 
   public static final String ALL = "all";
   public static final String LOGFEEDER_FILTER_NAME = "log_feeder_config";
+  public static final String LOG_LEVEL_UNKNOWN = "UNKNOWN";
   // solr fields
   public static final String SOLR_LEVEL = "level";
   public static final String SOLR_COMPONENT = "type";

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
index 593be65..43a7eb7 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
@@ -27,6 +27,9 @@ public class LogSearchConstants {
   public static final String ERROR = "ERROR";
   public static final String TRACE = "TRACE";
   public static final String FATAL = "FATAL";
+  public static final String UNKNOWN = "UNKNOWN";
+  
+  public static final String[] SUPPORTED_LOG_LEVEL ={FATAL,ERROR,WARN,INFO,DEBUG,TRACE,UNKNOWN};
 
   // Application Constants
   public static final String HOST = "H";

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
index 92baaff..361f8e3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
@@ -188,15 +188,8 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
           }
         }
         if (xAxisField.equalsIgnoreCase(LogSearchConstants.SOLR_LEVEL)) {
-          List<String> logLevels = new ArrayList<String>();
-          logLevels.add(LogSearchConstants.FATAL);
-          logLevels.add(LogSearchConstants.ERROR);
-          logLevels.add(LogSearchConstants.WARN);
-          logLevels.add(LogSearchConstants.INFO);
-          logLevels.add(LogSearchConstants.DEBUG);
-          logLevels.add(LogSearchConstants.TRACE);
           Collection<VNameValue> sortedVNameValues = new ArrayList<VNameValue>();
-          for (String level : logLevels) {
+          for (String level : LogSearchConstants.SUPPORTED_LOG_LEVEL) {
             VNameValue value = new VNameValue();
             value.setName(level);
             String val = "0";
@@ -292,17 +285,10 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
       if (LogSearchConstants.SOLR_LEVEL.equalsIgnoreCase(stackField)
           && LogSearchConstants.SOLR_LEVEL.equalsIgnoreCase(xAxisField)) {
         Collection<VBarGraphData> levelVGraphData = dataList.getGraphData();
-        List<String> logLevels = new ArrayList<String>();
-        logLevels.add(LogSearchConstants.FATAL);
-        logLevels.add(LogSearchConstants.ERROR);
-        logLevels.add(LogSearchConstants.WARN);
-        logLevels.add(LogSearchConstants.INFO);
-        logLevels.add(LogSearchConstants.DEBUG);
-        logLevels.add(LogSearchConstants.TRACE);
         for (VBarGraphData garphData : levelVGraphData) {
           Collection<VNameValue> valueList = garphData.getDataCount();
           Collection<VNameValue> valueListSorted = new ArrayList<VNameValue>();
-          for (String level : logLevels) {
+          for (String level : LogSearchConstants.SUPPORTED_LOG_LEVEL) {
             String val = "0";
             for (VNameValue value : valueList) {
               if (value.getName().equalsIgnoreCase(level)) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
index a2c15f4..748d2f9 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
@@ -540,84 +540,25 @@ public class LogsMgr extends MgrBase {
     HashMap<String, String> map = new HashMap<String, String>();
     List<VNameValue> logsCounts = new ArrayList<VNameValue>();
     try {
-
       queryGenerator.setFacetField(query, LogSearchConstants.SOLR_LEVEL);
-
       List<Count> logLevelCounts = getFacetCounts(query,
-        LogSearchConstants.SOLR_LEVEL);
-      if(logLevelCounts == null){
+          LogSearchConstants.SOLR_LEVEL);
+      if (logLevelCounts == null) {
         return logsCounts;
       }
       for (Count count : logLevelCounts) {
         map.put(count.getName().toUpperCase(), "" + count.getCount());
       }
-      String level = LogSearchConstants.FATAL;
-      VNameValue nameValue = null;
-
-      String value = map.get(level);
-      if (stringUtil.isEmpty(value)){
-        value = defalutValue;
-      }
-      nameValue = new VNameValue();
-      nameValue.setName(level);
-      nameValue.setValue(value);
-      logsCounts.add(nameValue);
-
-      level = LogSearchConstants.ERROR;
-
-      value = map.get(level);
-      if (stringUtil.isEmpty(value)){
-        value = defalutValue;
-      }
-      nameValue = new VNameValue();
-      nameValue.setName(level);
-      nameValue.setValue(value);
-      logsCounts.add(nameValue);
-
-      level = LogSearchConstants.WARN;
-
-      value = map.get(level);
-      if (stringUtil.isEmpty(value)){
-        value = defalutValue;
-      }
-      nameValue = new VNameValue();
-      nameValue.setName(level);
-      nameValue.setValue(value);
-      logsCounts.add(nameValue);
-
-      level = LogSearchConstants.INFO;
-
-      value = map.get(level);
-      if (stringUtil.isEmpty(value)){
-        value = defalutValue;
-      }
-      nameValue = new VNameValue();
-      nameValue.setName(level);
-      nameValue.setValue(value);
-      logsCounts.add(nameValue);
-
-      level = LogSearchConstants.DEBUG;
-
-      value = map.get(level);
-      if (stringUtil.isEmpty(value)){
-        value = defalutValue;
-      }
-      nameValue = new VNameValue();
-      nameValue.setName(level);
-      nameValue.setValue(value);
-      logsCounts.add(nameValue);
-
-      level = LogSearchConstants.TRACE;
-
-      value = map.get(level);
-      if (stringUtil.isEmpty(value)){
-        value = defalutValue;
+      for (String level : LogSearchConstants.SUPPORTED_LOG_LEVEL) {
+        VNameValue nameValue = new VNameValue();
+        String value = map.get(level);
+        if (stringUtil.isEmpty(value)) {
+          value = defalutValue;
+        }
+        nameValue.setName(level);
+        nameValue.setValue(value);
+        logsCounts.add(nameValue);
       }
-      nameValue = new VNameValue();
-      nameValue.setName(level);
-      nameValue.setValue(value);
-      logsCounts.add(nameValue);
-
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + query, e);
     }
@@ -1260,7 +1201,6 @@ public class LogsMgr extends MgrBase {
     String unit = getUnit((String) searchCriteria.getParamValue("unit"));
 
     List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();
-    List<String> logLevels = ConfigUtil.logLevels;
 
     String jsonHistogramQuery = queryGenerator
       .buildJSONFacetTermTimeRangeQuery(
@@ -1287,7 +1227,7 @@ public class LogsMgr extends MgrBase {
 
       Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
       List<VBarGraphData> graphDatas = new ArrayList<VBarGraphData>();
-      for (String level : logLevels) {
+      for (String level : LogSearchConstants.SUPPORTED_LOG_LEVEL) {
         boolean isLevelPresent = false;
         VBarGraphData vData1 = null;
         for (VBarGraphData vData2 : histogramData) {
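
The getLogLevelCounts hunk above collapses six copy-pasted per-level blocks into one loop that falls back to a default value when a level has no facet count, and the histogram hunk drops the ConfigUtil.logLevels list in favor of the same shared constant. A standalone sketch of that "facet counts with defaults" pattern, using JDK types only (the real code fills VNameValue beans and reads its default from a field rather than hard-coding "0"), is:

  import java.util.LinkedHashMap;
  import java.util.Map;

  public class LevelCountSketch {
    // One pass over the supported levels; any level Solr returned no facet
    // count for is reported as "0".
    static Map<String, String> countsPerLevel(Map<String, String> facetCounts,
                                              String[] supportedLevels) {
      Map<String, String> result = new LinkedHashMap<>();
      for (String level : supportedLevels) {
        String value = facetCounts.get(level);
        result.put(level, (value == null || value.isEmpty()) ? "0" : value);
      }
      return result;
    }
  }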

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
index 2bf3b51..bdd304f 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
@@ -19,7 +19,6 @@
 
 package org.apache.ambari.logsearch.util;
 
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
@@ -35,8 +34,6 @@ import org.codehaus.jettison.json.JSONObject;
 public class ConfigUtil {
   static Logger logger = Logger.getLogger(MgrBase.class);
 
-  public static List<String> logLevels = new ArrayList<String>();
-
   public static HashMap<String, String> serviceLogsColumnMapping = new HashMap<String, String>();
 
   public static HashMap<String, String> auditLogsColumnMapping = new HashMap<String, String>();
@@ -44,7 +41,6 @@ public class ConfigUtil {
   public static HashMap<String, String> schemaFieldsName = new HashMap<String, String>();
 
   public static void initializeApplicationConfig() {
-    intializeLogLevels();
     initializeColumnMapping();
   }
 
@@ -67,16 +63,6 @@ public class ConfigUtil {
       }
     }
   }
-
-  private static void intializeLogLevels() {
-    logLevels.add(LogSearchConstants.TRACE);
-    logLevels.add(LogSearchConstants.DEBUG);
-    logLevels.add(LogSearchConstants.INFO);
-    logLevels.add(LogSearchConstants.WARN);
-    logLevels.add(LogSearchConstants.ERROR);
-    logLevels.add(LogSearchConstants.FATAL);
-  }
-
   private static void initializeColumnMapping() {
     String serviceLogsColumnMappingArray[] = PropertiesUtil
       .getPropertyStringList("logsearch.solr.service.logs.column.mapping");

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
index cb8ca4e..e88afea 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
@@ -172,7 +172,7 @@ define(['require',
                         content: view,
                         viewType: 'Filter',
                         resizable: false,
-                        width: 950,
+                        width: 1000,
                         height: 550,
                         autoFocus1stElement : false,
                         buttons: [{

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelBoxView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelBoxView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelBoxView.js
index d2ee3d8..0e7f1b8 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelBoxView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelBoxView.js
@@ -44,6 +44,7 @@ define(['require',
 			DEBUG : "[data-id='DEBUG']",
 			FATAL : "[data-id='FATAL']",
 			TRACE : "[data-id='TRACE']",
+			UNKNOWN : "[data-id='UNKNOWN']",
 			loader:".server-info .fa-spin"
 		},
 
@@ -80,7 +81,7 @@ define(['require',
 			}
 		},
 		fetchLogLevelCounts : function(params){
-			$.extend(this.logLevelList.queryParams,params,{level: "FATAL,ERROR,WARN,INFO,DEBUG,TRACE"});
+			$.extend(this.logLevelList.queryParams,params,{level: "FATAL,ERROR,WARN,INFO,DEBUG,TRACE,UNKNOWN"});
 			this.ui.loader.show();
 			this.logLevelList.fetch({reset:true});
 		},

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelView.js
index 17a67fb..79c8bcf 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelView.js
@@ -45,6 +45,7 @@ define(['require',
 			DEBUG : "[data-id='DEBUG']",
 			FATAL : "[data-id='FATAL']",
 			TRACE : "[data-id='TRACE']",
+			UNKNOWN : "[data-id='UNKNOWN']",
 			togglePieViewButton:'#logToggle',
 			pieRegionId:'#r_logLevelPieChart',
 			logTable:'#logTable'

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
index 95324d9..9bdf0fa 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
@@ -81,7 +81,7 @@ define(['require',
                 this.model = new VUserFilter();
 
                 this.levelCollection = new Backbone.Collection();
-                var levelArr = ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"];
+                var levelArr = ["FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE", "UNKNOWN"];
 
                 for (var i in levelArr) {
                     this.levelCollection.add(new Backbone.Model({ type: levelArr[i] }));

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js
index 76d0537..35d0c9a 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js
@@ -255,7 +255,8 @@ define(['require',
                             (model.get('name') === 'WARN') ? ("#FF8916") :
                             (model.get('name') === 'FATAL') ? ("#830A0A") :
                             (model.get('name') === 'DEBUG') ? ("#65E8FF") :
-                            (model.get('name') === 'TRACE') ? ("#888888") : ("white"));
+                            (model.get('name') === 'TRACE') ? ("#888888") : 
+                            (model.get('name') === 'UNKNOWN') ? ("#bdbdbd") : ("white"));
                     } else {
                         Obj['color'] = color[i];
                     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GridGraphLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GridGraphLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GridGraphLayoutView.js
index 47393bb..ae76ba8 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GridGraphLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GridGraphLayoutView.js
@@ -203,7 +203,8 @@ define(['require',
                                    ( (""+model.get('name')).toUpperCase() === 'WARN') ? ("#FF8916") :
                                    ( (""+model.get('name')).toUpperCase() === 'FATAL') ? ("#830A0A") :
                                    ( (""+model.get('name')).toUpperCase() === 'DEBUG') ? ("#65E8FF") :
-                                   ( (""+model.get('name')).toUpperCase() === 'TRACE') ? ("#888888") : color[i]);
+                                   ( (""+model.get('name')).toUpperCase() === 'TRACE') ? ("#888888") :
+                                   ( (""+model.get('name')).toUpperCase() === 'UNKNOWN') ? ("#bdbdbd") : color[i]);
                            } else {
                                Obj['color'] = color[i];
                            }

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
index 4b42eab..0b32237 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
@@ -495,7 +495,7 @@ define(['require',
                         direction: "descending",
                         orderable: true,
                         displayOrder: 1,
-                        width: "10",
+                        width: "17",
                         className: "logTime",
                         formatter: _.extend({}, Backgrid.CellFormatter.prototype, {
                             fromRaw: function(rawValue, model) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js
index 66f851c..a6445c6 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js
@@ -369,7 +369,8 @@ define(['backbone',
                                 ( (""+name).toUpperCase() === 'WARN') ? ("#FF8916") :
                                 ( (""+name).toUpperCase() === 'FATAL') ? ("#830A0A") :
                                 ( (""+name).toUpperCase() === 'DEBUG') ? ("#65E8FF") :
-                                ( (""+name).toUpperCase() === 'TRACE') ? ("#888888") : "")
+                                ( (""+name).toUpperCase() === 'TRACE') ? ("#888888") :
+                                ( (""+name).toUpperCase() === 'UNKNOWN') ? ("#bdbdbd") : "")
             			});
             			
             		}

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style.css
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style.css b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style.css
index 240cc4d..543533b 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style.css
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/styles/style.css
@@ -261,7 +261,9 @@ p.log-line:before {
 .bubbleGraph .label,.node--root,.node--leaf {
   pointer-events: none;
 }
-
+.UNKNOWN {
+  fill: #bdbdbd;
+}
 .TRACE {
   fill: #888888;
 }
@@ -489,6 +491,9 @@ div.columnmanager-visibilitycontrol {
   height: 100%;
   float: left;
 }
+.node.UNKNOWN {
+  background-color: #bdbdbd;
+}
 
 .node.TRACE {
   background-color: #888888;
@@ -514,6 +519,9 @@ div.columnmanager-visibilitycontrol {
   background-color: #830A0A;
 }
 
+.label-UNKNOWN {
+  background-color: #bdbdbd;
+}
 .label-TRACE {
   background-color: #888888;
 }
@@ -589,6 +597,15 @@ div.columnmanager-visibilitycontrol {
   top: -3px;
   right: 0;
 }
+.server-info a.node.UNKNOWN {
+  border-color: #bdbdbd;
+  color: #bdbdbd;
+}
+
+.server-info a.node.UNKNOWN.active {
+  background-color: #bdbdbd;
+  color: #fff;
+}
 
 .server-info a.node.TRACE {
   border-color: #888888;
@@ -649,6 +666,9 @@ div.columnmanager-visibilitycontrol {
   background-color: #830A0A;
   color: #fff;
 }
+.UNKNOWN {
+  color: #bdbdbd;
+}
 
 .TRACE {
   color: #888888;
@@ -2348,19 +2368,19 @@ input:-webkit-autofill,input:-webkit-autofill:hover,input:-webkit-autofill:focus
 }
 
 .logFeederTable {
-  width: 906px;
+  width: 965px;
   display: block;
 }
 
 .logFeederTable tbody {
   height: 365px;
   overflow-y: auto;
-  width: 906px;
+  width: 965px;
   display: block;
 }
 
 .logFeederTable th {
-  width: 90px;
+  width: 106px;
   border-left: 1px solid #CCC;
   border-right: 1px solid #CCC;
   border-top: 1px solid #CCC !important;
@@ -2376,7 +2396,7 @@ input:-webkit-autofill,input:-webkit-autofill:hover,input:-webkit-autofill:focus
 }
 
 .logFeederTable th:nth-child(2) {
-  width: 190px;
+  width: 165px;
   border-top: 1px solid #CCC !important;
   border-bottom: 1px solid #CCC !important;
 }
@@ -2391,12 +2411,12 @@ input:-webkit-autofill,input:-webkit-autofill:hover,input:-webkit-autofill:focus
 }
 
 .logFeederTable td:first-of-type {
-  width: 269px;
+  width: 240px;
   padding: 0 10px;
 }
 
 .logFeederTable td:nth-child(2) {
-  width: 178px;
+  width: 135px;
   padding: 0 10px;
 }
 
@@ -2929,4 +2949,16 @@ button.defaultCancelBtn{
 }
 .comparisonTab .daterangepicker .ranges{
   float: right !important;
+}
+
+/*------------New Log Levels Tab CSS--------------*/
+.levelBox{
+  width: 13%;
+  margin: 0 .6%;
+  float: left;
+  position: relative;
+  min-height: 1px;
+}
+.levelBox:last-of-type{
+  clear : right;
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/dashboard/LogLevelBoxView_tmpl.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/dashboard/LogLevelBoxView_tmpl.html b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/dashboard/LogLevelBoxView_tmpl.html
index 4c3cd8d..0424cc0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/dashboard/LogLevelBoxView_tmpl.html
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/dashboard/LogLevelBoxView_tmpl.html
@@ -16,29 +16,34 @@
 -->
 <div class="server-info">
 	<a href="javascript:void(0);"
-		class="col-md-2 node FATAL text-center active"> <strong
+		class="levelBox node FATAL text-center active"> <strong
 		data-total="FATAL">0</strong><span>Fatal</span> <i
 		class="fa fa-spinner fa-spin"></i>
 	</a> <a href="javascript:void(0);"
-		class="col-md-2 node ERROR text-center active"> <strong
+		class="levelBox node ERROR text-center active"> <strong
 		data-total="ERROR">0</strong><span>Error</span> <i
 		class="fa fa-spinner fa-spin"></i>
 	</a> <a href="javascript:void(0);"
-		class="col-md-2 node WARN text-center active"> <strong
+		class="levelBox node WARN text-center active"> <strong
 		data-total="WARN">0</strong><span>Warn</span> <i
 		class="fa fa-spinner fa-spin"></i>
 	</a> <a href="javascript:void(0);"
-		class="col-md-2 node INFO text-center active"> <strong
+		class="levelBox node INFO text-center active"> <strong
 		data-total="INFO">0</strong><span>Info</span> <i
 		class="fa fa-spinner fa-spin"></i>
 	</a> <a href="javascript:void(0);"
-		class="col-md-2 node DEBUG text-center active"> <strong
+		class="levelBox node DEBUG text-center active"> <strong
 		data-total="DEBUG">0</strong><span>Debug</span> <i
 		class="fa fa-spinner fa-spin"></i>
 	</a> <a href="javascript:void(0);"
-		class="col-md-2 node TRACE text-center active"> <strong
+		class="levelBox node TRACE text-center active"> <strong
 		data-total="TRACE">0</strong><span>Trace</span> <i
 		class="fa fa-spinner fa-spin"></i>
 	</a>
+  <a href="javascript:void(0);"
+    class="levelBox node UNKNOWN text-center active"> <strong
+    data-total="UNKNOWN">0</strong><span>Unknown</span> <i
+    class="fa fa-spinner fa-spin"></i>
+  </a>  
 
 </div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/29c66f7f/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/filter/CreateLogfeederFilter_tmpl.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/filter/CreateLogfeederFilter_tmpl.html b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/filter/CreateLogfeederFilter_tmpl.html
index fe2a5c1..a4d78fa 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/filter/CreateLogfeederFilter_tmpl.html
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/filter/CreateLogfeederFilter_tmpl.html
@@ -34,6 +34,7 @@
                                            <th class="INFO  text-center"><span class="pull-left"><input type="checkbox" data-value="INFO"></span><span>INFO</span></th>
                                            <th class="DEBUG  text-center"><span class="pull-left"><input type="checkbox" data-value="DEBUG"></span><span>DEBUG</span></th>
                                            <th class="TRACE  text-center"><span class="pull-left"><input type="checkbox" data-value="TRACE"></span><span>TRACE</span></th>
+                                           <th class="UNKNOWN  text-center"><span class="pull-left"><input type="checkbox" data-value="UNKNOWN"></span><span>UNKNOWN</span></th>
                                         </tr>
                                        </thead>
                                        <tbody id="filterContent"></tbody>


[07/50] [abbrv] ambari git commit: AMBARI-18077. Clean up Log Search Appender and improve speed (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
AMBARI-18077. Clean up Log Search Appender and improve speed (Miklos Gergely via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/dff48f0a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/dff48f0a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/dff48f0a

Branch: refs/heads/branch-dev-logsearch
Commit: dff48f0a2297fa8f2c5c0207e4af4504a2deffbf
Parents: ff4beca
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Thu Aug 18 13:22:00 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:33:58 2016 +0200

----------------------------------------------------------------------
 .../ambari-logsearch-appender/pom.xml           |  1 -
 .../logsearch/appender/LogsearchConversion.java | 57 ++++++------
 .../ambari/logsearch/appender/Output.java       | 91 ++++++++++++++++++++
 .../apache/ambari/logsearch/appender/VBase.java | 63 --------------
 .../ambari/logsearch/appender/VOutput.java      | 88 -------------------
 .../commands/AbstractStateFileZkCommand.java    |  2 +-
 6 files changed, 117 insertions(+), 185 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/dff48f0a/ambari-logsearch/ambari-logsearch-appender/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-appender/pom.xml b/ambari-logsearch/ambari-logsearch-appender/pom.xml
index 39f250a..e14d576 100644
--- a/ambari-logsearch/ambari-logsearch-appender/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-appender/pom.xml
@@ -67,7 +67,6 @@
     <dependency>
       <groupId>log4j</groupId>
       <artifactId>log4j</artifactId>
-      <version>1.2.17</version>
     </dependency>
     <dependency>
       <groupId>com.google.code.gson</groupId>

http://git-wip-us.apache.org/repos/asf/ambari/blob/dff48f0a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java b/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java
index dbdfe6c..877fa24 100644
--- a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java
+++ b/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/LogsearchConversion.java
@@ -26,54 +26,47 @@ import org.apache.log4j.EnhancedPatternLayout;
 import org.apache.log4j.spi.LoggingEvent;
 
 public class LogsearchConversion extends EnhancedPatternLayout {
-  //
-  protected final int BUF_SIZE = 256;
-  protected final int MAX_CAPACITY = 1024;
 
-  private StringBuffer sbuf = new StringBuffer(BUF_SIZE);
-
-  private String newLine = System.getProperty("line.separator");
+  private static final String NEW_LINE = System.getProperty("line.separator");
 
   public LogsearchConversion() {
   }
 
   public String format(LoggingEvent event) {
-    if (sbuf.capacity() > MAX_CAPACITY) {
-      sbuf = new StringBuffer(BUF_SIZE);
-    } else {
-      sbuf.setLength(0);
-    }
     String outputStr = createOutput(event);
-    sbuf.append(outputStr + newLine);
-    return sbuf.toString();
+    return outputStr + NEW_LINE;
   }
 
   public String createOutput(LoggingEvent event) {
-    VOutput vOutput = new VOutput();
-    vOutput.setLevel(event.getLevel().toString());
-    vOutput.setFile(event.getLocationInformation().getFileName());
-    vOutput.setLine_number(Integer.parseInt(event.getLocationInformation().getLineNumber()));
-    String logmsg = event.getMessage() != null ? event.getMessage().toString() : "";
+    Output output = new Output();
+    
+    output.setLevel(event.getLevel().toString());
+    output.setFile(event.getLocationInformation().getFileName());
+    output.setLineNumber(Integer.parseInt(event.getLocationInformation().getLineNumber()));
+    output.setLogtime(Long.toString(event.getTimeStamp()));
+    output.setLoggerName(event.getLoggerName());
+    output.setThreadName(event.getThreadName());
+    output.setLogMessage(getLogMessage(event));
+    
+    return output.toJson();
+  }
+
+  public String getLogMessage(LoggingEvent event) {
+    String logMessage = event.getMessage() != null ? event.getMessage().toString() : "";
+
     if (event.getThrowableInformation() != null && event.getThrowableInformation().getThrowable() != null) {
-      logmsg += newLine + stackTraceToString(event.getThrowableInformation().getThrowable());
+      logMessage += NEW_LINE;
+      StringWriter sw = new StringWriter();
+      PrintWriter pw = new PrintWriter(sw);
+      event.getThrowableInformation().getThrowable().printStackTrace(pw);
+      logMessage += sw.toString();
     }
-    vOutput.setLog_message(logmsg);
-    vOutput.setLogtime("" + event.getTimeStamp());
-    vOutput.setLogger_name("" + event.getLoggerName());
-    vOutput.setThread_name(event.getThreadName());
-    return vOutput.toJson();
-  }
 
-  public String stackTraceToString(Throwable e) {
-    StringWriter sw = new StringWriter();
-    PrintWriter pw = new PrintWriter(sw);
-    e.printStackTrace(pw);
-    return sw.toString();
+    return logMessage;
   }
-  
+
   @Override
   public boolean ignoresThrowable() {
-    //set false to ignore exception stacktrace
     return false;
   }
 }
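
LogsearchConversion is a log4j 1.x layout (it extends EnhancedPatternLayout) that renders each logging event as a single JSON line, so it can be attached to any standard log4j appender. A minimal wiring sketch, using a plain ConsoleAppender purely for illustration (the project's own appender configuration is not shown in this mail, and the logger name is made up):

  import org.apache.ambari.logsearch.appender.LogsearchConversion;
  import org.apache.log4j.ConsoleAppender;
  import org.apache.log4j.Logger;

  public class LayoutDemo {
    public static void main(String[] args) {
      Logger logger = Logger.getLogger("demo");  // illustrative logger name
      logger.addAppender(new ConsoleAppender(new LogsearchConversion()));
      logger.info("hello");  // printed as one JSON object followed by a newline
    }
  }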

http://git-wip-us.apache.org/repos/asf/ambari/blob/dff48f0a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/Output.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/Output.java b/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/Output.java
new file mode 100644
index 0000000..8001054
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/Output.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.appender;
+
+import java.io.IOException;
+import java.io.StringWriter;
+
+import com.google.gson.stream.JsonWriter;
+
+class Output {
+
+  private String level;
+  private String file;
+  private String threadName;
+  private int lineNumber;
+  private String loggerName;
+  private String logtime;
+  private String logMessage;
+
+  void setLevel(String level) {
+    this.level = level;
+  }
+
+  void setFile(String file) {
+    this.file = file;
+  }
+
+  void setThreadName(String threadName) {
+    this.threadName = threadName;
+  }
+
+  void setLineNumber(int lineNumber) {
+    this.lineNumber = lineNumber;
+  }
+
+  void setLoggerName(String loggerName) {
+    this.loggerName = loggerName;
+  }
+
+  void setLogtime(String logtime) {
+    this.logtime = logtime;
+  }
+
+  void setLogMessage(String logMessage) {
+    this.logMessage = logMessage;
+  }
+
+  public String toJson() {
+    StringWriter stringWriter = new StringWriter();
+    
+    try (JsonWriter writer = new JsonWriter(stringWriter)) {
+      writer.beginObject();
+      
+      if (level != null) writer.name("level").value(level);
+      if (file != null) writer.name("file").value(file);
+      if (threadName != null) writer.name("thread_name").value(threadName);
+      writer.name("line_number").value(lineNumber);
+      if (loggerName != null) writer.name("logger_name").value(loggerName);
+      if (logtime != null) writer.name("logtime").value(logtime);
+      if (logMessage != null) writer.name("log_message").value(logMessage);
+      
+      writer.endObject();
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    
+    return stringWriter.toString();
+  }
+  
+  @Override
+  public String toString() {
+    return toJson();
+  }
+}
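
Output replaces the reflection-based VBase/VOutput pair (deleted below) with an explicit Gson JsonWriter, so the JSON field names are fixed inside toJson() rather than derived from Java field names. Given the keys written above, a formatted event would come out roughly as the following single line (all values here are illustrative, not taken from a real run):

  {"level":"INFO","file":"LogFeeder.java","thread_name":"main","line_number":42,"logger_name":"org.apache.ambari.logfeeder.LogFeeder","logtime":"1473291237000","log_message":"hello"}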

http://git-wip-us.apache.org/repos/asf/ambari/blob/dff48f0a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/VBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/VBase.java b/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/VBase.java
deleted file mode 100644
index 4e91ccc..0000000
--- a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/VBase.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.appender;
-
-import java.lang.reflect.Field;
-
-import org.apache.log4j.Logger;
-
-import com.google.gson.Gson;
-
-public class VBase {
-  private static Logger logger = Logger.getLogger(VBase.class);
-
-  /**
-   *
-   */
-  @Override
-  public String toString() {
-    @SuppressWarnings("rawtypes")
-    Class klass = this.getClass();
-    Field[] fields = klass.getDeclaredFields();
-    StringBuilder builder = new StringBuilder(klass.getSimpleName() + "={");
-    for (Field field : fields) {
-      try {
-        field.setAccessible(true);
-        Object fieldValue = field.get(this);
-        String fieldName = field.getName();
-        if (!fieldName.equalsIgnoreCase("serialVersionUID")) {
-          builder.append(fieldName + "={" + fieldValue + "} ");
-        }
-
-      } catch (Exception e) {
-        logger.error(e.getLocalizedMessage(), e);
-      }
-    }
-    builder.append("}");
-
-    return builder.toString();
-  }
-
-  public String toJson() {
-    Gson gson = new Gson();
-    String json = gson.toJson(this);
-    return json;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/dff48f0a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/VOutput.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/VOutput.java b/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/VOutput.java
deleted file mode 100644
index 61da1da..0000000
--- a/ambari-logsearch/ambari-logsearch-appender/src/main/java/org/apache/ambari/logsearch/appender/VOutput.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.appender;
-
-public class VOutput extends VBase {
-
-  private String level;
-  private String file;
-  private String thread_name;
-  private int line_number;
-  private String log_message;
-  private String logger_name;
-  private String logtime;
-
-  public String getLevel() {
-    return level;
-  }
-
-  public void setLevel(String level) {
-    this.level = level;
-  }
-
-  public String getFile() {
-    return file;
-  }
-
-  public void setFile(String file) {
-    this.file = file;
-  }
-
-  public String getThread_name() {
-    return thread_name;
-  }
-
-  public void setThread_name(String thread_name) {
-    this.thread_name = thread_name;
-  }
-
-  public int getLine_number() {
-    return line_number;
-  }
-
-  public void setLine_number(int line_number) {
-    this.line_number = line_number;
-  }
-
-  public String getLog_message() {
-    return log_message;
-  }
-
-  public void setLog_message(String log_message) {
-    this.log_message = log_message;
-  }
-
-  public String getLogger_name() {
-    return logger_name;
-  }
-
-  public void setLogger_name(String logger_name) {
-    this.logger_name = logger_name;
-  }
-
-  public String getLogtime() {
-    return logtime;
-  }
-
-  public void setLogtime(String logtime) {
-    this.logtime = logtime;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/dff48f0a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/AbstractStateFileZkCommand.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/AbstractStateFileZkCommand.java b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/AbstractStateFileZkCommand.java
index 664cd88..d351589 100644
--- a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/AbstractStateFileZkCommand.java
+++ b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/AbstractStateFileZkCommand.java
@@ -19,7 +19,7 @@
 package org.apache.ambari.logsearch.solr.commands;
 
 import org.apache.ambari.logsearch.solr.AmbariSolrCloudClient;
-import org.apache.ambari.logsearch.solr.domain.AmbariSolrState;;
+import org.apache.ambari.logsearch.solr.domain.AmbariSolrState;
 import org.codehaus.jackson.JsonNode;
 import org.codehaus.jackson.map.ObjectMapper;
 


[19/50] [abbrv] ambari git commit: AMBARI-18236. Fix package structure in Logfeeder (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
new file mode 100644
index 0000000..32029ff
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
@@ -0,0 +1,557 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.util;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.lang.reflect.Type;
+import java.net.InetAddress;
+import java.net.URL;
+import java.net.UnknownHostException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Hashtable;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.TimeZone;
+
+import org.apache.ambari.logfeeder.LogFeeder;
+import org.apache.ambari.logfeeder.filter.Filter;
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
+import org.apache.ambari.logfeeder.mapper.Mapper;
+import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.output.Output;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.codehaus.jackson.JsonParseException;
+import org.codehaus.jackson.map.JsonMappingException;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.type.TypeReference;
+
+import com.google.common.collect.ObjectArrays;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.reflect.TypeToken;
+
+/**
+ * This class contains utility methods used by LogFeeder
+ */
+public class LogFeederUtil {
+  private static final Logger logger = Logger.getLogger(LogFeederUtil.class);
+
+  private static final int HASH_SEED = 31174077;
+  public final static String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";
+  public final static String SOLR_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
+  private static Gson gson = new GsonBuilder().setDateFormat(DATE_FORMAT).create();
+
+  private static Properties props;
+
+  private static Map<String, LogHistory> logHistoryList = new Hashtable<String, LogHistory>();
+  private static int logInterval = 30000; // 30 seconds
+
+  public static String hostName = null;
+  public static String ipAddress = null;
+  
+  private static String logfeederTempDir = null;
+  
+  private static final Object _LOCK = new Object();
+  
+  static{
+    setHostNameAndIP();
+  }
+  
+  public static Gson getGson() {
+    return gson;
+  }
+
+  private static ThreadLocal<SimpleDateFormat> dateFormatter = new ThreadLocal<SimpleDateFormat>() {
+    @Override
+    protected SimpleDateFormat initialValue() {
+      SimpleDateFormat sdf = new SimpleDateFormat(SOLR_DATE_FORMAT);
+      sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
+      return sdf;
+    }
+  };
+
+  /**
+   * This method will read the properties from System, followed by propFile
+   * and finally from the map
+   */
+  public static void loadProperties(String propFile, String[] propNVList)
+    throws Exception {
+    logger.info("Loading properties. propFile=" + propFile);
+    props = new Properties(System.getProperties());
+    boolean propLoaded = false;
+
+    // First get properties file path from environment value
+    String propertiesFilePath = System.getProperty("properties");
+    if (propertiesFilePath != null && !propertiesFilePath.isEmpty()) {
+      File propertiesFile = new File(propertiesFilePath);
+      if (propertiesFile.exists() && propertiesFile.isFile()) {
+        logger.info("Properties file path set in environment. Loading properties file="
+          + propertiesFilePath);
+        FileInputStream fileInputStream = null;
+        try {
+          fileInputStream = new FileInputStream(propertiesFile);
+          props.load(fileInputStream);
+          propLoaded = true;
+        } catch (Throwable t) {
+          logger.error("Error loading properties file. properties file="
+            + propertiesFile.getAbsolutePath());
+        } finally {
+          if (fileInputStream != null) {
+            try {
+              fileInputStream.close();
+            } catch (Throwable t) {
+              // Ignore error
+            }
+          }
+        }
+      } else {
+        logger.error("Properties file path set in environment, but file not found. properties file="
+          + propertiesFilePath);
+      }
+    }
+
+    if (!propLoaded) {
+      BufferedInputStream fileInputStream = null;
+      try {
+        // Properties not yet loaded, let's try from class loader
+        fileInputStream = (BufferedInputStream) LogFeeder.class
+          .getClassLoader().getResourceAsStream(propFile);
+        if (fileInputStream != null) {
+          logger.info("Loading properties file " + propFile
+            + " from classpath");
+          props.load(fileInputStream);
+          propLoaded = true;
+        } else {
+          logger.fatal("Properties file not found in classpath. properties file name= "
+            + propFile);
+        }
+      } finally {
+        if (fileInputStream != null) {
+          try {
+            fileInputStream.close();
+          } catch (IOException e) {
+          }
+        }
+      }
+    }
+
+    if (!propLoaded) {
+      logger.fatal("Properties file is not loaded.");
+      throw new Exception("Properties not loaded");
+    } else {
+      updatePropertiesFromMap(propNVList);
+    }
+  }
+
+  private static void updatePropertiesFromMap(String[] nvList) {
+    if (nvList == null) {
+      return;
+    }
+    logger.info("Trying to load additional proeprties from argument paramters. nvList.length="
+      + nvList.length);
+    if (nvList != null && nvList.length > 0) {
+      for (String nv : nvList) {
+        logger.info("Passed nv=" + nv);
+        if (nv.startsWith("-") && nv.length() > 1) {
+          nv = nv.substring(1);
+          logger.info("Stripped nv=" + nv);
+          int i = nv.indexOf("=");
+          if (nv.length() > i) {
+            logger.info("Candidate nv=" + nv);
+            String name = nv.substring(0, i);
+            String value = nv.substring(i + 1);
+            logger.info("Adding property from argument to properties. name="
+              + name + ", value=" + value);
+            props.put(name, value);
+          }
+        }
+      }
+    }
+  }
+
+  static public String getStringProperty(String key) {
+    if (props != null) {
+      return props.getProperty(key);
+    }
+    return null;
+  }
+
+  static public String getStringProperty(String key, String defaultValue) {
+    if (props != null) {
+      return props.getProperty(key, defaultValue);
+    }
+    return defaultValue;
+  }
+
+  static public boolean getBooleanProperty(String key, boolean defaultValue) {
+    String strValue = getStringProperty(key);
+    return toBoolean(strValue, defaultValue);
+  }
+
+  private static boolean toBoolean(String strValue, boolean defaultValue) {
+    boolean retValue = defaultValue;
+    if (!StringUtils.isEmpty(strValue)) {
+      if (strValue.equalsIgnoreCase("true")
+        || strValue.equalsIgnoreCase("yes")) {
+        retValue = true;
+      } else {
+        retValue = false;
+      }
+    }
+    return retValue;
+  }
+
+  static public int getIntProperty(String key, int defaultValue) {
+    String strValue = getStringProperty(key);
+    int retValue = defaultValue;
+    retValue = objectToInt(strValue, retValue, ", key=" + key);
+    return retValue;
+  }
+
+  public static int objectToInt(Object objValue, int retValue,
+                                String errMessage) {
+    if (objValue == null) {
+      return retValue;
+    }
+    String strValue = objValue.toString();
+    if (!StringUtils.isEmpty(strValue)) {
+      try {
+        retValue = Integer.parseInt(strValue);
+      } catch (Throwable t) {
+        logger.error("Error parsing integer value. str=" + strValue
+          + ", " + errMessage);
+      }
+    }
+    return retValue;
+  }
+
+  public static boolean isEnabled(Map<String, Object> conditionConfigs,
+                                  Map<String, Object> valueConfigs) {
+    boolean allow = toBoolean((String) valueConfigs.get("is_enabled"), true);
+    @SuppressWarnings("unchecked")
+    Map<String, Object> conditions = (Map<String, Object>) conditionConfigs
+      .get("conditions");
+    if (conditions != null && conditions.size() > 0) {
+      allow = false;
+      for (String conditionType : conditions.keySet()) {
+        if (conditionType.equalsIgnoreCase("fields")) {
+          @SuppressWarnings("unchecked")
+          Map<String, Object> fields = (Map<String, Object>) conditions
+            .get("fields");
+          for (String fieldName : fields.keySet()) {
+            Object values = fields.get(fieldName);
+            if (values instanceof String) {
+              allow = isFieldConditionMatch(valueConfigs,
+                fieldName, (String) values);
+            } else {
+              @SuppressWarnings("unchecked")
+              List<String> listValues = (List<String>) values;
+              for (String stringValue : listValues) {
+                allow = isFieldConditionMatch(valueConfigs,
+                  fieldName, stringValue);
+                if (allow) {
+                  break;
+                }
+              }
+            }
+            if (allow) {
+              break;
+            }
+          }
+        }
+        if (allow) {
+          break;
+        }
+      }
+    }
+    return allow;
+  }
+
+  public static boolean isFieldConditionMatch(Map<String, Object> configs,
+                                              String fieldName, String stringValue) {
+    boolean allow = false;
+    String fieldValue = (String) configs.get(fieldName);
+    if (fieldValue != null && fieldValue.equalsIgnoreCase(stringValue)) {
+      allow = true;
+    } else {
+      @SuppressWarnings("unchecked")
+      Map<String, Object> addFields = (Map<String, Object>) configs
+        .get("add_fields");
+      if (addFields != null && addFields.get(fieldName) != null) {
+        String addFieldValue = (String) addFields.get(fieldName);
+        if (stringValue.equalsIgnoreCase(addFieldValue)) {
+          allow = true;
+        }
+      }
+
+    }
+    return allow;
+  }
+
+  public static void logStatForMetric(MetricCount metric, String prefixStr,
+                                      String postFix) {
+    long currStat = metric.count;
+    long currMS = System.currentTimeMillis();
+    if (currStat > metric.prevLogCount) {
+      if (postFix == null) {
+        postFix = "";
+      }
+      logger.info(prefixStr + ": total_count=" + metric.count
+        + ", duration=" + (currMS - metric.prevLogMS) / 1000
+        + " secs, count=" + (currStat - metric.prevLogCount)
+        + postFix);
+    }
+    metric.prevLogCount = currStat;
+    metric.prevLogMS = currMS;
+  }
+
+  public static Map<String, Object> cloneObject(Map<String, Object> map) {
+    if (map == null) {
+      return null;
+    }
+    String jsonStr = gson.toJson(map);
+    Type type = new TypeToken<Map<String, Object>>() {
+    }.getType();
+    return gson.fromJson(jsonStr, type);
+  }
+
+  public static Map<String, Object> toJSONObject(String jsonStr) {
+    if(jsonStr==null || jsonStr.trim().isEmpty()){
+      return new HashMap<String, Object>();
+    }
+    Type type = new TypeToken<Map<String, Object>>() {
+    }.getType();
+    return gson.fromJson(jsonStr, type);
+  }
+
+  static public boolean logErrorMessageByInterval(String key, String message,
+                                                  Throwable e, Logger callerLogger, Level level) {
+
+    LogHistory log = logHistoryList.get(key);
+    if (log == null) {
+      log = new LogHistory();
+      logHistoryList.put(key, log);
+    }
+    if ((System.currentTimeMillis() - log.lastLogTime) > logInterval) {
+      log.lastLogTime = System.currentTimeMillis();
+      int counter = log.counter;
+      log.counter = 0;
+      if (counter > 0) {
+        message += ". Messages suppressed before: " + counter;
+      }
+      if (e == null) {
+        callerLogger.log(level, message);
+      } else {
+        callerLogger.log(level, message, e);
+      }
+
+      return true;
+    } else {
+      log.counter++;
+    }
+    return false;
+
+  }
+
+  static public String subString(String str, int maxLength) {
+    if (str == null || str.length() == 0) {
+      return "";
+    }
+    maxLength = str.length() < maxLength ? str.length() : maxLength;
+    return str.substring(0, maxLength);
+  }
+
+  public static long genHash(String value) {
+    if (value == null) {
+      value = "null";
+    }
+    return MurmurHash.hash64A(value.getBytes(), HASH_SEED);
+  }
+
+  private static class LogHistory {
+    private long lastLogTime = 0;
+    private int counter = 0;
+  }
+
+  public static String getDate(String timeStampStr) {
+    try {
+      return dateFormatter.get().format(new Date(Long.parseLong(timeStampStr)));
+    } catch (Exception ex) {
+      logger.error(ex);
+      return null;
+    }
+  }
+
+  public static String getActualDateStr() {
+    try {
+      return dateFormatter.get().format(new Date());
+    } catch (Exception ex) {
+      logger.error(ex);
+      return null;
+    }
+  }
+
+  public static File getFileFromClasspath(String filename) {
+    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader()
+      .getResource(filename);
+    logger.debug("File Complete URI :" + fileCompleteUrl);
+    File file = null;
+    try {
+      file = new File(fileCompleteUrl.toURI());
+    } catch (Exception exception) {
+      logger.debug(exception.getMessage(), exception.getCause());
+    }
+    return file;
+  }
+
+  public static Object getClassInstance(String classFullName, AliasUtil.ALIAS_TYPE aliasType) {
+    Object instance = null;
+    try {
+      instance = (Object) Class.forName(classFullName).getConstructor().newInstance();
+    } catch (Exception exception) {
+      logger.error("Unsupported class =" + classFullName, exception.getCause());
+    }
+    // check instance class as par aliasType
+    if (instance != null) {
+      boolean isValid = false;
+      switch (aliasType) {
+        case FILTER:
+          isValid = Filter.class.isAssignableFrom(instance.getClass());
+          break;
+        case INPUT:
+          isValid = Input.class.isAssignableFrom(instance.getClass());
+          break;
+        case OUTPUT:
+          isValid = Output.class.isAssignableFrom(instance.getClass());
+          break;
+        case MAPPER:
+          isValid = Mapper.class.isAssignableFrom(instance.getClass());
+          break;
+        default:
+          // by default consider all are valid class
+          isValid = true;
+      }
+      if (!isValid) {
+        logger.error("Not a valid class :" + classFullName + " AliasType :" + aliasType.name());
+      }
+    }
+    return instance;
+  }
+
+  public static HashMap<String, Object> readJsonFromFile(File jsonFile) {
+    ObjectMapper mapper = new ObjectMapper();
+    try {
+      HashMap<String, Object> jsonmap = mapper.readValue(jsonFile, new TypeReference<HashMap<String, Object>>() {
+      });
+      return jsonmap;
+    } catch (JsonParseException e) {
+      logger.error(e, e.getCause());
+    } catch (JsonMappingException e) {
+      logger.error(e, e.getCause());
+    } catch (IOException e) {
+      logger.error(e, e.getCause());
+    }
+    return new HashMap<String, Object>();
+  }
+
+  public static boolean isListContains(List<String> list, String str, boolean caseSensitive) {
+    if (list != null) {
+      for (String value : list) {
+        if (value != null) {
+          if (caseSensitive) {
+            if (value.equals(str)) {
+              return true;
+            }
+          } else {
+            if (value.equalsIgnoreCase(str)) {
+              return true;
+            }
+          }
+          if (value.equalsIgnoreCase(LogFeederConstants.ALL)) {
+            return true;
+          }
+        }
+      }
+    }
+    return false;
+  }
+  
+  
+  private static synchronized String setHostNameAndIP() {
+    if (hostName == null || ipAddress == null) {
+      try {
+        InetAddress ip = InetAddress.getLocalHost();
+        ipAddress = ip.getHostAddress();
+        String getHostName = ip.getHostName();
+        String getCanonicalHostName = ip.getCanonicalHostName();
+        if (!getCanonicalHostName.equalsIgnoreCase(ipAddress)) {
+          logger.info("Using getCanonicalHostName()=" + getCanonicalHostName);
+          hostName = getCanonicalHostName;
+        } else {
+          logger.info("Using getHostName()=" + getHostName);
+          hostName = getHostName;
+        }
+        logger.info("ipAddress=" + ipAddress + ", getHostName=" + getHostName
+            + ", getCanonicalHostName=" + getCanonicalHostName + ", hostName="
+            + hostName);
+      } catch (UnknownHostException e) {
+        logger.error("Error getting hostname.", e);
+      }
+    }
+    return hostName;
+  }
+
+  public static String[] mergeArray(String[] first, String[] second) {
+    if (first == null) {
+      first = new String[0];
+    }
+    if (second == null) {
+      second = new String[0];
+    }
+    String[] mergedArray = ObjectArrays.concat(first, second, String.class);
+    return mergedArray;
+  }
+  
+  public static String getLogfeederTempDir() {
+    if (logfeederTempDir == null) {
+      synchronized (_LOCK) {
+        if (logfeederTempDir == null) {
+          String tempDirValue = getStringProperty("logfeeder.tmp.dir",
+              "/tmp/$username/logfeeder/");
+          HashMap<String, String> contextParam = new HashMap<String, String>();
+          String username = System.getProperty("user.name");
+          contextParam.put("username", username);
+          logfeederTempDir = PlaceholderUtil.replaceVariables(tempDirValue,
+              contextParam);
+        }
+      }
+    }
+    return logfeederTempDir;
+  }
+}
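
For illustration only (not part of the diff above): a minimal sketch of how the isListContains helper behaves, assuming the enclosing utility class is LogFeederUtil and that LogFeederConstants.ALL is the wildcard entry.

import java.util.Arrays;
import java.util.List;

import org.apache.ambari.logfeeder.util.LogFeederUtil; // assumed name of the utility class above

public class IsListContainsSketch {
  public static void main(String[] args) {
    List<String> levels = Arrays.asList("FATAL", "ERROR", "WARN");
    // Case-insensitive lookup: "error" matches the "ERROR" entry.
    System.out.println(LogFeederUtil.isListContains(levels, "error", false)); // true
    // Case-sensitive lookup: "error" no longer matches "ERROR".
    System.out.println(LogFeederUtil.isListContains(levels, "error", true));  // false
    // Any list entry equal to LogFeederConstants.ALL matches every value, regardless of the case flag.
  }
}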

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/MurmurHash.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/MurmurHash.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/MurmurHash.java
new file mode 100644
index 0000000..dbbefaf
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/MurmurHash.java
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.logfeeder.util;
+
+import com.google.common.primitives.Ints;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * This is a very fast, non-cryptographic hash suitable for general hash-based
+ * lookup.  See http://murmurhash.googlepages.com/ for more details.
+ * <p/>
+ * <p>The C version of MurmurHash 2.0 found at that site was ported
+ * to Java by Andrzej Bialecki (ab at getopt org).</p>
+ */
+public final class MurmurHash {
+
+  private MurmurHash() {
+  }
+
+  /**
+   * Hashes an int.
+   *
+   * @param data The int to hash.
+   * @param seed The seed for the hash.
+   * @return The 32 bit hash of the bytes in question.
+   */
+  public static int hash(int data, int seed) {
+    return hash(ByteBuffer.wrap(Ints.toByteArray(data)), seed);
+  }
+
+  /**
+   * Hashes bytes in an array.
+   *
+   * @param data The bytes to hash.
+   * @param seed The seed for the hash.
+   * @return The 32 bit hash of the bytes in question.
+   */
+  public static int hash(byte[] data, int seed) {
+    return hash(ByteBuffer.wrap(data), seed);
+  }
+
+  /**
+   * Hashes bytes in part of an array.
+   *
+   * @param data   The data to hash.
+   * @param offset Where to start munging.
+   * @param length How many bytes to process.
+   * @param seed   The seed to start with.
+   * @return The 32-bit hash of the data in question.
+   */
+  public static int hash(byte[] data, int offset, int length, int seed) {
+    return hash(ByteBuffer.wrap(data, offset, length), seed);
+  }
+
+  /**
+   * Hashes the bytes in a buffer from the current position to the limit.
+   *
+   * @param buf  The bytes to hash.
+   * @param seed The seed for the hash.
+   * @return The 32 bit murmur hash of the bytes in the buffer.
+   */
+  public static int hash(ByteBuffer buf, int seed) {
+    // save byte order for later restoration
+    ByteOrder byteOrder = buf.order();
+    buf.order(ByteOrder.LITTLE_ENDIAN);
+
+    int m = 0x5bd1e995;
+    int r = 24;
+
+    int h = seed ^ buf.remaining();
+
+    while (buf.remaining() >= 4) {
+      int k = buf.getInt();
+
+      k *= m;
+      k ^= k >>> r;
+      k *= m;
+
+      h *= m;
+      h ^= k;
+    }
+
+    if (buf.remaining() > 0) {
+      ByteBuffer finish = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN);
+      // for big-endian version, use this first:
+      // finish.position(4-buf.remaining());
+      finish.put(buf).rewind();
+      h ^= finish.getInt();
+      h *= m;
+    }
+
+    h ^= h >>> 13;
+    h *= m;
+    h ^= h >>> 15;
+
+    buf.order(byteOrder);
+    return h;
+  }
+
+
+  public static long hash64A(byte[] data, int seed) {
+    return hash64A(ByteBuffer.wrap(data), seed);
+  }
+
+  public static long hash64A(byte[] data, int offset, int length, int seed) {
+    return hash64A(ByteBuffer.wrap(data, offset, length), seed);
+  }
+
+  public static long hash64A(ByteBuffer buf, int seed) {
+    ByteOrder byteOrder = buf.order();
+    buf.order(ByteOrder.LITTLE_ENDIAN);
+
+    long m = 0xc6a4a7935bd1e995L;
+    int r = 47;
+
+    long h = seed ^ (buf.remaining() * m);
+
+    while (buf.remaining() >= 8) {
+      long k = buf.getLong();
+
+      k *= m;
+      k ^= k >>> r;
+      k *= m;
+
+      h ^= k;
+      h *= m;
+    }
+
+    if (buf.remaining() > 0) {
+      ByteBuffer finish = ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN);
+      // for big-endian version, do this first:
+      // finish.position(8-buf.remaining());
+      finish.put(buf).rewind();
+      h ^= finish.getLong();
+      h *= m;
+    }
+
+    h ^= h >>> r;
+    h *= m;
+    h ^= h >>> r;
+
+    buf.order(byteOrder);
+    return h;
+  }
+
+}
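
A brief usage sketch for the class above (illustrative, not part of the diff): it hashes the same payload with the 32-bit and 64-bit variants. The payload and seed are arbitrary example values; callers must reuse the same seed to obtain stable hashes.

import java.nio.charset.StandardCharsets;

import org.apache.ambari.logfeeder.util.MurmurHash;

public class MurmurHashSketch {
  public static void main(String[] args) {
    byte[] payload = "sample log line".getBytes(StandardCharsets.UTF_8);
    int seed = 0x9747b28c; // arbitrary example seed
    // 32-bit hash of the whole array.
    int h32 = MurmurHash.hash(payload, seed);
    // 64-bit variant over the same bytes.
    long h64 = MurmurHash.hash64A(payload, seed);
    System.out.println("h32=" + h32 + " h64=" + h64);
  }
}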

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/S3Util.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/S3Util.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/S3Util.java
new file mode 100644
index 0000000..10ea2c2
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/S3Util.java
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.util;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.zip.GZIPInputStream;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.log4j.Logger;
+
+import com.amazonaws.AmazonClientException;
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.AmazonS3Client;
+import com.amazonaws.services.s3.model.GetObjectRequest;
+import com.amazonaws.services.s3.model.ObjectMetadata;
+import com.amazonaws.services.s3.model.PutObjectRequest;
+import com.amazonaws.services.s3.model.S3Object;
+import com.amazonaws.services.s3.transfer.TransferManager;
+import com.amazonaws.services.s3.transfer.Upload;
+
+/**
+ * Utility class for connecting to S3.
+ */
+public class S3Util {
+  public static final S3Util INSTANCE = new S3Util();
+
+  private static final Logger LOG = Logger.getLogger(S3Util.class);
+
+  public static final String S3_PATH_START_WITH = "s3://";
+  public static final String S3_PATH_SEPARATOR = "/";
+
+  public AmazonS3 getS3Client(String accessKey, String secretKey) {
+    AWSCredentials awsCredentials = AWSUtil.INSTANCE.createAWSCredentials(
+        accessKey, secretKey);
+    AmazonS3 s3client;
+    if (awsCredentials != null) {
+      s3client = new AmazonS3Client(awsCredentials);
+    } else {
+      s3client = new AmazonS3Client();
+    }
+    return s3client;
+  }
+
+  public TransferManager getTransferManager(String accessKey, String secretKey) {
+    AWSCredentials awsCredentials = AWSUtil.INSTANCE.createAWSCredentials(
+        accessKey, secretKey);
+    TransferManager transferManager;
+    if (awsCredentials != null) {
+      transferManager = new TransferManager(awsCredentials);
+    } else {
+      transferManager = new TransferManager();
+    }
+    return transferManager;
+  }
+
+  public void shutdownTransferManager(TransferManager transferManager) {
+    if (transferManager != null) {
+      transferManager.shutdownNow();
+    }
+  }
+
+  public String getBucketName(String s3Path) {
+    String bucketName = null;
+    // strip the "s3://" prefix; the first path segment is the bucket name
+    if (s3Path != null) {
+      String[] s3PathParts = s3Path.replace(S3_PATH_START_WITH, "").split(
+          S3_PATH_SEPARATOR);
+      bucketName = s3PathParts[0];
+    }
+    return bucketName;
+  }
+
+  public String getS3Key(String s3Path) {
+    StringBuilder s3Key = new StringBuilder();
+    // strip the "s3://" prefix; everything after the bucket name forms the key
+    if (s3Path != null) {
+      String[] s3PathParts = s3Path.replace(S3_PATH_START_WITH, "").split(
+          S3_PATH_SEPARATOR);
+      ArrayList<String> s3PathList = new ArrayList<String>(
+          Arrays.asList(s3PathParts));
+      s3PathList.remove(0); // drop the bucket name
+      for (int index = 0; index < s3PathList.size(); index++) {
+        if (index > 0) {
+          s3Key.append(S3_PATH_SEPARATOR);
+        }
+        s3Key.append(s3PathList.get(index));
+      }
+    }
+    return s3Key.toString();
+  }
+
+  public void uploadFileTos3(String bucketName, String s3Key, File localFile,
+      String accessKey, String secretKey) {
+    TransferManager transferManager = getTransferManager(accessKey, secretKey);
+    try {
+      Upload upload = transferManager.upload(bucketName, s3Key, localFile);
+      upload.waitForUploadResult();
+    } catch (AmazonClientException | InterruptedException e) {
+      LOG.error("s3 uploading failed for file :" + localFile.getAbsolutePath(),
+          e);
+    } finally {
+      shutdownTransferManager(transferManager);
+    }
+  }
+
+  /**
+   * Get a buffered reader that streams the contents of an S3 file.
+   */
+  public BufferedReader getReader(String s3Path, String accessKey,
+      String secretKey) throws IOException {
+    // TODO: error handling
+    // TODO: compression support - read the header and
+    // auto-detect the compression format;
+    // for now GZIP compression is hard-coded
+    String s3Bucket = getBucketName(s3Path);
+    String s3Key = getS3Key(s3Path);
+    S3Object fileObj = getS3Client(accessKey, secretKey).getObject(
+        new GetObjectRequest(s3Bucket, s3Key));
+    GZIPInputStream objectInputStream;
+    try {
+      objectInputStream = new GZIPInputStream(fileObj.getObjectContent());
+      BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(
+          objectInputStream));
+      return bufferedReader;
+    } catch (IOException e) {
+      LOG.error("Error in creating stream reader for s3 file :" + s3Path,
+          e.getCause());
+      throw e;
+    }
+  }
+
+  public void writeIntoS3File(String data, String bucketName, String s3Key,
+      String accessKey, String secretKey) {
+    InputStream in = null;
+    try {
+      in = IOUtils.toInputStream(data, "UTF-8");
+    } catch (IOException e) {
+      LOG.error(e);
+    }
+    if (in != null) {
+      TransferManager transferManager = getTransferManager(accessKey, secretKey);
+      try {
+        if (transferManager != null) {
+          transferManager.upload(
+                  new PutObjectRequest(bucketName, s3Key, in,
+                  new ObjectMetadata())).waitForUploadResult();
+          LOG.debug("Data Uploaded to s3 file :" + s3Key + " in bucket :"
+              + bucketName);
+        }
+      } catch (AmazonClientException | InterruptedException e) {
+        LOG.error(e);
+      } finally {
+        try {
+          shutdownTransferManager(transferManager);
+          in.close();
+        } catch (IOException e) {
+          // ignore
+        }
+      }
+    }
+  }
+
+}
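
A short sketch (illustrative, not part of the diff) of the path helpers above, using the same example path that S3UtilTest later in this patch exercises:

import org.apache.ambari.logfeeder.util.S3Util;

public class S3PathSketch {
  public static void main(String[] args) {
    String s3Path = "s3://bucket_name/path/file.txt";
    // The first segment after the "s3://" prefix is the bucket.
    String bucket = S3Util.INSTANCE.getBucketName(s3Path); // "bucket_name"
    // The remaining segments, joined with "/", form the key.
    String key = S3Util.INSTANCE.getS3Key(s3Path);         // "path/file.txt"
    System.out.println(bucket + " -> " + key);
  }
}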

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
index aaf809f..44113e1 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
@@ -21,7 +21,6 @@ package org.apache.ambari.logfeeder.util;
 import java.io.IOException;
 import java.util.HashMap;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
 import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java
index 9f943ec..3aa8d7b 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java
@@ -21,9 +21,9 @@ package org.apache.ambari.logfeeder.filter;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.OutputMgr;
 import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.output.OutputMgr;
 import org.apache.log4j.Logger;
 import org.easymock.Capture;
 import org.easymock.CaptureType;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java
index cdec4df..64e9b69 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java
@@ -25,10 +25,10 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.TimeZone;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
-import org.apache.ambari.logfeeder.OutputMgr;
-import org.apache.ambari.logfeeder.exception.LogfeederException;
+import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.output.OutputMgr;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.log4j.Logger;
 import org.easymock.Capture;
 import org.easymock.CaptureType;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java
index 58db8f2..849e4c3 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java
@@ -21,8 +21,8 @@ package org.apache.ambari.logfeeder.filter;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.OutputMgr;
 import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.output.OutputMgr;
 import org.apache.log4j.Logger;
 import org.easymock.Capture;
 import org.easymock.CaptureType;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
index 2242a83..42e81da 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
@@ -25,7 +25,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.InputMgr;
 import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.commons.io.FileUtils;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
index 2df03bd..0652182 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
@@ -24,7 +24,7 @@ import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.time.DateUtils;
 import org.apache.log4j.Logger;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java
index 49cee56..cc6da56 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java
@@ -18,11 +18,12 @@
 
 package org.apache.ambari.logfeeder.output;
 
-import org.apache.ambari.logfeeder.LogFeederUtil;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+
 public class S3LogPathResolverTest {
 
   @Test

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java
index a0c398e..c64e0c5 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java
@@ -18,7 +18,7 @@
 
 package org.apache.ambari.logfeeder.output;
 
-import org.apache.ambari.logfeeder.s3.S3Util;
+import org.apache.ambari.logfeeder.util.S3Util;
 import org.junit.Test;
 
 import java.io.File;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/s3/AWSUtilTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/s3/AWSUtilTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/s3/AWSUtilTest.java
deleted file mode 100644
index 4f0d1aa..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/s3/AWSUtilTest.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder.s3;
-
-public class AWSUtilTest {
-  public void testAWSUtil_getAwsUserName() throws Exception {
-    String S3_ACCESS_KEY = "S3_ACCESS_KEY";
-    String S3_SECRET_KEY = "S3_SECRET_KEY";
-    AWSUtil.INSTANCE.getAwsUserName(S3_ACCESS_KEY, S3_SECRET_KEY);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/s3/S3UtilTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/s3/S3UtilTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/s3/S3UtilTest.java
deleted file mode 100644
index af14140..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/s3/S3UtilTest.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder.s3;
-
-import static org.junit.Assert.assertEquals;
-
-public class S3UtilTest {
-  public void testS3Util_pathToBucketName() throws Exception {
-    String s3Path = "s3://bucket_name/path/file.txt";
-    String expectedBucketName = "bucket_name";
-    String actualBucketName = S3Util.INSTANCE.getBucketName(s3Path);
-    assertEquals(expectedBucketName, actualBucketName);
-  }
-
-  public void testS3Util_pathToS3Key() throws Exception {
-    String s3Path = "s3://bucket_name/path/file.txt";
-    String expectedS3key = "path/file.txt";
-    String actualS3key = S3Util.INSTANCE.getS3Key(s3Path);
-    assertEquals(expectedS3key, actualS3key);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/AWSUtilTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/AWSUtilTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/AWSUtilTest.java
new file mode 100644
index 0000000..6df2283
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/AWSUtilTest.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.util;
+
+import org.apache.ambari.logfeeder.util.AWSUtil;
+
+public class AWSUtilTest {
+  public void testAWSUtil_getAwsUserName() throws Exception {
+    String S3_ACCESS_KEY = "S3_ACCESS_KEY";
+    String S3_SECRET_KEY = "S3_SECRET_KEY";
+    AWSUtil.INSTANCE.getAwsUserName(S3_ACCESS_KEY, S3_SECRET_KEY);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/S3UtilTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/S3UtilTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/S3UtilTest.java
new file mode 100644
index 0000000..84554b0
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/S3UtilTest.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.util;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.ambari.logfeeder.util.S3Util;
+
+public class S3UtilTest {
+  public void testS3Util_pathToBucketName() throws Exception {
+    String s3Path = "s3://bucket_name/path/file.txt";
+    String expectedBucketName = "bucket_name";
+    String actualBucketName = S3Util.INSTANCE.getBucketName(s3Path);
+    assertEquals(expectedBucketName, actualBucketName);
+  }
+
+  public void testS3Util_pathToS3Key() throws Exception {
+    String s3Path = "s3://bucket_name/path/file.txt";
+    String expectedS3key = "path/file.txt";
+    String actualS3key = S3Util.INSTANCE.getS3Key(s3Path);
+    assertEquals(expectedS3key, actualS3key);
+  }
+
+}


[33/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigManager.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigManager.java
new file mode 100644
index 0000000..7430770
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigManager.java
@@ -0,0 +1,276 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.manager;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.common.MessageEnums;
+import org.apache.ambari.logsearch.dao.UserConfigSolrDao;
+import org.apache.ambari.logsearch.query.QueryGeneration;
+import org.apache.ambari.logsearch.util.JSONUtil;
+import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.ambari.logsearch.util.SolrUtil;
+import org.apache.ambari.logsearch.view.VLogfeederFilterWrapper;
+import org.apache.ambari.logsearch.view.VUserConfig;
+import org.apache.ambari.logsearch.view.VUserConfigList;
+import org.apache.ambari.logsearch.query.model.SearchCriteria;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.response.FacetField.Count;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocument;
+import org.apache.solr.common.SolrDocumentList;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrInputDocument;
+import org.springframework.stereotype.Component;
+
+import javax.inject.Inject;
+
+@Component
+public class UserConfigManager extends JsonManagerBase {
+
+  private static final Logger logger = Logger.getLogger(UserConfigManager.class);
+
+  @Inject
+  private UserConfigSolrDao userConfigSolrDao;
+  @Inject
+  private QueryGeneration queryGenerator;
+
+  public String saveUserConfig(VUserConfig vHistory) {
+
+    SolrInputDocument solrInputDoc = new SolrInputDocument();
+    if (!isValid(vHistory)) {
+      throw RESTErrorUtil.createRESTException("No FilterName Specified", MessageEnums.INVALID_INPUT_DATA);
+    }
+
+    if (isNotUnique(vHistory) && !vHistory.isOverwrite()) {
+      throw RESTErrorUtil.createRESTException( "Name '" + vHistory.getFiltername() + "' already exists", MessageEnums.INVALID_INPUT_DATA);
+    }
+    String loggedInUserName = vHistory.getUserName();
+    String filterName = vHistory.getFiltername();
+
+    solrInputDoc.addField(LogSearchConstants.ID, vHistory.getId());
+    solrInputDoc.addField(LogSearchConstants.USER_NAME, loggedInUserName);
+    solrInputDoc.addField(LogSearchConstants.VALUES, vHistory.getValues());
+    solrInputDoc.addField(LogSearchConstants.FILTER_NAME, filterName);
+    solrInputDoc.addField(LogSearchConstants.ROW_TYPE, vHistory.getRowType());
+    List<String> shareNameList = vHistory.getShareNameList();
+    if (shareNameList != null && !shareNameList.isEmpty()) {
+      solrInputDoc.addField(LogSearchConstants.SHARE_NAME_LIST, shareNameList);
+    }
+    // Check whether the filter name already exists in Solr
+    SolrQuery solrQuery = new SolrQuery();
+    SolrUtil.setMainQuery(solrQuery, null);
+    queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.FILTER_NAME, SolrUtil.makeSearcableString(filterName));
+    queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.USER_NAME, loggedInUserName);
+    try {
+      QueryResponse queryResponse = userConfigSolrDao.process(solrQuery);
+      if (queryResponse != null) {
+        SolrDocumentList documentList = queryResponse.getResults();
+        if (documentList != null && !documentList.isEmpty() && !vHistory.isOverwrite()) {
+          logger.error("Filtername is already present");
+          throw RESTErrorUtil.createRESTException("Filtername is already present", MessageEnums.INVALID_INPUT_DATA);
+        }
+      }
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error("Error in checking same filtername config", e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+
+    try {
+      userConfigSolrDao.addDocs(solrInputDoc);
+      return convertObjToString(solrInputDoc);
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error("Error saving user config. solrDoc=" + solrInputDoc, e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+  }
+
+  private boolean isNotUnique(VUserConfig vHistory) {
+    String filterName = vHistory.getFiltername();
+    String rowType = vHistory.getRowType();
+
+    if (filterName != null && rowType != null) {
+      SolrQuery solrQuery = new SolrQuery();
+      filterName = SolrUtil.makeSearcableString(filterName);
+      solrQuery.setQuery(LogSearchConstants.COMPOSITE_KEY + ":" + filterName + "-" + rowType);
+      SolrUtil.setRowCount(solrQuery, 0);
+      try {
+        Long numFound = userConfigSolrDao.process(solrQuery).getResults().getNumFound();
+        if (numFound > 0) {
+          return true;
+        }
+      } catch (SolrException | SolrServerException | IOException e) {
+        logger.error("Error while checking if userConfig is unique.", e);
+      }
+    }
+    return false;
+  }
+
+  private boolean isValid(VUserConfig vHistory) {
+    return !StringUtils.isBlank(vHistory.getFiltername())
+        && !StringUtils.isBlank(vHistory.getRowType())
+        && !StringUtils.isBlank(vHistory.getUserName())
+        && !StringUtils.isBlank(vHistory.getValues());
+  }
+
+  public void deleteUserConfig(String id) {
+    try {
+      userConfigSolrDao.deleteUserConfig(id);
+    } catch (SolrException | SolrServerException | IOException e) {
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  public String getUserConfig(SearchCriteria searchCriteria) {
+
+    SolrDocumentList solrList = new SolrDocumentList();
+    VUserConfigList userConfigList = new VUserConfigList();
+
+    String rowType = (String) searchCriteria.getParamValue(LogSearchConstants.ROW_TYPE);
+    if (StringUtils.isBlank(rowType)) {
+      throw RESTErrorUtil.createRESTException("row type was not specified", MessageEnums.INVALID_INPUT_DATA);
+    }
+
+    String userName = (String) searchCriteria.getParamValue(LogSearchConstants.USER_NAME);
+    if (StringUtils.isBlank(userName)) {
+      throw RESTErrorUtil.createRESTException("user name was not specified", MessageEnums.INVALID_INPUT_DATA);
+    }
+    String filterName = (String) searchCriteria.getParamValue(LogSearchConstants.FILTER_NAME);
+    filterName = StringUtils.isBlank(filterName) ? "*" : "*" + filterName + "*";
+
+    try {
+
+      SolrQuery userConfigQuery = new SolrQuery();
+      SolrUtil.setMainQuery(userConfigQuery, null);
+      queryGenerator.setPagination(userConfigQuery, searchCriteria);
+      queryGenerator.setSingleIncludeFilter(userConfigQuery, LogSearchConstants.ROW_TYPE, rowType);
+      queryGenerator.setSingleORFilter(userConfigQuery, LogSearchConstants.USER_NAME, userName, LogSearchConstants.SHARE_NAME_LIST, userName);
+      queryGenerator.setSingleIncludeFilter(userConfigQuery, LogSearchConstants.FILTER_NAME, SolrUtil.makeSearcableString(filterName));
+
+      if (StringUtils.isBlank(searchCriteria.getSortBy())) {
+        searchCriteria.setSortBy(LogSearchConstants.FILTER_NAME);
+      }
+      if (StringUtils.isBlank(searchCriteria.getSortType())) {
+        searchCriteria.setSortType("" + SolrQuery.ORDER.asc);
+      }
+
+      queryGenerator.setSingleSortOrder(userConfigQuery, searchCriteria);
+      solrList = userConfigSolrDao.process(userConfigQuery).getResults();
+
+      Collection<VUserConfig> configList = new ArrayList<VUserConfig>();
+
+      for (SolrDocument solrDoc : solrList) {
+        VUserConfig userConfig = new VUserConfig();
+        userConfig.setFiltername("" + solrDoc.get(LogSearchConstants.FILTER_NAME));
+        userConfig.setId("" + solrDoc.get(LogSearchConstants.ID));
+        userConfig.setValues("" + solrDoc.get(LogSearchConstants.VALUES));
+        userConfig.setRowType("" + solrDoc.get(LogSearchConstants.ROW_TYPE));
+        try {
+          List<String> shareNameList = (List<String>) solrDoc.get(LogSearchConstants.SHARE_NAME_LIST);
+          userConfig.setShareNameList(shareNameList);
+        } catch (Exception e) {
+          // do nothing
+        }
+
+        userConfig.setUserName("" + solrDoc.get(LogSearchConstants.USER_NAME));
+
+        configList.add(userConfig);
+      }
+
+      userConfigList.setName("historyList");
+      userConfigList.setUserConfigList(configList);
+
+      userConfigList.setStartIndex(searchCriteria.getStartIndex());
+      userConfigList.setPageSize((int) searchCriteria.getMaxRows());
+
+      userConfigList.setTotalCount((long) solrList.getNumFound());
+    } catch (SolrException | SolrServerException | IOException e) {
+      // log the Solr failure and surface it as a REST error
+      logger.error(e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+
+    return convertObjToString(userConfigList);
+
+  }
+
+  public String updateUserConfig(VUserConfig vuserConfig) {
+    return saveUserConfig(vuserConfig);
+  }
+
+  // ////////////////////////////LEVEL FILTER/////////////////////////////////////
+
+  public String getUserFilter() {
+    VLogfeederFilterWrapper userFilter;
+    try {
+      userFilter = userConfigSolrDao.getUserFilter();
+    } catch (SolrServerException | IOException e) {
+      logger.error(e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+    return convertObjToString(userFilter);
+  }
+
+  public String saveUserFiter(String json) {
+    if (!StringUtils.isBlank(json)) {
+      VLogfeederFilterWrapper logfeederFilterWrapper = (VLogfeederFilterWrapper) JSONUtil.jsonToObj(json, VLogfeederFilterWrapper.class);
+      try {
+        if (logfeederFilterWrapper == null) {
+          logger.error(json + " is an invalid json");
+        }
+        userConfigSolrDao.saveUserFilter(logfeederFilterWrapper);
+      } catch (SolrException | SolrServerException | IOException e) {
+        logger.error("user config not able to save", e);
+        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      }
+    }
+    return getUserFilter();
+  }
+
+  public String getAllUserName() {
+    List<String> userList = new ArrayList<String>();
+    try {
+      SolrQuery userListQuery = new SolrQuery();
+      SolrUtil.setMainQuery(userListQuery, null);
+      SolrUtil.setFacetField(userListQuery, LogSearchConstants.USER_NAME);
+      QueryResponse queryResponse = userConfigSolrDao.process(userListQuery);
+      if (queryResponse == null) {
+        return convertObjToString(userList);
+      }
+      List<Count> counList = queryResponse.getFacetField(LogSearchConstants.USER_NAME).getValues();
+      for (Count cnt : counList) {
+        String userName = cnt.getName();
+        userList.add(userName);
+      }
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.warn("Error getting all users.", e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+    return convertObjToString(userList);
+  }
+}
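
For orientation, a hedged sketch (not part of the diff) of a caller populating VUserConfig before invoking saveUserConfig. The concrete values are invented; the setter names are the ones this class already calls in getUserConfig, and in the application the manager would be a Spring-injected bean rather than a method parameter.

import org.apache.ambari.logsearch.manager.UserConfigManager;
import org.apache.ambari.logsearch.view.VUserConfig;

public class SaveUserConfigSketch {
  static String save(UserConfigManager userConfigManager) {
    VUserConfig config = new VUserConfig();
    config.setId("example-id-1");                    // invented document id
    config.setUserName("admin");                     // owner; also checked by the duplicate-name query
    config.setFiltername("error-and-fatal");         // filter name; must be unique for this user/row type
    config.setRowType("history");                    // row type; part of the composite uniqueness key
    config.setValues("{\"level\":\"ERROR,FATAL\"}"); // serialized filter payload (invented example)
    return userConfigManager.saveUserConfig(config); // returns the stored document serialized as a string
  }
}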

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
deleted file mode 100644
index 59c1bbd..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
+++ /dev/null
@@ -1,276 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.manager;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-import org.apache.ambari.logsearch.common.LogSearchConstants;
-import org.apache.ambari.logsearch.common.MessageEnums;
-import org.apache.ambari.logsearch.common.SearchCriteria;
-import org.apache.ambari.logsearch.dao.UserConfigSolrDao;
-import org.apache.ambari.logsearch.query.QueryGeneration;
-import org.apache.ambari.logsearch.util.JSONUtil;
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
-import org.apache.ambari.logsearch.util.SolrUtil;
-import org.apache.ambari.logsearch.view.VLogfeederFilterWrapper;
-import org.apache.ambari.logsearch.view.VUserConfig;
-import org.apache.ambari.logsearch.view.VUserConfigList;
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Logger;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.response.FacetField.Count;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.SolrDocument;
-import org.apache.solr.common.SolrDocumentList;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrInputDocument;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
-
-@Component
-public class UserConfigMgr extends MgrBase {
-
-  private static final Logger logger = Logger.getLogger(UserConfigMgr.class);
-
-  @Autowired
-  private UserConfigSolrDao userConfigSolrDao;
-  @Autowired
-  private QueryGeneration queryGenerator;
-
-  public String saveUserConfig(VUserConfig vHistory) {
-
-    SolrInputDocument solrInputDoc = new SolrInputDocument();
-    if (!isValid(vHistory)) {
-      throw RESTErrorUtil.createRESTException("No FilterName Specified", MessageEnums.INVALID_INPUT_DATA);
-    }
-
-    if (isNotUnique(vHistory) && !vHistory.isOverwrite()) {
-      throw RESTErrorUtil.createRESTException( "Name '" + vHistory.getFiltername() + "' already exists", MessageEnums.INVALID_INPUT_DATA);
-    }
-    String loggedInUserName = vHistory.getUserName();
-    String filterName = vHistory.getFiltername();
-
-    solrInputDoc.addField(LogSearchConstants.ID, vHistory.getId());
-    solrInputDoc.addField(LogSearchConstants.USER_NAME, loggedInUserName);
-    solrInputDoc.addField(LogSearchConstants.VALUES, vHistory.getValues());
-    solrInputDoc.addField(LogSearchConstants.FILTER_NAME, filterName);
-    solrInputDoc.addField(LogSearchConstants.ROW_TYPE, vHistory.getRowType());
-    List<String> shareNameList = vHistory.getShareNameList();
-    if (shareNameList != null && !shareNameList.isEmpty()) {
-      solrInputDoc.addField(LogSearchConstants.SHARE_NAME_LIST, shareNameList);
-    }
-    // Check whether the Filter Name exists in solr
-    SolrQuery solrQuery = new SolrQuery();
-    SolrUtil.setMainQuery(solrQuery, null);
-    queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.FILTER_NAME, SolrUtil.makeSearcableString(filterName));
-    queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.USER_NAME, loggedInUserName);
-    try {
-      QueryResponse queryResponse = userConfigSolrDao.process(solrQuery);
-      if (queryResponse != null) {
-        SolrDocumentList documentList = queryResponse.getResults();
-        if (documentList != null && !documentList.isEmpty() && !vHistory.isOverwrite()) {
-          logger.error("Filtername is already present");
-          throw RESTErrorUtil.createRESTException("Filtername is already present", MessageEnums.INVALID_INPUT_DATA);
-        }
-      }
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error in checking same filtername config", e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-
-    try {
-      userConfigSolrDao.addDocs(solrInputDoc);
-      return convertObjToString(solrInputDoc);
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error saving user config. solrDoc=" + solrInputDoc, e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-  }
-
-  private boolean isNotUnique(VUserConfig vHistory) {
-    String filterName = vHistory.getFiltername();
-    String rowType = vHistory.getRowType();
-
-    if (filterName != null && rowType != null) {
-      SolrQuery solrQuery = new SolrQuery();
-      filterName = SolrUtil.makeSearcableString(filterName);
-      solrQuery.setQuery(LogSearchConstants.COMPOSITE_KEY + ":" + filterName + "-" + rowType);
-      SolrUtil.setRowCount(solrQuery, 0);
-      try {
-        Long numFound = userConfigSolrDao.process(solrQuery).getResults().getNumFound();
-        if (numFound > 0) {
-          return true;
-        }
-      } catch (SolrException | SolrServerException | IOException e) {
-        logger.error("Error while checking if userConfig is unique.", e);
-      }
-    }
-    return false;
-  }
-
-  private boolean isValid(VUserConfig vHistory) {
-    return !StringUtils.isBlank(vHistory.getFiltername())
-        && !StringUtils.isBlank(vHistory.getRowType())
-        && !StringUtils.isBlank(vHistory.getUserName())
-        && !StringUtils.isBlank(vHistory.getValues());
-  }
-
-  public void deleteUserConfig(String id) {
-    try {
-      userConfigSolrDao.deleteUserConfig(id);
-    } catch (SolrException | SolrServerException | IOException e) {
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-  }
-
-  @SuppressWarnings("unchecked")
-  public String getUserConfig(SearchCriteria searchCriteria) {
-
-    SolrDocumentList solrList = new SolrDocumentList();
-    VUserConfigList userConfigList = new VUserConfigList();
-
-    String rowType = (String) searchCriteria.getParamValue(LogSearchConstants.ROW_TYPE);
-    if (StringUtils.isBlank(rowType)) {
-      throw RESTErrorUtil.createRESTException("row type was not specified", MessageEnums.INVALID_INPUT_DATA);
-    }
-
-    String userName = (String) searchCriteria.getParamValue(LogSearchConstants.USER_NAME);
-    if (StringUtils.isBlank(userName)) {
-      throw RESTErrorUtil.createRESTException("user name was not specified", MessageEnums.INVALID_INPUT_DATA);
-    }
-    String filterName = (String) searchCriteria.getParamValue(LogSearchConstants.FILTER_NAME);
-    filterName = StringUtils.isBlank(filterName) ? "*" : "*" + filterName + "*";
-
-    try {
-
-      SolrQuery userConfigQuery = new SolrQuery();
-      SolrUtil.setMainQuery(userConfigQuery, null);
-      queryGenerator.setPagination(userConfigQuery, searchCriteria);
-      queryGenerator.setSingleIncludeFilter(userConfigQuery, LogSearchConstants.ROW_TYPE, rowType);
-      queryGenerator.setSingleORFilter(userConfigQuery, LogSearchConstants.USER_NAME, userName, LogSearchConstants.SHARE_NAME_LIST, userName);
-      queryGenerator.setSingleIncludeFilter(userConfigQuery, LogSearchConstants.FILTER_NAME, SolrUtil.makeSearcableString(filterName));
-
-      if (StringUtils.isBlank(searchCriteria.getSortBy())) {
-        searchCriteria.setSortBy(LogSearchConstants.FILTER_NAME);
-      }
-      if (StringUtils.isBlank(searchCriteria.getSortType())) {
-        searchCriteria.setSortType("" + SolrQuery.ORDER.asc);
-      }
-
-      queryGenerator.setSingleSortOrder(userConfigQuery, searchCriteria);
-      solrList = userConfigSolrDao.process(userConfigQuery).getResults();
-
-      Collection<VUserConfig> configList = new ArrayList<VUserConfig>();
-
-      for (SolrDocument solrDoc : solrList) {
-        VUserConfig userConfig = new VUserConfig();
-        userConfig.setFiltername("" + solrDoc.get(LogSearchConstants.FILTER_NAME));
-        userConfig.setId("" + solrDoc.get(LogSearchConstants.ID));
-        userConfig.setValues("" + solrDoc.get(LogSearchConstants.VALUES));
-        userConfig.setRowType("" + solrDoc.get(LogSearchConstants.ROW_TYPE));
-        try {
-          List<String> shareNameList = (List<String>) solrDoc.get(LogSearchConstants.SHARE_NAME_LIST);
-          userConfig.setShareNameList(shareNameList);
-        } catch (Exception e) {
-          // do nothing
-        }
-
-        userConfig.setUserName("" + solrDoc.get(LogSearchConstants.USER_NAME));
-
-        configList.add(userConfig);
-      }
-
-      userConfigList.setName("historyList");
-      userConfigList.setUserConfigList(configList);
-
-      userConfigList.setStartIndex(searchCriteria.getStartIndex());
-      userConfigList.setPageSize((int) searchCriteria.getMaxRows());
-
-      userConfigList.setTotalCount((long) solrList.getNumFound());
-    } catch (SolrException | SolrServerException | IOException e) {
-      // do nothing
-      logger.error(e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-
-    return convertObjToString(userConfigList);
-
-  }
-
-  public String updateUserConfig(VUserConfig vuserConfig) {
-    return saveUserConfig(vuserConfig);
-  }
-
-  // ////////////////////////////LEVEL FILTER/////////////////////////////////////
-
-  public String getUserFilter() {
-    VLogfeederFilterWrapper userFilter;
-    try {
-      userFilter = userConfigSolrDao.getUserFilter();
-    } catch (SolrServerException | IOException e) {
-      logger.error(e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-    return convertObjToString(userFilter);
-  }
-
-  public String saveUserFiter(String json) {
-    if (!StringUtils.isBlank(json)) {
-      VLogfeederFilterWrapper logfeederFilterWrapper = (VLogfeederFilterWrapper) JSONUtil.jsonToObj(json, VLogfeederFilterWrapper.class);
-      try {
-        if (logfeederFilterWrapper == null) {
-          logger.error(json + " is a invalid json");
-        }
-        userConfigSolrDao.saveUserFilter(logfeederFilterWrapper);
-      } catch (SolrException | SolrServerException | IOException e) {
-        logger.error("user config not able to save", e);
-        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-      }
-    }
-    return getUserFilter();
-  }
-
-  public String getAllUserName() {
-    List<String> userList = new ArrayList<String>();
-    try {
-      SolrQuery userListQuery = new SolrQuery();
-      SolrUtil.setMainQuery(userListQuery, null);
-      SolrUtil.setFacetField(userListQuery, LogSearchConstants.USER_NAME);
-      QueryResponse queryResponse = userConfigSolrDao.process(userListQuery);
-      if (queryResponse == null) {
-        return convertObjToString(userList);
-      }
-      List<Count> counList = queryResponse.getFacetField(LogSearchConstants.USER_NAME).getValues();
-      for (Count cnt : counList) {
-        String userName = cnt.getName();
-        userList.add(userName);
-      }
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.warn("Error getting all users.", e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-    return convertObjToString(userList);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/AnyGraphParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/AnyGraphParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/AnyGraphParamDefinition.java
new file mode 100644
index 0000000..e92d7bf
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/AnyGraphParamDefinition.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.X_AXIS_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.Y_AXIS_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.STACK_BY_D;
+
+public interface AnyGraphParamDefinition {
+
+  String getxAxis();
+
+  @ApiParam(value = X_AXIS_D, name = LogSearchConstants.REQUEST_PARAM_XAXIS)
+  void setxAxis(String xAxis);
+
+  String getyAxis();
+
+  @ApiParam(value = Y_AXIS_D, name = LogSearchConstants.REQUEST_PARAM_YAXIS)
+  void setyAxis(String yAxis);
+
+  String getStackBy();
+
+  @ApiParam(value = STACK_BY_D, name = LogSearchConstants.REQUEST_PARAM_STACK_BY)
+  void setStackBy(String stackBy);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/BundleIdParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/BundleIdParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/BundleIdParamDefinition.java
new file mode 100644
index 0000000..aa1a393
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/BundleIdParamDefinition.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.BUNDLE_ID;
+
+public interface BundleIdParamDefinition {
+
+  String getBundleId();
+
+  @ApiParam(value = BUNDLE_ID, name = LogSearchConstants.REQUEST_PARAM_BUNDLE_ID)
+  void setBundleId(String bundleId);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/CommonSearchParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/CommonSearchParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/CommonSearchParamDefinition.java
new file mode 100644
index 0000000..ef334d1
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/CommonSearchParamDefinition.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.START_TIME_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.END_TIME_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.START_INDEX_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.PAGE_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.PAGE_SIZE_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.SORT_BY_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.SORT_TYPE_D;
+
+
+public interface CommonSearchParamDefinition {
+
+  String getStartIndex();
+
+  @ApiParam(value = START_INDEX_D, name = LogSearchConstants.REQUEST_PARAM_START_INDEX)
+  void setStartIndex(String startIndex);
+
+  String getPage();
+
+  @ApiParam(value = PAGE_D, name = LogSearchConstants.REQUEST_PARAM_PAGE)
+  void setPage(String page);
+
+  String getPageSize();
+
+  @ApiParam(value = PAGE_SIZE_D, name = LogSearchConstants.REQUEST_PARAM_PAGE_SIZE)
+  void setPageSize(String pageSize);
+
+  String getSortBy();
+
+  @ApiParam(value = SORT_BY_D, name = LogSearchConstants.REQUEST_PARAM_SORT_BY)
+  void setSortBy(String sortBy);
+
+  String getSortType();
+
+  @ApiParam(value = SORT_TYPE_D, name = LogSearchConstants.REQUEST_PARAM_SORT_TYPE)
+  void setSortType(String sortType);
+
+  String getStartTime();
+
+  @ApiParam(value = START_TIME_D, name = LogSearchConstants.REQUEST_PARAM_START_TIME)
+  void setStartTime(String startTime);
+
+  String getEndTime();
+
+  @ApiParam(value = END_TIME_D, name = LogSearchConstants.REQUEST_PARAM_END_TIME)
+  void setEndTime(String endTime);
+}
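
As a quick orientation for the request-parameter interfaces above, here is a minimal, hypothetical sketch (not part of this patch) of how a bean shaped like the classes added later in this commit can be bound from query parameters with JAX-RS @BeanParam. The resource path, class names and parameter names below are invented for illustration only.

// Hypothetical illustration only; not part of this commit.
import javax.ws.rs.BeanParam;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;

// A tiny request bean mirroring the pattern used in this patch:
// plain String fields annotated with @QueryParam, exposed via getters/setters.
class ExampleSearchRequest {
  @QueryParam("startIndex")
  private String startIndex;

  @QueryParam("page")
  private String page;

  public String getStartIndex() { return startIndex; }
  public void setStartIndex(String startIndex) { this.startIndex = startIndex; }
  public String getPage() { return page; }
  public void setPage(String page) { this.page = page; }
}

@Path("/example/logs") // invented path, not a real Log Search endpoint
public class ExampleLogResource {

  @GET
  @Produces(MediaType.APPLICATION_JSON)
  public String search(@BeanParam ExampleSearchRequest request) {
    // The JAX-RS runtime populates the bean's @QueryParam fields before the call.
    return "startIndex=" + request.getStartIndex() + ", page=" + request.getPage();
  }
}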

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/DateRangeParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/DateRangeParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/DateRangeParamDefinition.java
new file mode 100644
index 0000000..e6edf7c
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/DateRangeParamDefinition.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.FROM_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.TO_D;
+
+public interface DateRangeParamDefinition {
+
+  String getFrom();
+
+  @ApiParam(value = FROM_D, name = LogSearchConstants.REQUEST_PARAM_FROM)
+  void setFrom(String from);
+
+  String getTo();
+
+  @ApiParam(value = TO_D, name = LogSearchConstants.REQUEST_PARAM_TO)
+  void setTo(String to);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/FieldParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/FieldParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/FieldParamDefinition.java
new file mode 100644
index 0000000..396fa93
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/FieldParamDefinition.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.FIELD_D;
+
+public interface FieldParamDefinition {
+
+  String getField();
+
+  @ApiParam(value = FIELD_D, name = LogSearchConstants.REQUEST_PARAM_FIELD)
+  void setField(String field);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/FormatParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/FormatParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/FormatParamDefinition.java
new file mode 100644
index 0000000..47f0620
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/FormatParamDefinition.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.FORMAT_D;
+
+public interface FormatParamDefinition {
+
+  String getFormat();
+
+  @ApiParam(value = FORMAT_D, name = LogSearchConstants.REQUEST_PARAM_FORMAT)
+  void setFormat(String format);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LastPageParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LastPageParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LastPageParamDefinition.java
new file mode 100644
index 0000000..c8531db
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LastPageParamDefinition.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.IS_LAST_PAGE_D;
+
+public interface LastPageParamDefinition {
+  boolean isLastPage();
+
+  @ApiParam(value = IS_LAST_PAGE_D, name = LogSearchConstants.REQUEST_PARAM_LAST_PAGE)
+  void setLastPage(boolean lastPage);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogFileParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogFileParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogFileParamDefinition.java
new file mode 100644
index 0000000..c355989
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogFileParamDefinition.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.LogFileDescriptions.COMPONENT_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.LogFileDescriptions.HOST_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.LogFileDescriptions.LOG_TYPE_D;
+
+
+public interface LogFileParamDefinition {
+
+  String getComponent();
+
+  @ApiParam(value = COMPONENT_D, name = LogSearchConstants.REQUEST_PARAM_COMPONENT)
+  void setComponent(String component);
+
+  String getHost();
+
+  @ApiParam(value = HOST_D, name = LogSearchConstants.REQUEST_PARAM_HOST)
+  void setHost(String host);
+
+  String getLogType();
+
+  @ApiParam(value = LOG_TYPE_D, name = LogSearchConstants.REQUEST_PARAM_LOG_TYPE)
+  void setLogType(String logType);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogFileTailParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogFileTailParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogFileTailParamDefinition.java
new file mode 100644
index 0000000..a527c48
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogFileTailParamDefinition.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.LogFileDescriptions.TAIL_SIZE_D;
+
+public interface LogFileTailParamDefinition {
+
+  String getTailSize();
+
+  @ApiParam(value = TAIL_SIZE_D, name = LogSearchConstants.REQUEST_PARAM_TAIL_SIZE)
+  void setTailSize(String tailSize);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogParamDefinition.java
new file mode 100644
index 0000000..e503ff5
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogParamDefinition.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.COLUMN_QUERY_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.I_MESSAGE_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.G_E_MESSAGE_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.MUST_BE_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.MUST_NOT_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.INCLUDE_QUERY_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.EXCLUDE_QUERY_D;
+
+public interface LogParamDefinition {
+
+  String getColumnQuery();
+
+  @ApiParam(value = COLUMN_QUERY_D, name = LogSearchConstants.REQUEST_PARAM_COLUMN_QUERY)
+  void setColumnQuery(String columnQuery);
+
+  String getiMessage();
+
+  @ApiParam(value = I_MESSAGE_D, name = LogSearchConstants.REQUEST_PARAM_I_MESSAGE)
+  void setiMessage(String iMessage);
+
+  String getgEMessage();
+
+  @ApiParam(value = G_E_MESSAGE_D, name = LogSearchConstants.REQUEST_PARAM_G_E_MESSAGE)
+  void setgEMessage(String gEMessage);
+
+  String getMustBe();
+
+  @ApiParam(value = MUST_BE_D, name = LogSearchConstants.REQUEST_PARAM_MUST_BE)
+  void setMustBe(String mustBe);
+
+  String getMustNot();
+
+  @ApiParam(value = MUST_NOT_D, name = LogSearchConstants.REQUEST_PARAM_MUST_NOT)
+  void setMustNot(String mustNot);
+
+  String getIncludeQuery();
+
+  @ApiParam(value = INCLUDE_QUERY_D, name = LogSearchConstants.REQUEST_PARAM_INCLUDE_QUERY)
+  void setIncludeQuery(String includeQuery);
+
+  String getExcludeQuery();
+
+  @ApiParam(value = EXCLUDE_QUERY_D, name = LogSearchConstants.REQUEST_PARAM_EXCLUDE_QUERY)
+  void setExcludeQuery(String excludeQuery);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogTruncatedParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogTruncatedParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogTruncatedParamDefinition.java
new file mode 100644
index 0000000..c3e2998
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/LogTruncatedParamDefinition.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.ID_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.SCROLL_TYPE_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.NUMBER_ROWS_D;
+
+public interface LogTruncatedParamDefinition {
+
+  String getId();
+
+  @ApiParam(value = ID_D, name = LogSearchConstants.REQUEST_PARAM_ID)
+  void setId(String id);
+
+  String getScrollType();
+
+  @ApiParam(value = SCROLL_TYPE_D, name = LogSearchConstants.REQUEST_PARAM_SCROLL_TYPE)
+  void setScrollType(String scrollType);
+
+  String getNumberRows();
+
+  @ApiParam(value = NUMBER_ROWS_D, name = LogSearchConstants.REQUEST_PARAM_NUMBER_ROWS)
+  void setNumberRows(String numberRows);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/QueryParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/QueryParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/QueryParamDefinition.java
new file mode 100644
index 0000000..3fcdbc0
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/QueryParamDefinition.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.QUERY_D;
+
+public interface QueryParamDefinition {
+
+  String getQuery();
+
+  @ApiParam(value = QUERY_D, name = LogSearchConstants.REQUEST_PARAM_QUERY)
+  void setQuery(String query);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/SearchRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/SearchRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/SearchRequest.java
new file mode 100644
index 0000000..0015c09
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/SearchRequest.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public interface SearchRequest {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogFileParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogFileParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogFileParamDefinition.java
new file mode 100644
index 0000000..a266b3e
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogFileParamDefinition.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.HOST_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.COMPONENT_D;
+
+public interface ServiceLogFileParamDefinition {
+
+  String getHostLogFile();
+
+  @ApiParam(value = HOST_D, name = LogSearchConstants.REQUEST_PARAM_HOST_LOG_FILE)
+  void setHostLogFile(String hostLogFile);
+
+  String getComponentLogFile();
+
+  @ApiParam(value = COMPONENT_D, name = LogSearchConstants.REQUEST_PARAM_COMPONENT_LOG_FILE)
+  void setComponentLogFile(String componentLogFile);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogParamDefinition.java
new file mode 100644
index 0000000..f280ac2
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogParamDefinition.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.LEVEL_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.ADVANCED_SEARCH_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.TREE_PARAMS_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.E_MESSAGE_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.G_MUST_NOT_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.HOST_NAME_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.COMPONENT_NAME_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.FILE_NAME_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.DATE_RANGE_LABEL_D;
+
+public interface ServiceLogParamDefinition {
+
+  String getLevel();
+
+  @ApiParam(value = LEVEL_D, name = LogSearchConstants.REQUEST_PARAM_LEVEL)
+  void setLevel(String level);
+
+  String getAdvancedSearch();
+
+  @ApiParam(value = ADVANCED_SEARCH_D, name = LogSearchConstants.REQUEST_PARAM_ADVANCED_SEARCH)
+  void setAdvancedSearch(String advancedSearch);
+
+  String getTreeParams();
+
+  @ApiParam(value = TREE_PARAMS_D, name = LogSearchConstants.REQUEST_PARAM_TREE_PARAMS)
+  void setTreeParams(String treeParams);
+
+  String geteMessage();
+
+  @ApiParam(value = E_MESSAGE_D, name = LogSearchConstants.REQUEST_PARAM_E_MESSAGE)
+  void seteMessage(String eMessage);
+
+  String getgMustNot();
+
+  @ApiParam(value = G_MUST_NOT_D, name = LogSearchConstants.REQUEST_PARAM_G_MUST_NOT)
+  void setgMustNot(String gMustNot);
+
+  String getHostName();
+
+  @ApiParam(value = HOST_NAME_D, name = LogSearchConstants.REQUEST_PARAM_HOST_NAME)
+  void setHostName(String hostName);
+
+  String getComponentName();
+
+  @ApiParam(value = COMPONENT_NAME_D, name = LogSearchConstants.REQUEST_PARAM_COMPONENT_NAME)
+  void setComponentName(String componentName);
+
+  String getFileName();
+
+  @ApiParam(value = FILE_NAME_D, name = LogSearchConstants.REQUEST_PARAM_FILE_NAME)
+  void setFileName(String fileName);
+
+  String getDateRangeLabel();
+
+  @ApiParam(value = DATE_RANGE_LABEL_D, name = LogSearchConstants.REQUEST_PARAM_DATE_RANGE_LABEL)
+  void setDateRangeLabel(String dateRangeLabel);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogSearchParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogSearchParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogSearchParamDefinition.java
new file mode 100644
index 0000000..9d8f1a6
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/ServiceLogSearchParamDefinition.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.FIND_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.KEYWORD_TYPE_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.SOURCE_LOG_ID_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.TOKEN_D;
+
+public interface ServiceLogSearchParamDefinition {
+
+  String getKeyWord();
+
+  @ApiParam(value = FIND_D, name = LogSearchConstants.REQUEST_PARAM_KEYWORD)
+  void setKeyWord(String keyWord);
+
+  String getSourceLogId();
+
+  @ApiParam(value = SOURCE_LOG_ID_D, name = LogSearchConstants.REQUEST_PARAM_SOURCE_LOG_ID)
+  void setSourceLogId(String sourceLogId);
+
+  String getKeywordType();
+
+  @ApiParam(value = KEYWORD_TYPE_D, name = LogSearchConstants.REQUEST_PARAM_KEYWORD_TYPE)
+  void setKeywordType(String keywordType);
+
+  String getToken();
+
+  @ApiParam(value = TOKEN_D, name = LogSearchConstants.REQUEST_PARAM_TOKEN)
+  void setToken(String token);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/UnitParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/UnitParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/UnitParamDefinition.java
new file mode 100644
index 0000000..3f493da
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/UnitParamDefinition.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.UNIT_D;
+
+public interface UnitParamDefinition {
+
+  String getUnit();
+
+  @ApiParam(value = UNIT_D, name = LogSearchConstants.REQUEST_PARAM_UNIT)
+  void setUnit(String unit);
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/UserConfigParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/UserConfigParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/UserConfigParamDefinition.java
new file mode 100644
index 0000000..23b350a
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/UserConfigParamDefinition.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.UserConfigDescriptions.USER_ID_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.UserConfigDescriptions.FILTER_NAME_D;
+import static org.apache.ambari.logsearch.doc.DocConstants.UserConfigDescriptions.ROW_TYPE_D;
+
+public interface UserConfigParamDefinition {
+
+  String getUserId();
+
+  @ApiParam(value = USER_ID_D, name = LogSearchConstants.REQUEST_PARAM_USER_ID)
+  void setUserId(String userId);
+
+  String getFilterName();
+
+  @ApiParam(value = FILTER_NAME_D, name = LogSearchConstants.REQUEST_PARAM_FILTER_NAME)
+  void setFilterName(String filterName);
+
+  String getRowType();
+
+  @ApiParam(value = ROW_TYPE_D, name = LogSearchConstants.REQUEST_PARAM_ROW_TYPE)
+  void setRowType(String rowType);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/UtcOffsetParamDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/UtcOffsetParamDefinition.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/UtcOffsetParamDefinition.java
new file mode 100644
index 0000000..aa2be71
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/UtcOffsetParamDefinition.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.UTC_OFFSET_D;
+
+public interface UtcOffsetParamDefinition {
+
+  String getUtcOffset();
+
+  @ApiParam(value = UTC_OFFSET_D, name = LogSearchConstants.REQUEST_PARAM_UTC_OFFSET)
+  void setUtcOffset(String utcOffset);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AnyGraphRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AnyGraphRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AnyGraphRequest.java
new file mode 100644
index 0000000..41da712
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AnyGraphRequest.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.AnyGraphParamDefinition;
+import org.apache.ambari.logsearch.model.request.DateRangeParamDefinition;
+import org.apache.ambari.logsearch.model.request.UnitParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class AnyGraphRequest extends CommonSearchRequest
+  implements AnyGraphParamDefinition, DateRangeParamDefinition, UnitParamDefinition{
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_XAXIS)
+  private String xAxis;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_YAXIS)
+  private String yAxis;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_STACK_BY)
+  private String stackBy;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_FROM)
+  private String from;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_TO)
+  private String to;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_UNIT)
+  private String unit;
+
+  @Override
+  public String getxAxis() {
+    return xAxis;
+  }
+
+  @Override
+  public void setxAxis(String xAxis) {
+    this.xAxis = xAxis;
+  }
+
+  @Override
+  public String getyAxis() {
+    return yAxis;
+  }
+
+  @Override
+  public void setyAxis(String yAxis) {
+    this.yAxis = yAxis;
+  }
+
+  @Override
+  public String getStackBy() {
+    return stackBy;
+  }
+
+  @Override
+  public void setStackBy(String stackBy) {
+    this.stackBy = stackBy;
+  }
+
+  @Override
+  public String getFrom() {
+    return from;
+  }
+
+  @Override
+  public void setFrom(String from) {
+    this.from = from;
+  }
+
+  @Override
+  public String getTo() {
+    return to;
+  }
+
+  @Override
+  public void setTo(String to) {
+    this.to = to;
+  }
+
+  @Override
+  public String getUnit() {
+    return unit;
+  }
+
+  @Override
+  public void setUnit(String unit) {
+    this.unit = unit;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditBarGraphRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditBarGraphRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditBarGraphRequest.java
new file mode 100644
index 0000000..91e7d1e
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditBarGraphRequest.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.UnitParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class AuditBarGraphRequest extends BaseAuditLogRequest implements UnitParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_UNIT)
+  private String unit;
+
+  @Override
+  public String getUnit() {
+    return unit;
+  }
+
+  @Override
+  public void setUnit(String unit) {
+    this.unit = unit;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditLogRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditLogRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditLogRequest.java
new file mode 100644
index 0000000..8dd13dc
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditLogRequest.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.LastPageParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class AuditLogRequest extends BaseAuditLogRequest implements LastPageParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_LAST_PAGE)
+  private boolean isLastPage;
+
+  @Override
+  public boolean isLastPage() {
+    return isLastPage;
+  }
+
+  @Override
+  public void setLastPage(boolean lastPage) {
+    isLastPage = lastPage;
+  }
+}
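
A minimal, hypothetical unit-test sketch (not part of this patch) that exercises the new AuditLogRequest as a plain bean; it assumes JUnit 4 on the test classpath and that the parent request classes in this patch keep their implicit no-arg constructors.

import static org.junit.Assert.assertTrue;

import org.apache.ambari.logsearch.model.request.impl.AuditLogRequest;
import org.junit.Test;

// Hypothetical test class, shown only to illustrate the JavaBean-style accessors.
public class AuditLogRequestSketchTest {

  @Test
  public void lastPageFlagRoundTrips() {
    AuditLogRequest request = new AuditLogRequest();
    request.setLastPage(true);        // setter declared by LastPageParamDefinition
    assertTrue(request.isLastPage()); // boolean getter defined on AuditLogRequest
  }
}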

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseAuditLogRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseAuditLogRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseAuditLogRequest.java
new file mode 100644
index 0000000..74b4ab7
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseAuditLogRequest.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.DateRangeParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class BaseAuditLogRequest extends BaseLogRequest implements DateRangeParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_FROM)
+  private String from;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_TO)
+  private String to;
+
+  @Override
+  public String getFrom() {
+    return from;
+  }
+
+  @Override
+  public void setFrom(String from) {
+    this.from = from;
+  }
+
+  @Override
+  public String getTo() {
+    return to;
+  }
+
+  @Override
+  public void setTo(String to) {
+    this.to = to;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseLogRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseLogRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseLogRequest.java
new file mode 100644
index 0000000..5a96991
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseLogRequest.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.LogParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class BaseLogRequest extends QueryRequest implements LogParamDefinition {
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_COLUMN_QUERY)
+  private String columnQuery;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_I_MESSAGE)
+  private String iMessage;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_G_E_MESSAGE)
+  private String gEMessage;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_MUST_BE)
+  private String mustBe;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_MUST_NOT)
+  private String mustNot;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_EXCLUDE_QUERY)
+  private String excludeQuery;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_INCLUDE_QUERY)
+  private String includeQuery;
+
+  @Override
+  public String getColumnQuery() {
+    return columnQuery;
+  }
+
+  @Override
+  public void setColumnQuery(String columnQuery) {
+    this.columnQuery = columnQuery;
+  }
+
+  @Override
+  public String getiMessage() {
+    return iMessage;
+  }
+
+  @Override
+  public void setiMessage(String iMessage) {
+    this.iMessage = iMessage;
+  }
+
+  @Override
+  public String getgEMessage() {
+    return gEMessage;
+  }
+
+  @Override
+  public void setgEMessage(String gEMessage) {
+    this.gEMessage = gEMessage;
+  }
+
+  @Override
+  public String getMustBe() {
+    return mustBe;
+  }
+
+  @Override
+  public void setMustBe(String mustBe) {
+    this.mustBe = mustBe;
+  }
+
+  @Override
+  public String getMustNot() {
+    return mustNot;
+  }
+
+  @Override
+  public void setMustNot(String mustNot) {
+    this.mustNot = mustNot;
+  }
+
+  @Override
+  public String getIncludeQuery() {
+    return includeQuery;
+  }
+
+  @Override
+  public void setIncludeQuery(String includeQuery) {
+    this.includeQuery = includeQuery;
+  }
+
+  @Override
+  public String getExcludeQuery() {
+    return excludeQuery;
+  }
+
+  @Override
+  public void setExcludeQuery(String excludeQuery) {
+    this.excludeQuery = excludeQuery;
+  }
+}
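
BaseLogRequest imports io.swagger.annotations.ApiParam although the fields
above carry only @QueryParam. Purely as an illustration (not part of this
commit, and with a hypothetical class, parameter name and description), a
request-bean field can combine both annotations so Swagger documents the
parameter while JAX-RS binds it:

    package org.apache.ambari.logsearch.example;

    import io.swagger.annotations.ApiParam;
    import javax.ws.rs.QueryParam;

    // Hypothetical bean: @ApiParam supplies the Swagger description,
    // @QueryParam binds the value from the query string.
    public class ExampleDocumentedRequest {

      @ApiParam(name = "includeQuery", value = "Filter query whose matches are included in the result")
      @QueryParam("includeQuery")
      private String includeQuery;

      public String getIncludeQuery() {
        return includeQuery;
      }

      public void setIncludeQuery(String includeQuery) {
        this.includeQuery = includeQuery;
      }
    }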


[36/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
deleted file mode 100644
index e227c6c..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.manager;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.ambari.logsearch.common.LogSearchConstants;
-import org.apache.ambari.logsearch.common.MessageEnums;
-import org.apache.ambari.logsearch.common.SearchCriteria;
-import org.apache.ambari.logsearch.dao.AuditSolrDao;
-import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao;
-import org.apache.ambari.logsearch.dao.SolrDaoBase;
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
-import org.apache.ambari.logsearch.util.SolrUtil;
-import org.apache.ambari.logsearch.view.VLogFile;
-import org.apache.ambari.logsearch.view.VLogFileList;
-import org.apache.ambari.logsearch.view.VSolrLogList;
-import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Logger;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.response.FacetField;
-import org.apache.solr.client.solrj.response.FacetField.Count;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.SolrException;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
-
-
-@Component
-public class LogFileMgr extends MgrBase {
-
-  private static final Logger logger = Logger.getLogger(LogFileMgr.class);
-
-  @Autowired
-  private ServiceLogsSolrDao serviceLogsSolrDao;
-  @Autowired
-  private AuditSolrDao auditSolrDao;
-
-  public String searchLogFiles(SearchCriteria searchCriteria) {
-    VLogFileList logFileList = new VLogFileList();
-    List<VLogFile> logFiles = new ArrayList<VLogFile>();
-    String componentName = (String) searchCriteria.getParamValue("component");
-    String host = (String) searchCriteria.getParamValue("host");
-    int minCount = 1;// to remove zero count facet
-    SolrQuery solrQuery = new SolrQuery();
-    SolrUtil.setMainQuery(solrQuery, null);
-    SolrUtil.setFacetFieldWithMincount(solrQuery, LogSearchConstants.SOLR_PATH, minCount);
-    // adding filter
-    queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_COMPONENT, componentName);
-    queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_HOST, host);
-    try {
-      String logType = (String) searchCriteria.getParamValue("logType");
-      if (StringUtils.isBlank(logType)) {
-        logType = LogType.SERVICE.name();// default is service Log
-      }
-      SolrDaoBase daoMgr = null;
-      if (logType.equalsIgnoreCase(LogType.SERVICE.name())) {
-        daoMgr = serviceLogsSolrDao;
-      } else if (logType.equalsIgnoreCase(LogType.AUDIT.name())) {
-        daoMgr = auditSolrDao;
-      } else {
-        throw RESTErrorUtil.createRESTException(logType + " is not a valid logType", MessageEnums.INVALID_INPUT_DATA);
-      }
-      QueryResponse queryResponse = daoMgr.process(solrQuery);
-      if (queryResponse.getFacetField(LogSearchConstants.SOLR_PATH) != null) {
-        FacetField queryFacetField = queryResponse.getFacetField(LogSearchConstants.SOLR_PATH);
-        if (queryFacetField != null) {
-          List<Count> countList = queryFacetField.getValues();
-          for (Count count : countList) {
-            VLogFile vLogFile = new VLogFile();
-            String filePath = count.getName();
-            String fileName = FilenameUtils.getName(filePath);
-            vLogFile.setPath(filePath);
-            vLogFile.setName(fileName);
-            logFiles.add(vLogFile);
-          }
-        }
-      }
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error in solr query  :" + e.getLocalizedMessage() + "\n Query :" + solrQuery.toQueryString(), e.getCause());
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-    logFileList.setLogFiles(logFiles);
-    String jsonStr = "";
-    jsonStr = convertObjToString(logFileList);
-
-    return jsonStr;
-  }
-
-  public String getLogFileTail(SearchCriteria searchCriteria) {
-    String host = (String) searchCriteria.getParamValue("host");
-    String logFile = (String) searchCriteria.getParamValue("name");
-    String component = (String) searchCriteria.getParamValue("component");
-    String tailSize = (String) searchCriteria.getParamValue("tailSize");
-    if (StringUtils.isBlank(host)) {
-      throw RESTErrorUtil.createRESTException("missing Host Name", MessageEnums.ERROR_SYSTEM);
-    }
-    tailSize = (StringUtils.isBlank(tailSize)) ? "10" : tailSize;
-    SolrQuery logFileTailQuery = new SolrQuery();
-    try {
-      int tail = Integer.parseInt(tailSize);
-      tail = tail > 100 ? 100 : tail;
-      SolrUtil.setMainQuery(logFileTailQuery, null);
-      queryGenerator.setSingleIncludeFilter(logFileTailQuery, LogSearchConstants.SOLR_HOST, host);
-      if (!StringUtils.isBlank(logFile)) {
-        queryGenerator.setSingleIncludeFilter(logFileTailQuery, LogSearchConstants.SOLR_PATH, SolrUtil.makeSolrSearchString(logFile));
-      } else if (!StringUtils.isBlank(component)) {
-        queryGenerator.setSingleIncludeFilter(logFileTailQuery, LogSearchConstants.SOLR_COMPONENT, component);
-      } else {
-        throw RESTErrorUtil.createRESTException("component or logfile parameter must be present", MessageEnums.ERROR_SYSTEM);
-      }
-
-      SolrUtil.setRowCount(logFileTailQuery, tail);
-      queryGenerator.setSortOrderDefaultServiceLog(logFileTailQuery, new SearchCriteria());
-      VSolrLogList solrLogList = getLogAsPaginationProvided(logFileTailQuery, serviceLogsSolrDao);
-      return convertObjToString(solrLogList);
-
-    } catch (NumberFormatException ne) {
-
-      throw RESTErrorUtil.createRESTException(ne.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
-
-    }
-  }
-}
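
The removed LogFileMgr.searchLogFiles derived the file list from a Solr facet
on the path field. A minimal standalone sketch of that pattern with plain
SolrJ follows; the field names (path, host, type), the class name and the
SolrClient wiring are assumptions for illustration, not the removed
implementation.

    package org.apache.ambari.logsearch.example;

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.commons.io.FilenameUtils;
    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.SolrServerException;
    import org.apache.solr.client.solrj.response.FacetField;
    import org.apache.solr.client.solrj.response.QueryResponse;

    public class ExampleLogFileLister {

      public static List<String> listLogFileNames(SolrClient solr, String host, String component)
          throws SolrServerException, IOException {
        SolrQuery query = new SolrQuery("*:*");
        query.setRows(0);                 // only facet counts are needed, no documents
        query.addFacetField("path");      // assumed field holding the log file path
        query.setFacetMinCount(1);        // skip zero-count facet values
        if (host != null) {
          query.addFilterQuery("host:" + host);      // assumed host field
        }
        if (component != null) {
          query.addFilterQuery("type:" + component); // assumed component field
        }

        List<String> fileNames = new ArrayList<>();
        QueryResponse response = solr.query(query);
        FacetField pathFacet = response.getFacetField("path");
        if (pathFacet != null && pathFacet.getValues() != null) {
          for (FacetField.Count count : pathFacet.getValues()) {
            fileNames.add(FilenameUtils.getName(count.getName()));
          }
        }
        return fileNames;
      }
    }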

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
deleted file mode 100644
index 53e0aab..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
+++ /dev/null
@@ -1,1896 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.manager;
-
-import java.io.IOException;
-import java.text.ParseException;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Collection;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.TimeZone;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.CopyOnWriteArrayList;
-
-import javax.ws.rs.core.Response;
-
-import org.apache.ambari.logsearch.common.ConfigHelper;
-import org.apache.ambari.logsearch.common.LogSearchConstants;
-import org.apache.ambari.logsearch.common.MessageEnums;
-import org.apache.ambari.logsearch.common.PropertiesHelper;
-import org.apache.ambari.logsearch.common.SearchCriteria;
-import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao;
-import org.apache.ambari.logsearch.graph.GraphDataGenerator;
-import org.apache.ambari.logsearch.query.QueryGenerationBase;
-import org.apache.ambari.logsearch.util.BizUtil;
-import org.apache.ambari.logsearch.util.DateUtil;
-import org.apache.ambari.logsearch.util.FileUtil;
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
-import org.apache.ambari.logsearch.util.SolrUtil;
-import org.apache.ambari.logsearch.view.VBarDataList;
-import org.apache.ambari.logsearch.view.VBarGraphData;
-import org.apache.ambari.logsearch.view.VCount;
-import org.apache.ambari.logsearch.view.VCountList;
-import org.apache.ambari.logsearch.view.VGraphData;
-import org.apache.ambari.logsearch.view.VGraphInfo;
-import org.apache.ambari.logsearch.view.VGroupList;
-import org.apache.ambari.logsearch.view.VNameValue;
-import org.apache.ambari.logsearch.view.VNameValueList;
-import org.apache.ambari.logsearch.view.VNode;
-import org.apache.ambari.logsearch.view.VNodeList;
-import org.apache.ambari.logsearch.view.VSolrLogList;
-import org.apache.ambari.logsearch.view.VSummary;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.time.DateUtils;
-import org.apache.log4j.Logger;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.response.FacetField;
-import org.apache.solr.client.solrj.response.FacetField.Count;
-import org.apache.solr.client.solrj.response.PivotField;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.client.solrj.response.RangeFacet;
-import org.apache.solr.common.SolrDocument;
-import org.apache.solr.common.SolrDocumentList;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.scheduling.annotation.Scheduled;
-import org.springframework.stereotype.Component;
-
-import com.google.common.collect.Lists;
-
-@Component
-public class LogsMgr extends MgrBase {
-  private static final Logger logger = Logger.getLogger(LogsMgr.class);
-
-  private static List<String> cancelByDate = new CopyOnWriteArrayList<String>();
-
-  private static Map<String, String> mapUniqueId = new ConcurrentHashMap<String, String>();
-  
-  private static enum CONDITION {
-    OR, AND
-  }
-
-  @Autowired
-  private ServiceLogsSolrDao serviceLogsSolrDao;
-  @Autowired
-  private GraphDataGenerator graphDataGenerator;
-
-  public String searchLogs(SearchCriteria searchCriteria) {
-    String keyword = (String) searchCriteria.getParamValue("keyword");
-    String logId = (String) searchCriteria.getParamValue("sourceLogId");
-    String lastPage = (String)  searchCriteria.getParamValue("isLastPage");
-    Boolean isLastPage = Boolean.parseBoolean(lastPage);
-
-    if (!StringUtils.isBlank(keyword)) {
-      try {
-        return getPageByKeyword(searchCriteria);
-      } catch (SolrException | SolrServerException e) {
-        logger.error("Error while getting keyword=" + keyword, e);
-        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-            .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-      }
-    } else if (!StringUtils.isBlank(logId)) {
-      try {
-        return getPageByLogId(searchCriteria);
-      } catch (SolrException e) {
-        logger.error("Error while getting keyword=" + keyword, e);
-        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-            .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-      }
-    } else if (isLastPage) {
-      SolrQuery lastPageQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-      VSolrLogList collection = getLastPage(searchCriteria,LogSearchConstants.LOGTIME,serviceLogsSolrDao,lastPageQuery);
-      if(collection == null){
-        collection = new VSolrLogList();
-      }
-      return convertObjToString(collection);
-    } else {
-      SolrQuery solrQuery = queryGenerator
-          .commonServiceFilterQuery(searchCriteria);
-
-      solrQuery.setParam("event", "/service/logs");
-
-      VSolrLogList collection = getLogAsPaginationProvided(solrQuery,
-          serviceLogsSolrDao);
-      return convertObjToString(collection);
-    }
-  }
-
-  public String getHosts() {
-    return getFields(LogSearchConstants.SOLR_HOST);
-  }
-  
-  private String getFields(String field){
-
-    SolrQuery solrQuery = new SolrQuery();
-    VGroupList collection = new VGroupList();
-    SolrUtil.setMainQuery(solrQuery, null);
-    SolrUtil.setFacetField(solrQuery,
-        field);
-    SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
-    try {
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      if(response == null){
-        return convertObjToString(collection);
-      }
-      FacetField facetField = response
-        .getFacetField(field);
-      if (facetField == null){
-        return convertObjToString(collection);
-      }
-      List<Count> fieldList = facetField.getValues();
-      if (fieldList == null){
-        return convertObjToString(collection);
-      }
-      SolrDocumentList docList = response.getResults();
-      if(docList == null){
-        return convertObjToString(collection);
-      }
-      String temp = "";
-      for (Count cnt : fieldList) {
-        SolrDocument solrDoc = new SolrDocument();
-        temp = cnt.getName();
-        solrDoc.put(field, temp);
-        docList.add(solrDoc);
-      }
-
-      collection.setGroupDocuments(docList);
-      if(!docList.isEmpty()){
-        collection.setStartIndex((int) docList.getStart());
-        collection.setTotalCount(docList.getNumFound());
-      }
-      return convertObjToString(collection);
-    } catch (IOException | SolrServerException | SolrException e) {
-      logger.error(e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-
-  }
-
-  public String getComponents() {
-    return getFields(LogSearchConstants.SOLR_COMPONENT);
-  }
-
-  public String getAggregatedInfo(SearchCriteria searchCriteria) {
-    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-    String hierarchy = "host,type,level";
-    VGraphInfo graphInfo = new VGraphInfo();
-    try {
-      SolrUtil.setMainQuery(solrQuery, null);
-      SolrUtil.setFacetPivot(solrQuery, 1, hierarchy);
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      if (response == null) {
-        return convertObjToString(graphInfo);
-      }
-
-      List<List<PivotField>> hirarchicalPivotField = new ArrayList<List<PivotField>>();
-      List<VGraphData> dataList = new ArrayList<VGraphData>();
-      NamedList<List<PivotField>> namedList = response.getFacetPivot();
-      if (namedList != null) {
-        hirarchicalPivotField = namedList.getAll(hierarchy);
-      }
-      if (!hirarchicalPivotField.isEmpty()) {
-        dataList = buidGraphData(hirarchicalPivotField.get(0));
-      }
-      if (!dataList.isEmpty()) {
-        graphInfo.setGraphData(dataList);
-      }
-
-      return convertObjToString(graphInfo);
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-  }
-
-  public List<VGraphData> buidGraphData(List<PivotField> pivotFields) {
-    List<VGraphData> logList = new ArrayList<VGraphData>();
-    if (pivotFields != null) {
-      for (PivotField pivotField : pivotFields) {
-        if (pivotField != null) {
-          VGraphData logLevel = new VGraphData();
-          logLevel.setName("" + pivotField.getValue());
-          logLevel.setCount(Long.valueOf(pivotField.getCount()));
-          if (pivotField.getPivot() != null) {
-            logLevel.setDataList(buidGraphData(pivotField.getPivot()));
-          }
-          logList.add(logLevel);
-        }
-      }
-    }
-    return logList;
-  }
-
-  public VCountList getFieldCount(String field){
-    VCountList collection = new VCountList();
-    List<VCount> vCounts = new ArrayList<VCount>();
-    SolrQuery solrQuery = new SolrQuery();
-    SolrUtil.setMainQuery(solrQuery, null);
-    if(field == null){
-      return collection;
-    }
-    SolrUtil.setFacetField(solrQuery, field);
-    try {
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      if (response == null){
-        return collection;
-      }
-      FacetField facetFields = response.getFacetField(field);
-      if (facetFields == null){
-        return collection;
-      }
-      List<Count> fieldList = facetFields.getValues();
-
-      if(fieldList == null){
-        return collection;
-      }
-
-      for (Count cnt : fieldList) {
-        if (cnt != null) {
-          VCount vCount = new VCount();
-          vCount.setName(cnt.getName());
-          vCount.setCount(cnt.getCount());
-          vCounts.add(vCount);
-        }
-      }
-
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-
-    collection.setCounts(vCounts);
-    return collection;
-  }
-  
-  public VCountList getLogLevelCount() {
-    return getFieldCount(LogSearchConstants.SOLR_LEVEL);
-  }
-
-  public VCountList getComponentsCount() {
-    return getFieldCount(LogSearchConstants.SOLR_COMPONENT);
-  }
-
-  public VCountList getHostsCount() {
-    return getFieldCount(LogSearchConstants.SOLR_HOST);
-  }
-
-  public List<VNode> buidTreeData(List<PivotField> pivotFields,
-                                  List<PivotField> pivotFieldHost, SolrQuery query,
-                                  String firstPriority, String secondPriority) {
-    List<VNode> extensionTree = new ArrayList<VNode>();
-    String hostQuery = null;
-    if (pivotFields != null) {
-      // For Host
-      for (PivotField pivotHost : pivotFields) {
-        if (pivotHost != null) {
-          VNode hostNode = new VNode();
-          String name = (pivotHost.getValue() == null ? "" : ""+ pivotHost.getValue());
-          String value = "" + pivotHost.getCount();
-          if(!StringUtils.isBlank(name)){
-            hostNode.setName(name);
-          }
-          if(!StringUtils.isBlank(value)){
-            hostNode.setValue(value);
-          }
-          if(!StringUtils.isBlank(firstPriority)){
-            hostNode.setType(firstPriority);
-          }
-
-          hostNode.setParent(true);
-          hostNode.setRoot(true);
-          PivotField hostPivot = null;
-          for (PivotField searchHost : pivotFieldHost) {
-            if (!StringUtils.isBlank(hostNode.getName())
-                && hostNode.getName().equals(searchHost.getValue())) {
-              hostPivot = searchHost;
-              break;
-            }
-          }
-          List<PivotField> pivotLevelHost = hostPivot.getPivot();
-          if (pivotLevelHost != null) {
-            Collection<VNameValue> logLevelCount = new ArrayList<VNameValue>();
-            for (PivotField pivotLevel : pivotLevelHost) {
-              if (pivotLevel != null) {
-                VNameValue vnameValue = new VNameValue();
-                String levelName = (pivotLevel.getValue() == null ? "" : ""
-                    + pivotLevel.getValue());
-                vnameValue.setName(levelName.toUpperCase());
-                vnameValue.setValue("" + pivotLevel.getCount());
-                logLevelCount.add(vnameValue);
-              }
-            }
-            hostNode.setLogLevelCount(logLevelCount);
-          }
-
-          query.addFilterQuery(hostQuery);
-          List<PivotField> pivotComponents = pivotHost.getPivot();
-          // For Components
-          if (pivotComponents != null) {
-            Collection<VNode> componentNodes = new ArrayList<VNode>();
-            for (PivotField pivotComp : pivotComponents) {
-              if (pivotComp != null) {
-                VNode compNode = new VNode();
-                String compName = (pivotComp.getValue() == null ? "" : ""
-                    + pivotComp.getValue());
-                compNode.setName(compName);
-                if (!StringUtils.isBlank(secondPriority)) {
-                  compNode.setType(secondPriority);
-                }
-                compNode.setValue("" + pivotComp.getCount());
-                compNode.setParent(false);
-                compNode.setRoot(false);
-                List<PivotField> pivotLevels = pivotComp.getPivot();
-                if (pivotLevels != null) {
-                  Collection<VNameValue> logLevelCount = new ArrayList<VNameValue>();
-                  for (PivotField pivotLevel : pivotLevels) {
-                    if (pivotLevel != null) {
-                      VNameValue vnameValue = new VNameValue();
-                      String compLevel = pivotLevel.getValue() == null ? ""
-                          : "" + pivotLevel.getValue();
-                      vnameValue.setName((compLevel).toUpperCase());
-
-                      vnameValue.setValue("" + pivotLevel.getCount());
-                      logLevelCount.add(vnameValue);
-                    }
-                  }
-                  compNode.setLogLevelCount(logLevelCount);
-                }
-                componentNodes.add(compNode);
-              }
-            }
-            hostNode.setChilds(componentNodes);
-          }
-          extensionTree.add(hostNode);
-        }
-      }
-    }
-
-    return extensionTree;
-  }
-
-  public VNodeList getTreeExtension(SearchCriteria searchCriteria) {
-    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-    solrQuery.setParam("event", "/getTreeExtension");
-
-    if (searchCriteria.getSortBy() == null) {
-      searchCriteria.setSortBy(LogSearchConstants.SOLR_HOST);
-      searchCriteria.setSortType(SolrQuery.ORDER.asc.toString());
-    }
-    queryGenerator.setFilterFacetSort(solrQuery, searchCriteria);
-    String hostName = ""
-      + ((searchCriteria.getParamValue("hostName") == null) ? ""
-      : searchCriteria.getParamValue("hostName"));
-    if (!StringUtils.isBlank(hostName)){
-      solrQuery.addFilterQuery(LogSearchConstants.SOLR_HOST + ":*"
-        + hostName + "*");
-    }
-    String firstHirarchy = "host,type,level";
-    String secondHirarchy = "host,level";
-    VNodeList list = new VNodeList();
-    try {
-
-      SolrUtil.setFacetPivot(solrQuery, 1, firstHirarchy,
-        secondHirarchy);
-
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      List<List<PivotField>> listFirstHirarchicalPivotFields = new ArrayList<List<PivotField>>();
-      NamedList<List<PivotField>> firstNamedList = response
-        .getFacetPivot();
-      if (firstNamedList != null) {
-        listFirstHirarchicalPivotFields = firstNamedList
-          .getAll(firstHirarchy);
-      }
-      List<List<PivotField>> listSecondHirarchicalPivotFields = new ArrayList<List<PivotField>>();
-      NamedList<List<PivotField>> secondNamedList = response
-        .getFacetPivot();
-      if (secondNamedList != null) {
-        listSecondHirarchicalPivotFields = secondNamedList
-          .getAll(secondHirarchy);
-      }
-      List<PivotField> firstHirarchicalPivotFields = new ArrayList<PivotField>();
-      List<PivotField> secondHirarchicalPivotFields = new ArrayList<PivotField>();
-      if (!listFirstHirarchicalPivotFields.isEmpty()) {
-        firstHirarchicalPivotFields = listFirstHirarchicalPivotFields
-          .get(0);
-      }
-      if (!listSecondHirarchicalPivotFields.isEmpty()) {
-        secondHirarchicalPivotFields = listSecondHirarchicalPivotFields
-          .get(0);
-      }
-      List<VNode> dataList = buidTreeData(firstHirarchicalPivotFields,
-        secondHirarchicalPivotFields, solrQuery,
-        LogSearchConstants.HOST, LogSearchConstants.COMPONENT);
-
-      list.setvNodeList(dataList);
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-
-    return list;
-  }
-
-  public String getHostListByComponent(SearchCriteria searchCriteria) {
-    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-    solrQuery.setParam("event", "/service/hosts/component");
-
-    VNodeList list = new VNodeList();
-    if (searchCriteria.getSortBy() == null) {
-      searchCriteria.setSortBy(LogSearchConstants.SOLR_HOST);
-      searchCriteria.setSortType(SolrQuery.ORDER.asc.toString());
-    }
-    queryGenerator.setFilterFacetSort(solrQuery, searchCriteria);
-    String componentName = ""
-      + ((searchCriteria.getParamValue("componentName") == null) ? ""
-      : searchCriteria.getParamValue("componentName"));
-    if (!StringUtils.isBlank(componentName)){
-      solrQuery.addFilterQuery(LogSearchConstants.SOLR_COMPONENT + ":"
-        + componentName);
-    } else {
-      return convertObjToString(list);
-    }
-
-    String firstHirarchy = "type,host,level";
-    String secondHirarchy = "type,level";
-
-    try {
-      SolrUtil.setFacetPivot(solrQuery, 1, firstHirarchy,
-        secondHirarchy);
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      List<List<PivotField>> firstHirarchicalPivotFields = null;
-      List<List<PivotField>> secondHirarchicalPivotFields = null;
-      NamedList<List<PivotField>> firstNamedList = response
-        .getFacetPivot();
-      if (firstNamedList != null) {
-        firstHirarchicalPivotFields = firstNamedList
-          .getAll(firstHirarchy);
-        secondHirarchicalPivotFields = firstNamedList
-          .getAll(secondHirarchy);
-      }
-
-      if (firstHirarchicalPivotFields == null
-        || secondHirarchicalPivotFields == null) {
-        return convertObjToString(list);
-      }
-
-      List<VNode> dataList = buidTreeData(
-        firstHirarchicalPivotFields.get(0),
-        secondHirarchicalPivotFields.get(0), solrQuery,
-        LogSearchConstants.COMPONENT, LogSearchConstants.HOST);
-      if(dataList == null){
-        return convertObjToString(list);
-      }
-
-      list.setvNodeList(dataList);
-      return convertObjToString(list);
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-  }
-
-  public VNameValueList getLogsLevelCount(SearchCriteria sc) {
-    VNameValueList nameValueList = new VNameValueList();
-    SolrQuery query = queryGenerator.commonServiceFilterQuery(sc);
-    query.setParam("event", "/service/logs/levels/counts/namevalues");
-    List<VNameValue> logsCounts = getLogLevelFacets(query);
-    nameValueList.setVNameValues(logsCounts);
-
-    return nameValueList;
-  }
-
-  public List<VNameValue> getLogLevelFacets(SolrQuery query) {
-    String defaultValue = "0";
-    HashMap<String, String> map = new HashMap<String, String>();
-    List<VNameValue> logsCounts = new ArrayList<VNameValue>();
-    try {
-      SolrUtil.setFacetField(query, LogSearchConstants.SOLR_LEVEL);
-      List<Count> logLevelCounts = getFacetCounts(query,
-          LogSearchConstants.SOLR_LEVEL);
-      if (logLevelCounts == null) {
-        return logsCounts;
-      }
-      for (Count count : logLevelCounts) {
-        map.put(count.getName().toUpperCase(), "" + count.getCount());
-      }
-      for (String level : LogSearchConstants.SUPPORTED_LOG_LEVEL) {
-        VNameValue nameValue = new VNameValue();
-        String value = map.get(level);
-        if (StringUtils.isBlank(value)) {
-          value = defaultValue;
-        }
-        nameValue.setName(level);
-        nameValue.setValue(value);
-        logsCounts.add(nameValue);
-      }
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + query, e);
-    }
-    return logsCounts;
-  }
-
-  // Get facet counts for the given facet field
-  public List<Count> getFacetCounts(SolrQuery solrQuery, String facetField)
-    throws SolrServerException, IOException, SolrException {
-    List<Count> list = new ArrayList<FacetField.Count>();
-
-    QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-    if(response == null){
-      return list;
-    }
-
-    FacetField field = response.getFacetField(facetField);
-    if (field == null) {
-      return list;
-    }
-    list = field.getValues();
-
-
-    return list;
-  }
-
-  public String getPageByKeyword(SearchCriteria searchCriteria)
-    throws SolrServerException {
-    String defaultChoice = "0";
-
-    String key = (String) searchCriteria.getParamValue("keyword");
-    if(StringUtils.isBlank(key)){
-      throw RESTErrorUtil.createRESTException("Keyword was not given",
-          MessageEnums.DATA_NOT_FOUND);
-    }
-
-    String keyword = SolrUtil.escapeForStandardTokenizer(key);
-
-    if(keyword.startsWith("\"") && keyword.endsWith("\"")){
-      keyword = keyword.substring(1);
-      keyword = keyword.substring(0, keyword.length()-1);
-    }
-    keyword = "*" + keyword + "*";
-
-
-    String keyType = (String) searchCriteria.getParamValue("keywordType");
-    QueryResponse queryResponse = null;
-
-    if (!defaultChoice.equals(keyType)) {
-      try {
-        int currentPageNumber = searchCriteria.getPage();
-        int maxRows = searchCriteria.getMaxRows();
-        String nextPageLogID = "";
-
-        int lastLogIndexNumber = ((currentPageNumber + 1)
-          * maxRows);
-        String nextPageLogTime = "";
-
-
-        // Next Page Start Time Calculation
-        SolrQuery nextPageLogTimeQuery = queryGenerator
-          .commonServiceFilterQuery(searchCriteria);
-        nextPageLogTimeQuery.remove("start");
-        nextPageLogTimeQuery.remove("rows");
-        nextPageLogTimeQuery.setStart(lastLogIndexNumber);
-        nextPageLogTimeQuery.setRows(1);
-
-        queryResponse = serviceLogsSolrDao.process(
-            nextPageLogTimeQuery);
-        if(queryResponse == null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-
-        SolrDocumentList docList = queryResponse.getResults();
-        if(docList ==null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-
-        SolrDocument solrDoc = docList.get(0);
-
-        Date logDate = (Date) solrDoc.get(LogSearchConstants.LOGTIME);
-        if(logDate == null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-        nextPageLogTime = DateUtil
-          .convertDateWithMillisecondsToSolrDate(logDate);
-        nextPageLogID = ""
-          + solrDoc.get(LogSearchConstants.ID);
-
-        if (StringUtils.isBlank(nextPageLogID)){
-          nextPageLogID = "0";
-        }
-
-        String filterQueryListIds = "";
-        // Remove the same Time Ids
-        SolrQuery listRemoveIds = queryGenerator
-          .commonServiceFilterQuery(searchCriteria);
-        listRemoveIds.remove("start");
-        listRemoveIds.remove("rows");
-        queryGenerator.setSingleIncludeFilter(listRemoveIds,
-          LogSearchConstants.LOGTIME, "\"" + nextPageLogTime + "\"");
-        queryGenerator.setSingleExcludeFilter(listRemoveIds,
-          LogSearchConstants.ID, nextPageLogID);
-        SolrUtil.setFl(listRemoveIds, LogSearchConstants.ID);
-        queryResponse = serviceLogsSolrDao.process(
-            listRemoveIds);
-        if(queryResponse == null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-
-        SolrDocumentList docListIds = queryResponse.getResults();
-        if(docListIds ==null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-        boolean isFirst = true;
-        for (SolrDocument solrDocId :  docListIds ) {
-          String id = "" + solrDocId.get(LogSearchConstants.ID);
-          if (isFirst) {
-            filterQueryListIds += LogSearchConstants.MINUS_OPERATOR + LogSearchConstants.ID + ":" + id;
-            isFirst = false;
-          } else {
-            filterQueryListIds += " "+CONDITION.AND+" " + LogSearchConstants.MINUS_OPERATOR + LogSearchConstants.ID + ":" + id;
-          }
-        }
-
-        // Keyword Sequence Number Calculation
-        String endTime = (String) searchCriteria.getParamValue("to");
-        String startTime = (String) searchCriteria
-          .getParamValue("from");
-        SolrQuery logTimeThroughRangeQuery = queryGenerator
-          .commonServiceFilterQuery(searchCriteria);
-        logTimeThroughRangeQuery.remove("start");
-        logTimeThroughRangeQuery.remove("rows");
-        logTimeThroughRangeQuery.setRows(1);
-        if (!StringUtils.isBlank(filterQueryListIds)){
-          logTimeThroughRangeQuery.setFilterQueries(filterQueryListIds);
-        }
-
-        String sortByType = searchCriteria.getSortType();
-
-        if (!StringUtils.isBlank(sortByType) && sortByType
-          .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
-
-          queryGenerator.setSingleRangeFilter(logTimeThroughRangeQuery,
-            LogSearchConstants.LOGTIME, nextPageLogTime,
-            endTime);
-          logTimeThroughRangeQuery.set(LogSearchConstants.SORT,
-            LogSearchConstants.LOGTIME + " "
-              + LogSearchConstants.ASCENDING_ORDER);
-
-        } else {
-
-          queryGenerator.setSingleRangeFilter(logTimeThroughRangeQuery,
-            LogSearchConstants.LOGTIME, startTime,
-            nextPageLogTime);
-          logTimeThroughRangeQuery.set(LogSearchConstants.SORT,
-            LogSearchConstants.LOGTIME + " "
-              + LogSearchConstants.DESCENDING_ORDER);
-        }
-        queryGenerator.setSingleIncludeFilter(logTimeThroughRangeQuery,
-          LogSearchConstants.SOLR_KEY_LOG_MESSAGE, keyword);
-
-
-        queryResponse = serviceLogsSolrDao.process(
-            logTimeThroughRangeQuery);
-        if(queryResponse == null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-
-        SolrDocumentList documentList = queryResponse.getResults();
-        if(documentList ==null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-
-        SolrDocument solrDocument = new SolrDocument();
-        if (!documentList.isEmpty()){
-          solrDocument = documentList.get(0);
-        }
-
-        Date keywordLogDate = (Date) solrDocument.get(LogSearchConstants.LOGTIME);
-        if(keywordLogDate == null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-        String originalKeywordDate = DateUtil
-          .convertDateWithMillisecondsToSolrDate(keywordLogDate);
-        String keywordId = "" + solrDocument.get(LogSearchConstants.ID);
-
-        // Getting Range Count from StartTime To Keyword Log Time
-        SolrQuery rangeLogQuery = nextPageLogTimeQuery.getCopy();
-        rangeLogQuery.remove("start");
-        rangeLogQuery.remove("rows");
-
-        if (!StringUtils.isBlank(sortByType) && sortByType
-          .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
-          keywordLogDate = DateUtils.addMilliseconds(keywordLogDate, 1);
-          String keywordDateTime = DateUtil
-            .convertDateWithMillisecondsToSolrDate(keywordLogDate);
-          queryGenerator.setSingleRangeFilter(rangeLogQuery,
-            LogSearchConstants.LOGTIME, startTime,
-            keywordDateTime);
-        } else {
-          keywordLogDate = DateUtils.addMilliseconds(keywordLogDate, -1);
-          String keywordDateTime = DateUtil
-            .convertDateWithMillisecondsToSolrDate(keywordLogDate);
-          queryGenerator.setSingleRangeFilter(rangeLogQuery,
-            LogSearchConstants.LOGTIME, keywordDateTime,
-            endTime);
-        }
-
-
-        long countNumberLogs = countQuery(rangeLogQuery,serviceLogsSolrDao) - 1;
-
-
-        // Add logs sharing the keyword's timestamp, up to and including the keyword log
-
-
-        try {
-          SolrQuery sameIdQuery = queryGenerator
-            .commonServiceFilterQuery(searchCriteria);
-          queryGenerator.setSingleIncludeFilter(sameIdQuery,
-            LogSearchConstants.LOGTIME, "\"" + originalKeywordDate + "\"");
-          SolrUtil.setFl(sameIdQuery, LogSearchConstants.ID);
-          SolrDocumentList sameQueryDocList = serviceLogsSolrDao.process(sameIdQuery)
-            .getResults();
-          for (SolrDocument solrDocumenent : sameQueryDocList) {
-            String id = (String) solrDocumenent
-              .getFieldValue(LogSearchConstants.ID);
-            countNumberLogs++;
-           
-            if (!StringUtils.isBlank(id) && id.equals(keywordId)){
-              break;
-            }
-          }
-        } catch (SolrException | SolrServerException | IOException e) {
-          logger.error(e);
-        }
-
-        int start = (int) ((countNumberLogs / maxRows) * maxRows);
-        SolrQuery logIdQuery = nextPageLogTimeQuery.getCopy();
-        rangeLogQuery.remove("start");
-        rangeLogQuery.remove("rows");
-        logIdQuery.setStart(start);
-        logIdQuery.setRows(searchCriteria.getMaxRows());
-        VSolrLogList vSolrLogList = getLogAsPaginationProvided(logIdQuery, serviceLogsSolrDao);
-        return convertObjToString(vSolrLogList);
-
-      } catch (Exception e) {
-        //do nothing
-      }
-
-    } else {
-      try {
-        int currentPageNumber = searchCriteria.getPage();
-        int maxRows = searchCriteria.getMaxRows();
-
-        if (currentPageNumber == 0) {
-          throw RESTErrorUtil.createRESTException("This is first Page Not",
-            MessageEnums.DATA_NOT_FOUND);
-        }
-
-        int firstLogCurrentPage = (currentPageNumber * maxRows);
-        String lastLogsLogTime = "";
-
-        // Next Page Start Time Calculation
-        SolrQuery lastLogTime = queryGenerator
-          .commonServiceFilterQuery(searchCriteria);
-        lastLogTime.remove("start");
-        lastLogTime.remove("rows");
-
-        lastLogTime.setStart(firstLogCurrentPage);
-        lastLogTime.setRows(1);
-
-        queryResponse = serviceLogsSolrDao.process(
-            lastLogTime);
-        if(queryResponse == null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-
-        SolrDocumentList docList = queryResponse.getResults();
-        if(docList ==null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-        SolrDocument solrDoc = docList.get(0);
-
-        Date logDate = (Date) solrDoc.get(LogSearchConstants.LOGTIME);
-        String sortByType = searchCriteria.getSortType();
-        lastLogsLogTime = DateUtil
-          .convertDateWithMillisecondsToSolrDate(logDate);
-        String lastLogsLogId = ""
-          + solrDoc.get(LogSearchConstants.ID);
-
-
-        String filterQueryListIds = "";
-        // Remove the same Time Ids
-        SolrQuery listRemoveIds = queryGenerator
-          .commonServiceFilterQuery(searchCriteria);
-        listRemoveIds.remove("start");
-        listRemoveIds.remove("rows");
-        queryGenerator.setSingleIncludeFilter(listRemoveIds,
-          LogSearchConstants.LOGTIME, "\"" + lastLogsLogTime + "\"");
-        queryGenerator.setSingleExcludeFilter(listRemoveIds,
-          LogSearchConstants.ID, lastLogsLogId);
-        SolrUtil.setFl(listRemoveIds, LogSearchConstants.ID);
-        queryResponse = serviceLogsSolrDao.process(
-            lastLogTime);
-        if(queryResponse == null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-
-        SolrDocumentList docListIds = queryResponse.getResults();
-        if(docListIds == null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-        boolean isFirst = true;
-        for (SolrDocument solrDocId : docListIds) {
-          if (solrDocId != null) {
-            String id = "" + solrDocId.get(LogSearchConstants.ID);
-            if (isFirst) {
-              filterQueryListIds += LogSearchConstants.MINUS_OPERATOR + LogSearchConstants.ID + ":" + id;
-              isFirst = false;
-            } else {
-              filterQueryListIds += " "+CONDITION.AND+" " + LogSearchConstants.MINUS_OPERATOR + LogSearchConstants.ID + ":"
-                  + id;
-            }
-          }
-        }
-
-
-        // Keyword LogTime Calculation
-        String endTime = (String) searchCriteria.getParamValue("to");
-        String startTime = (String) searchCriteria
-          .getParamValue("from");
-        SolrQuery logTimeThroughRangeQuery = queryGenerator
-          .commonServiceFilterQuery(searchCriteria);
-        logTimeThroughRangeQuery.remove("start");
-        logTimeThroughRangeQuery.remove("rows");
-        logTimeThroughRangeQuery.setRows(1);
-        queryGenerator.setSingleExcludeFilter(logTimeThroughRangeQuery,
-          LogSearchConstants.ID, lastLogsLogId);
-        if (!StringUtils.isBlank(filterQueryListIds)){
-          logTimeThroughRangeQuery.setFilterQueries(filterQueryListIds);
-        }
-
-        if (!StringUtils.isBlank(sortByType) && sortByType
-          .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
-
-          logTimeThroughRangeQuery.remove(LogSearchConstants.SORT);
-          logTimeThroughRangeQuery.set(LogSearchConstants.SORT,
-            LogSearchConstants.LOGTIME + " "
-              + LogSearchConstants.DESCENDING_ORDER);
-
-
-          queryGenerator.setSingleRangeFilter(
-            logTimeThroughRangeQuery,
-            LogSearchConstants.LOGTIME, startTime,
-            lastLogsLogTime);
-
-        } else {
-
-          logTimeThroughRangeQuery.remove(LogSearchConstants.SORT);
-          logTimeThroughRangeQuery.set(LogSearchConstants.SORT,
-            LogSearchConstants.LOGTIME + " "
-              + LogSearchConstants.ASCENDING_ORDER);
-
-
-          queryGenerator.setSingleRangeFilter(logTimeThroughRangeQuery,
-            LogSearchConstants.LOGTIME, lastLogsLogTime, endTime);
-        }
-        queryGenerator.setSingleIncludeFilter(logTimeThroughRangeQuery,
-          LogSearchConstants.SOLR_KEY_LOG_MESSAGE, keyword);
-
-
-        queryResponse = serviceLogsSolrDao.process(
-            logTimeThroughRangeQuery);
-        if(queryResponse == null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-
-        SolrDocumentList documentList = queryResponse.getResults();
-        if(documentList == null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-        SolrDocument solrDocument = new SolrDocument();
-        if (!documentList.isEmpty()){
-          solrDocument = documentList.get(0);
-        }
-
-        Date keywordLogDate = (Date) solrDocument.get(LogSearchConstants.LOGTIME);
-        if(keywordLogDate == null){
-          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-              MessageEnums.ERROR_SYSTEM);
-        }
-        String originalKeywordDate = DateUtil
-          .convertDateWithMillisecondsToSolrDate(keywordLogDate);
-        String keywordId = "" + solrDocument.get(LogSearchConstants.ID);
-
-        // Getting Range Count from StartTime To Keyword Log Time
-        SolrQuery rangeLogQuery = lastLogTime.getCopy();
-        rangeLogQuery.remove("start");
-        rangeLogQuery.remove("rows");
-
-        if (!StringUtils.isBlank(sortByType) && sortByType
-          .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
-       //   keywordLogDate = DateUtil.addMilliSecondsToDate(keywordLogDate, 1);
-          String keywordDateTime = DateUtil
-            .convertDateWithMillisecondsToSolrDate(keywordLogDate);
-          queryGenerator.setSingleRangeFilter(rangeLogQuery,
-            LogSearchConstants.LOGTIME, startTime,
-            keywordDateTime);
-
-
-        } else {
-     //     keywordLogDate = DateUtil.addMilliSecondsToDate(keywordLogDate, -1);
-          String keywordDateTime = DateUtil
-            .convertDateWithMillisecondsToSolrDate(keywordLogDate);
-          queryGenerator.setSingleRangeFilter(rangeLogQuery,
-            LogSearchConstants.LOGTIME, keywordDateTime,
-            endTime);
-        }
-
-
-        long countNumberLogs = countQuery(rangeLogQuery,serviceLogsSolrDao) - 1;
-
-        // Add logs sharing the keyword's timestamp, up to and including the keyword log
-        try {
-          SolrQuery sameIdQuery = queryGenerator
-            .commonServiceFilterQuery(searchCriteria);
-          queryGenerator.setSingleIncludeFilter(sameIdQuery,
-            LogSearchConstants.LOGTIME, "\"" + originalKeywordDate + "\"");
-          SolrUtil.setFl(sameIdQuery, LogSearchConstants.ID);
-          SolrDocumentList sameQueryDocList = serviceLogsSolrDao.process(sameIdQuery)
-            .getResults();
-          for (SolrDocument solrDocumenent : sameQueryDocList) {
-            if (solrDocumenent != null) {
-              String id = (String) solrDocumenent
-                  .getFieldValue(LogSearchConstants.ID);
-              countNumberLogs++;
-              if (!StringUtils.isBlank(id) && id.equals(keywordId)) {
-                break;
-              }
-            }
-          }
-        } catch (SolrException | SolrServerException | IOException e) {
-          logger.error(e);
-        }
-        int start = (int) ((countNumberLogs / maxRows) * maxRows);
-
-        SolrQuery logIdQuery = lastLogTime.getCopy();
-        rangeLogQuery.remove("start");
-        rangeLogQuery.remove("rows");
-        logIdQuery.setStart(start);
-        logIdQuery.setRows(searchCriteria.getMaxRows());
-        VSolrLogList vSolrLogList = getLogAsPaginationProvided(logIdQuery, serviceLogsSolrDao);
-        return convertObjToString(vSolrLogList);
-      } catch (Exception e) {
-        //do nothing
-      }
-
-    }
-    throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
-        MessageEnums.ERROR_SYSTEM);
-  }
-
-  private String getPageByLogId(SearchCriteria searchCriteria) {
-    VSolrLogList vSolrLogList = new VSolrLogList();
-    String endLogTime = (String) searchCriteria.getParamValue("to");
-    if(StringUtils.isBlank(endLogTime)){
-      return convertObjToString(vSolrLogList);
-    }
-    long startIndex = 0L;
-
-    String logId = (String) searchCriteria.getParamValue("sourceLogId");
-    if(StringUtils.isBlank(logId)){
-      return convertObjToString(vSolrLogList);
-    }
-    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-
-    String endTimeMinusOneMilli = "";
-    String logTime = "";
-    try {
-
-      SolrQuery logTimeByIdQuery = new SolrQuery();
-      SolrUtil.setMainQuery(logTimeByIdQuery, null);
-      queryGenerator.setSingleIncludeFilter(logTimeByIdQuery,
-          LogSearchConstants.ID, logId);
-      SolrUtil.setRowCount(logTimeByIdQuery, 1);
-
-      QueryResponse queryResponse = serviceLogsSolrDao
-          .process(logTimeByIdQuery);
-
-      if(queryResponse == null){
-        return convertObjToString(new VSolrLogList());
-      }
-
-      SolrDocumentList docList = queryResponse.getResults();
-      Date dateOfLogId = null;
-      if (docList != null && !docList.isEmpty()) {
-        SolrDocument dateLogIdDoc = docList.get(0);
-        if(dateLogIdDoc != null){
-          dateOfLogId = (Date) dateLogIdDoc.get(LogSearchConstants.LOGTIME);
-        }
-      }
-
-      if (dateOfLogId != null) {
-        logTime = DateUtil.convertDateWithMillisecondsToSolrDate(dateOfLogId);
-        Date endDate = DateUtils.addMilliseconds(dateOfLogId, 1);
-        endTimeMinusOneMilli = (String) DateUtil
-            .convertDateWithMillisecondsToSolrDate(endDate);
-      }
-
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error(e);
-    }
-
-    try {
-      solrQuery.remove(LogSearchConstants.ID);
-      solrQuery.remove(LogSearchConstants.LOGTIME);
-      queryGenerator.setSingleRangeFilter(solrQuery,
-          LogSearchConstants.LOGTIME, endTimeMinusOneMilli, endLogTime);
-      SolrUtil.setRowCount(solrQuery, 0);
-      startIndex = countQuery(solrQuery,serviceLogsSolrDao);
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error(e);
-    }
-
-    try {
-      SolrQuery sameIdQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-      queryGenerator.setSingleIncludeFilter(sameIdQuery,
-          LogSearchConstants.LOGTIME, "\"" + logTime + "\"");
-      sameIdQuery.set("fl", LogSearchConstants.ID);
-
-      QueryResponse sameIdResponse = serviceLogsSolrDao.process(sameIdQuery);
-      SolrDocumentList docList = sameIdResponse.getResults();
-
-      for (SolrDocument solrDocumenent : docList) {
-        String id = (String) solrDocumenent
-            .getFieldValue(LogSearchConstants.ID);
-        startIndex++;
-        if (!StringUtils.isBlank(id)) {
-          if (id.equals(logId)) {
-            break;
-          }
-        }
-      }
-
-      SolrQuery logIdQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-      logIdQuery.remove("rows");
-      logIdQuery.remove("start");
-      int start = (int) ((startIndex / searchCriteria.getMaxRows()) * searchCriteria
-          .getMaxRows());
-      logIdQuery.setStart(start);
-      logIdQuery.setRows(searchCriteria.getMaxRows());
-      vSolrLogList = getLogAsPaginationProvided(logIdQuery,
-          serviceLogsSolrDao);
-      return convertObjToString(vSolrLogList);
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error(e);
-    }
-
-    throw RESTErrorUtil.createRESTException("LogId not Found",
-        MessageEnums.ERROR_SYSTEM);
-  }
-
-  @SuppressWarnings("unchecked")
-  public List<VNameValue> getHistogramCounts(SolrQuery solrQuery,
-                                             String from, String to, String unit) {
-    List<VNameValue> logsCounts = new ArrayList<VNameValue>();
-    try {
-
-      SolrUtil.setFacetRange(solrQuery, LogSearchConstants.LOGTIME,
-        from, to, unit);
-
-      List<RangeFacet.Count> logLevelCounts = null;
-
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      if(response == null){
-        return logsCounts;
-      }
-      @SuppressWarnings("rawtypes")
-      List<RangeFacet> rangeFacetList = response.getFacetRanges();
-      if (rangeFacetList == null) {
-        return logsCounts;
-
-      }
-
-      @SuppressWarnings("rawtypes")
-      RangeFacet rangeFacet=rangeFacetList.get(0);
-      if (rangeFacet == null) {
-        return logsCounts;
-      }
-      logLevelCounts = rangeFacet.getCounts();
-
-      if(logLevelCounts == null){
-        return logsCounts;
-      }
-      for (RangeFacet.Count logCount : logLevelCounts) {
-        VNameValue nameValue = new VNameValue();
-        nameValue.setName(logCount.getValue());
-        nameValue.setValue("" + logCount.getCount());
-        logsCounts.add(nameValue);
-      }
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-    }
-    return logsCounts;
-  }
-
-  public List<Count> getFacetCountsByDate(SolrQuery solrQuery,
-                                          String facetField) throws SolrServerException, IOException,
-    SolrException {
-
-    QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-
-    FacetField field = response.getFacetDate(facetField);
-    return field.getValues();
-  }
-
-  @SuppressWarnings("unchecked")
-  public String getHistogramData(SearchCriteria searchCriteria) {
-    String defaultValue = "0";
-    VBarDataList dataList = new VBarDataList();
-    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-    solrQuery.set("event", "/audit/logs/histogram");
-    String from = getFrom((String) searchCriteria.getParamValue("from"));
-    String to = getTo((String) searchCriteria.getParamValue("to"));
-    String unit = getUnit((String) searchCriteria.getParamValue("unit"));
-
-    List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();
-
-    String jsonHistogramQuery = queryGenerator
-      .buildJSONFacetTermTimeRangeQuery(
-        LogSearchConstants.SOLR_LEVEL,
-        LogSearchConstants.LOGTIME, from, to, unit).replace(
-        "\\", "");
-
-    try {
-      SolrUtil.setJSONFacet(solrQuery, jsonHistogramQuery);
-      SolrUtil.setRowCount(solrQuery,Integer.parseInt(deafalutValue));
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      if (response == null){
-        return convertObjToString(dataList);
-      }
-      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
-        .getResponse().get("facets");
-
-      if (jsonFacetResponse == null
-        || jsonFacetResponse.toString().equals("{count=0}")){
-        return convertObjToString(dataList);
-      }
-
-      extractValuesFromBuckets(jsonFacetResponse, "x", "y", histogramData);
-
-      Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
-      List<VBarGraphData> graphDatas = new ArrayList<VBarGraphData>();
-      for (String level : LogSearchConstants.SUPPORTED_LOG_LEVEL) {
-        boolean isLevelPresent = false;
-        VBarGraphData vData1 = null;
-        for (VBarGraphData vData2 : histogramData) {
-          String name = vData2.getName();
-          if (level.contains(name)) {
-            isLevelPresent = true;
-            vData1 = vData2;
-            break;
-          }
-          if (vNameValues.isEmpty()) {
-            Collection<VNameValue> vNameValues2 = vData2
-              .getDataCount();
-            for (VNameValue value : vNameValues2) {
-              VNameValue value2 = new VNameValue();
-              value2.setValue(deafalutValue);
-              value2.setName(value.getName());
-              vNameValues.add(value2);
-            }
-          }
-        }
-        if (!isLevelPresent) {
-          VBarGraphData vBarGraphData = new VBarGraphData();
-          vBarGraphData.setName(level);
-          vBarGraphData.setDataCounts(vNameValues);
-          graphDatas.add(vBarGraphData);
-        } else {
-          graphDatas.add(vData1);
-        }
-      }
-
-      dataList.setGraphData(graphDatas);
-      return convertObjToString(dataList);
-
-    } catch (SolrServerException | SolrException | IOException e) {
-      logger.error(e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-
-    }
-  }
-
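
getHistogramData above attaches a nested JSON facet (built by buildJSONFacetTermTimeRangeQuery) and later walks its "x" and "y" buckets in extractValuesFromBuckets, further down in this class. The exact facet string the helper produces is not shown in this hunk; the sketch below is an assumed shape that is merely consistent with those two keys, with illustrative field names, time window and gap.

  import org.apache.solr.client.solrj.SolrQuery;

  public class HistogramFacetSketch {
    // Returns a query whose response should carry the nested "x"/"y" buckets that
    // extractValuesFromBuckets walks: an outer terms facet per log level and,
    // inside each level, one range bucket per time slice. The facet shape is an
    // assumption; field names, window and gap are placeholders.
    public static SolrQuery buildHistogramQuery() {
      String jsonFacet =
          "{x:{type:terms,field:level,"
        +   "facet:{y:{type:range,field:logtime,"
        +     "start:\"2016-09-07T00:00:00.000Z\","
        +     "end:\"2016-09-08T00:00:00.000Z\","
        +     "gap:\"+1HOUR\"}}}}";
      SolrQuery query = new SolrQuery("*:*");
      query.setRows(0);                  // only the facet counts are needed
      query.set("json.facet", jsonFacet);
      return query;
    }
  }
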
-  public void arrangeLevel(String level,
-                           List<VBarGraphData> histogramDataLocal,
-                           List<VBarGraphData> histogramData) {
-    for (VBarGraphData histData : histogramData) {
-      if (histData != null && level.equals(histData.getName())) {
-        histogramDataLocal.add(histData);
-      }
-    }
-  }
-
-  public String cancelFindRequestByDate(String uniqueId) {
-    if (StringUtils.isEmpty(uniqueId)) {
-      logger.error("Unique id is Empty");
-      throw RESTErrorUtil.createRESTException("Unique id is Empty",
-        MessageEnums.DATA_NOT_FOUND);
-    }
-
-    if (cancelByDate.remove(uniqueId)) {
-      mapUniqueId.remove(uniqueId);
-      return "Cancel Request Successfully Procssed ";
-    }
-    return "Cancel Request Unable to Process";
-  }
-
-  public boolean cancelRequest(String uniqueId) {
-    if (StringUtils.isBlank(uniqueId)) {
-      logger.error("Unique id is Empty");
-      throw RESTErrorUtil.createRESTException("Unique id is Empty",
-        MessageEnums.DATA_NOT_FOUND);
-    }
-    for (String date : cancelByDate) {
-      if (uniqueId.equalsIgnoreCase(date)){
-        return false;
-      }
-    }
-    return true;
-  }
-
-  public Response exportToTextFile(SearchCriteria searchCriteria) {
-    String defaultFormat = "text";
-    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-    String from = (String) searchCriteria.getParamValue("from");
-    String to = (String) searchCriteria.getParamValue("to");
-    String utcOffset = (String) searchCriteria.getParamValue("utcOffset");
-    String format = (String) searchCriteria.getParamValue("format");
-
-    format = defaultFormat.equalsIgnoreCase(format) && format != null ? ".txt"
-        : ".json";
-    
-    if(StringUtils.isBlank(utcOffset)){
-      utcOffset = "0";
-    }
-
-    if (!DateUtil.isDateValid(from) || !DateUtil.isDateValid(to)) {
-      logger.error("Not valid date format. Valid format should be"
-          + LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z);
-      throw RESTErrorUtil.createRESTException("Not valid date format. Valid format should be"
-          + LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z,
-          MessageEnums.INVALID_INPUT_DATA);
-
-    } else {
-      from = from.replace("T", " ");
-      from = from.replace(".", ",");
-
-      to = to.replace("T", " ");
-      to = to.replace(".", ",");
-
-      to = DateUtil.addOffsetToDate(to, Long.parseLong(utcOffset),
-          "yyyy-MM-dd HH:mm:ss,SSS");
-      from = DateUtil.addOffsetToDate(from, Long.parseLong(utcOffset),
-          "yyyy-MM-dd HH:mm:ss,SSS");
-    }
-
-    String fileName = DateUtil.getCurrentDateInString();
-    if (searchCriteria.getParamValue("hostLogFile") != null
-      && searchCriteria.getParamValue("compLogFile") != null) {
-      fileName = searchCriteria.getParamValue("hostLogFile") + "_"
-        + searchCriteria.getParamValue("compLogFile");
-    }
-
-    String textToSave = "";
-    try {
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      if (response == null) {
-        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-            .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-      }
-      SolrDocumentList docList = response.getResults();
-      if (docList == null) {
-        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-            .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-      }
-
-      VSummary vsummary = BizUtil.buildSummaryForLogFile(docList);
-      vsummary.setFormat(format);
-      vsummary.setFrom(from);
-      vsummary.setTo(to);
-
-      String includeString = (String) searchCriteria.getParamValue("iMessage");
-      if (StringUtils.isBlank(includeString)) {
-        includeString = "";
-      }
-
-      String include[] = includeString.split(LogSearchConstants.I_E_SEPRATOR);
-
-      for (String inc : include) {
-        includeString = includeString + ",\"" + inc + "\"";
-      }
-      includeString = includeString.replaceFirst(",", "");
-      if (!StringUtils.isBlank(includeString)) {
-        vsummary.setIncludeString(includeString);
-      }
-
-      String excludeString = null;
-      boolean isNormalExcluded = false;
-
-      excludeString = (String) searchCriteria.getParamValue("eMessage");
-      if (StringUtils.isBlank(excludeString)) {
-        excludeString = "";
-      }
-
-      String exclude[] = excludeString.split(LogSearchConstants.I_E_SEPRATOR);
-      for (String exc : exclude) {
-        excludeString = excludeString + ",\"" + exc + "\"";
-      }
-
-      excludeString = excludeString.replaceFirst(",", "");
-      if (!StringUtils.isBlank(excludeString)) {
-        vsummary.setExcludeString(excludeString);
-        isNormalExcluded = true;
-      }
-
-      String globalExcludeString = (String) searchCriteria
-          .getParamValue("gEMessage");
-      if (StringUtils.isBlank(globalExcludeString)) {
-        globalExcludeString = "";
-      }
-
-      String globalExclude[] = globalExcludeString
-          .split(LogSearchConstants.I_E_SEPRATOR);
-
-      for (String exc : globalExclude) {
-        excludeString = excludeString + ",\"" + exc + "\"";
-      }
-
-      if (!StringUtils.isBlank(excludeString)) {
-        if (!isNormalExcluded) {
-          excludeString = excludeString.replaceFirst(",", "");
-        }
-        vsummary.setExcludeString(excludeString);
-      }
-
-      for (SolrDocument solrDoc : docList) {
-
-        Date logTimeDateObj = (Date) solrDoc.get(LogSearchConstants.LOGTIME);
-        if(logTimeDateObj != null){
-        String logTime = DateUtil.convertSolrDateToNormalDateFormat(
-            logTimeDateObj.getTime(), Long.parseLong(utcOffset));
-        solrDoc.remove(LogSearchConstants.LOGTIME);
-        solrDoc.addField(LogSearchConstants.LOGTIME, logTime);
-        }
-      }
-
-      if (format.toLowerCase(Locale.ENGLISH).equals(".txt")) {
-        textToSave = BizUtil.convertObjectToNormalText(docList);
-      } else if (format.toLowerCase(Locale.ENGLISH).equals(".json")) {
-        textToSave = convertObjToString(docList);
-      } else {
-        throw RESTErrorUtil.createRESTException(
-            "unsoported format either should be json or text",
-            MessageEnums.ERROR_SYSTEM);
-      }
-      return FileUtil.saveToFile(textToSave, fileName, vsummary);
-
-    } catch (SolrException | SolrServerException | IOException
-      | ParseException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-  }
-
-  public String getComponentListWithLevelCounts(SearchCriteria searchCriteria) {
-    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-    solrQuery.setParam("event", "/service/logs/components/level/counts");
-
-    if (searchCriteria.getSortBy() == null) {
-      searchCriteria.setSortBy(LogSearchConstants.SOLR_COMPONENT);
-      searchCriteria.setSortType(SolrQuery.ORDER.asc.toString());
-    }
-    queryGenerator.setFilterFacetSort(solrQuery, searchCriteria);
-    String componentLevelHirachy = "type,level";
-    VNodeList list = new VNodeList();
-    try {
-
-      SolrUtil.setFacetPivot(solrQuery, 1, componentLevelHirachy);
-
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-
-      List<List<PivotField>> listPivotField = new ArrayList<List<PivotField>>();
-      NamedList<List<PivotField>> namedList = response.getFacetPivot();
-      if (namedList != null) {
-        listPivotField = namedList.getAll(componentLevelHirachy);
-      }
-      List<PivotField> secondHirarchicalPivotFields = null;
-      if (listPivotField == null || listPivotField.isEmpty()) {
-        return convertObjToString(list);
-      } else {
-        secondHirarchicalPivotFields = listPivotField.get(0);
-      }
-      List<VNode> datatList = new ArrayList<VNode>();
-      for (PivotField singlePivotField : secondHirarchicalPivotFields) {
-        if (singlePivotField != null) {
-          VNode comp = new VNode();
-          comp.setName("" + singlePivotField.getValue());
-          List<PivotField> levelList = singlePivotField.getPivot();
-          List<VNameValue> levelCountList = new ArrayList<VNameValue>();
-          comp.setLogLevelCount(levelCountList);
-          if (levelList != null) {
-            for (PivotField levelPivot : levelList) {
-              VNameValue level = new VNameValue();
-              level.setName(("" + levelPivot.getValue()).toUpperCase());
-              level.setValue("" + levelPivot.getCount());
-              levelCountList.add(level);
-            }
-          }
-          datatList.add(comp);
-        }
-      }
-      list.setvNodeList(datatList);
-      return convertObjToString(list);
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error(e.getMessage() + " SolrQuery=" + solrQuery);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-  }
-
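
getComponentListWithLevelCounts above pivots on "type,level" through SolrUtil.setFacetPivot and unpacks the two-level PivotField tree. The same pivot can be requested with stock SolrJ calls; a minimal sketch follows, with the Solr URL and client construction as placeholders (the "type,level" hierarchy matches the code above).

  import java.util.List;

  import org.apache.solr.client.solrj.SolrClient;
  import org.apache.solr.client.solrj.SolrQuery;
  import org.apache.solr.client.solrj.impl.HttpSolrClient;
  import org.apache.solr.client.solrj.response.PivotField;
  import org.apache.solr.client.solrj.response.QueryResponse;
  import org.apache.solr.common.util.NamedList;

  public class ComponentLevelPivotSketch {
    public static void main(String[] args) throws Exception {
      SolrClient solr = new HttpSolrClient.Builder("http://localhost:8886/solr/hadoop_logs").build();

      SolrQuery query = new SolrQuery("*:*");
      query.setRows(0);
      query.setFacet(true);
      query.setFacetMinCount(1);
      query.addFacetPivotField("type,level");   // component first, log level nested under it

      QueryResponse response = solr.query(query);
      NamedList<List<PivotField>> pivots = response.getFacetPivot();
      for (PivotField component : pivots.get("type,level")) {
        System.out.println(component.getValue() + " (" + component.getCount() + ")");
        if (component.getPivot() != null) {
          for (PivotField level : component.getPivot()) {
            System.out.println("  " + level.getValue() + " = " + level.getCount());
          }
        }
      }
      solr.close();
    }
  }
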
-  public String getExtremeDatesForBundelId(SearchCriteria searchCriteria) {
-    SolrQuery solrQuery = new SolrQuery();
-    VNameValueList nameValueList = new VNameValueList();
-    try {
-      String bundelId = (String) searchCriteria
-        .getParamValue(LogSearchConstants.BUNDLE_ID);
-      if(StringUtils.isBlank(bundelId)){
-        bundelId = "";
-      }
-
-      queryGenerator.setSingleIncludeFilter(solrQuery,
-        LogSearchConstants.BUNDLE_ID, bundelId);
-
-      SolrUtil.setMainQuery(solrQuery, null);
-      solrQuery.setSort(LogSearchConstants.LOGTIME, SolrQuery.ORDER.asc);
-      SolrUtil.setRowCount(solrQuery, 1);
-
-      List<VNameValue> vNameValues = new ArrayList<VNameValue>();
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-
-      if(response == null){
-        return convertObjToString(nameValueList);
-      }
-
-      SolrDocumentList solrDocList = response.getResults();
-      if(solrDocList == null){
-        return convertObjToString(nameValueList);
-      }
-      for (SolrDocument solrDoc : solrDocList) {
-
-        Date logTimeAsc = (Date) solrDoc
-          .getFieldValue(LogSearchConstants.LOGTIME);
-        if (logTimeAsc != null) {
-          VNameValue nameValue = new VNameValue();
-          nameValue.setName("From");
-          nameValue.setValue("" + logTimeAsc.getTime());
-          vNameValues.add(nameValue);
-        }
-      }
-
-      solrQuery.clear();
-      SolrUtil.setMainQuery(solrQuery, null);
-      queryGenerator.setSingleIncludeFilter(solrQuery,
-        LogSearchConstants.BUNDLE_ID, bundelId);
-      solrQuery.setSort(LogSearchConstants.LOGTIME, SolrQuery.ORDER.desc);
-      SolrUtil.setRowCount(solrQuery, 1);
-
-      solrDocList.clear();
-      response = serviceLogsSolrDao.process(solrQuery);
-
-      solrDocList = response.getResults();
-      for (SolrDocument solrDoc : solrDocList) {
-        if (solrDoc != null) {
-          Date logTimeDesc = (Date) solrDoc
-              .getFieldValue(LogSearchConstants.LOGTIME);
-
-          if (logTimeDesc != null) {
-            VNameValue nameValue = new VNameValue();
-            nameValue.setName("To");
-            nameValue.setValue("" + logTimeDesc.getTime());
-            vNameValues.add(nameValue);
-          }
-        }
-      }
-      nameValueList.setVNameValues(vNameValues);
-
-
-    } catch (SolrServerException | SolrException | IOException e) {
-      logger.error(e.getMessage() + " SolrQuery=" + solrQuery);
-      nameValueList=new VNameValueList();
-    }
-    return convertObjToString(nameValueList);
-  }
-
-  protected VGroupList getSolrGroupList(SolrQuery query)
-      throws SolrServerException, IOException, SolrException {
-    VGroupList collection = new VGroupList();
-    QueryResponse response = serviceLogsSolrDao.process(query);
-    if (response == null) {
-      return collection;
-    }
-    SolrDocumentList docList = response.getResults();
-    if (docList != null) {
-      collection.setGroupDocuments(docList);
-      collection.setStartIndex((int) docList.getStart());
-      collection.setTotalCount(docList.getNumFound());
-    }
-
-    return collection;
-  }
-
-  public String getServiceLogsFieldsName() {
-    String fieldsNameStrArry[] = PropertiesHelper
-      .getPropertyStringList("logsearch.service.logs.fields");
-    if (fieldsNameStrArry.length > 0) {
-
-      List<String> uiFieldNames = new ArrayList<String>();
-      String temp = null;
-      for (String field : fieldsNameStrArry) {
-        temp = ConfigHelper.serviceLogsColumnMapping.get(field
-            + LogSearchConstants.SOLR_SUFFIX);
-        if (temp == null){
-          uiFieldNames.add(field);
-        }else{
-          uiFieldNames.add(temp);
-        }
-      }
-      return convertObjToString(uiFieldNames);
-
-    }
-    throw RESTErrorUtil.createRESTException(
-      "No field name found in property file",
-      MessageEnums.DATA_NOT_FOUND);
-
-  }
-
-  public String getServiceLogsSchemaFieldsName() {
-
-    List<String> fieldNames = new ArrayList<String>();
-    String excludeArray[] = PropertiesHelper
-        .getPropertyStringList("logsearch.solr.service.logs.exclude.columnlist");
-
-    HashMap<String, String> uiFieldColumnMapping = new LinkedHashMap<String, String>();
-    ConfigHelper.getSchemaFieldsName(excludeArray, fieldNames,serviceLogsSolrDao);
-
-    for (String fieldName : fieldNames) {
-      String uiField = ConfigHelper.serviceLogsColumnMapping.get(fieldName
-          + LogSearchConstants.SOLR_SUFFIX);
-      if (uiField != null) {
-        uiFieldColumnMapping.put(fieldName, uiField);
-      } else {
-        uiFieldColumnMapping.put(fieldName, fieldName);
-      }
-    }
-
-    HashMap<String, String> uiFieldColumnMappingSorted = new LinkedHashMap<String, String>();
-    uiFieldColumnMappingSorted.put(LogSearchConstants.SOLR_LOG_MESSAGE, LogSearchConstants.SOLR_LOG_MESSAGE);
-
-    Iterator<Entry<String, String>> it = BizUtil
-        .sortHashMapByValues(uiFieldColumnMapping).entrySet().iterator();
-    while (it.hasNext()) {
-      @SuppressWarnings("rawtypes")
-      Map.Entry pair = (Map.Entry) it.next();
-      uiFieldColumnMappingSorted.put("" + pair.getKey(), "" + pair.getValue());
-    }
-
-    return convertObjToString(uiFieldColumnMappingSorted);
-
-  }
-
-  @SuppressWarnings("unchecked")
-  public void extractValuesFromBuckets(
-    SimpleOrderedMap<Object> jsonFacetResponse, String outerField,
-    String innerField, List<VBarGraphData> histogramData) {
-    NamedList<Object> stack = (NamedList<Object>) jsonFacetResponse
-      .get(outerField);
-    ArrayList<Object> stackBuckets = (ArrayList<Object>) stack
-      .get("buckets");
-    for (Object temp : stackBuckets) {
-      VBarGraphData vBarGraphData = new VBarGraphData();
-
-      SimpleOrderedMap<Object> level = (SimpleOrderedMap<Object>) temp;
-      String name = ((String) level.getVal(0)).toUpperCase();
-      vBarGraphData.setName(name);
-
-      Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
-      vBarGraphData.setDataCounts(vNameValues);
-      ArrayList<Object> levelBuckets = (ArrayList<Object>) ((NamedList<Object>) level
-        .get(innerField)).get("buckets");
-      for (Object temp1 : levelBuckets) {
-        SimpleOrderedMap<Object> countValue = (SimpleOrderedMap<Object>) temp1;
-        String value = DateUtil
-          .convertDateWithMillisecondsToSolrDate((Date) countValue
-            .getVal(0));
-
-        String count = "" + countValue.getVal(1);
-        VNameValue vNameValue = new VNameValue();
-        vNameValue.setName(value);
-        vNameValue.setValue(count);
-        vNameValues.add(vNameValue);
-      }
-      histogramData.add(vBarGraphData);
-    }
-  }
-
-  public String getAnyGraphData(SearchCriteria searchCriteria) {
-    searchCriteria.addParam("fieldTime", LogSearchConstants.LOGTIME);
-    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-    VBarDataList result = graphDataGenerator.getAnyGraphData(searchCriteria,
-        serviceLogsSolrDao, solrQuery);
-    if (result == null) {
-      result = new VBarDataList();
-    }
-    return convertObjToString(result);
-
-  }
-
-  public String getAfterBeforeLogs(SearchCriteria searchCriteria) {
-    VSolrLogList vSolrLogList = new VSolrLogList();
-    SolrDocumentList docList = null;
-    String id = (String) searchCriteria
-      .getParamValue(LogSearchConstants.ID);
-    if (StringUtils.isBlank(id)) {
-      return convertObjToString(vSolrLogList);
-
-    }
-    String maxRows = "";
-
-    maxRows = (String) searchCriteria.getParamValue("numberRows");
-    if (StringUtils.isBlank(maxRows)){
-      maxRows = ""+maxRows;
-    }
-    String scrollType = (String) searchCriteria.getParamValue("scrollType");
-    if(StringUtils.isBlank(scrollType)){
-      scrollType = "";
-    }
-
-    String logTime = null;
-    String sequenceId = null;
-    try {
-      SolrQuery solrQuery = new SolrQuery();
-      SolrUtil.setMainQuery(solrQuery,
-        queryGenerator.buildFilterQuery(LogSearchConstants.ID, id));
-      SolrUtil.setRowCount(solrQuery, 1);
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      if(response == null){
-        return convertObjToString(vSolrLogList);
-      }
-      docList = response.getResults();
-      if (docList != null && !docList.isEmpty()) {
-        Date date = (Date) docList.get(0).getFieldValue(
-          LogSearchConstants.LOGTIME);
-        logTime = DateUtil.convertDateWithMillisecondsToSolrDate(date);
-        sequenceId = ""
-          + docList.get(0).getFieldValue(
-          LogSearchConstants.SEQUNCE_ID);
-      }
-      if (StringUtils.isBlank(logTime)) {
-        return convertObjToString(vSolrLogList);
-      }
-    } catch (SolrServerException | SolrException | IOException e) {
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-    if (LogSearchConstants.SCROLL_TYPE_BEFORE.equals(scrollType)) {
-      vSolrLogList = whenScrollUp(searchCriteria, logTime,
-        sequenceId, maxRows);
-
-      SolrDocumentList solrDocList = new SolrDocumentList();
-      for (SolrDocument solrDoc : vSolrLogList.getList()) {
-        solrDocList.add(solrDoc);
-      }
-      vSolrLogList.setSolrDocuments(solrDocList);
-      return convertObjToString(vSolrLogList);
-
-    } else if (LogSearchConstants.SCROLL_TYPE_AFTER.equals(scrollType)) {
-      SolrDocumentList solrDocList = new SolrDocumentList();
-      vSolrLogList = new VSolrLogList();
-      for (SolrDocument solrDoc : whenScrollDown(searchCriteria, logTime,
-          sequenceId, maxRows).getList()) {
-        solrDocList.add(solrDoc);
-      }
-      vSolrLogList.setSolrDocuments(solrDocList);
-      return convertObjToString(vSolrLogList);
-
-    } else {
-      vSolrLogList = new VSolrLogList();
-      SolrDocumentList initial = new SolrDocumentList();
-      SolrDocumentList before = whenScrollUp(searchCriteria, logTime,
-        sequenceId, maxRows).getList();
-      SolrDocumentList after = whenScrollDown(searchCriteria, logTime,
-        sequenceId, maxRows).getList();
-      if (before != null && !before.isEmpty()) {
-        for (SolrDocument solrDoc : Lists.reverse(before)) {
-          initial.add(solrDoc);
-        }
-      }
-
-      initial.add(docList.get(0));
-      if (after != null && !after.isEmpty()) {
-        for (SolrDocument solrDoc : after) {
-          initial.add(solrDoc);
-        }
-      }
-
-      vSolrLogList.setSolrDocuments(initial);
-
-      return convertObjToString(vSolrLogList);
-
-    }
-  }
-
-  private VSolrLogList whenScrollUp(SearchCriteria searchCriteria,
-                                    String logTime, String sequenceId, String maxRows) {
-    SolrQuery solrQuery = new SolrQuery();
-    SolrUtil.setMainQuery(solrQuery, null);
-    /*queryGenerator.setSingleExcludeFilter(solrQuery,
-        LogSearchConstants.SEQUNCE_ID, sequenceId);*/
-    try {
-      int seq_num = Integer.parseInt(sequenceId) - 1;
-      sequenceId = "" + seq_num;
-    } catch (Exception e) {
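-      // sequence id not numeric: skip the decrement and filter on the raw value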
-
-    }
-    queryGenerator.setSingleRangeFilter(
-      solrQuery,
-      LogSearchConstants.SEQUNCE_ID, "*", sequenceId);
-
-    queryGenerator.applyLogFileFilter(solrQuery, searchCriteria);
-
-    queryGenerator.setSingleRangeFilter(solrQuery,
-      LogSearchConstants.LOGTIME, "*", logTime);
-    SolrUtil.setRowCount(solrQuery, Integer.parseInt(maxRows));
-    String order1 = LogSearchConstants.LOGTIME + " "
-      + LogSearchConstants.DESCENDING_ORDER;
-    String order2 = LogSearchConstants.SEQUNCE_ID + " "
-      + LogSearchConstants.DESCENDING_ORDER;
-    List<String> sortOrder = new ArrayList<String>();
-    sortOrder.add(order1);
-    sortOrder.add(order2);
-    searchCriteria.addParam(LogSearchConstants.SORT, sortOrder);
-    queryGenerator.setMultipleSortOrder(solrQuery, searchCriteria);
-
-    return getLogAsPaginationProvided(solrQuery, serviceLogsSolrDao);
-  }
-
-  private VSolrLogList whenScrollDown(SearchCriteria searchCriteria,
-                                      String logTime, String sequenceId, String maxRows) {
-    SolrQuery solrQuery = new SolrQuery();
-    SolrUtil.setMainQuery(solrQuery, null);
-    queryGenerator.applyLogFileFilter(solrQuery, searchCriteria);
-
-    /*queryGenerator.setSingleExcludeFilter(solrQuery,
-        LogSearchConstants.SEQUNCE_ID, sequenceId);*/
-    try {
-      int seq_num = Integer.parseInt(sequenceId) + 1;
-      sequenceId = "" + seq_num;
-    } catch (Exception e) {
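-      // sequence id not numeric: skip the increment and filter on the raw value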
-
-    }
-    queryGenerator.setSingleRangeFilter(
-      solrQuery,
-      LogSearchConstants.SEQUNCE_ID, sequenceId, "*");
-    queryGenerator.setSingleRangeFilter(solrQuery,
-      LogSearchConstants.LOGTIME, logTime, "*");
-    SolrUtil.setRowCount(solrQuery, Integer.parseInt(maxRows));
-
-    String order1 = LogSearchConstants.LOGTIME + " "
-      + LogSearchConstants.ASCENDING_ORDER;
-    String order2 = LogSearchConstants.SEQUNCE_ID + " "
-      + LogSearchConstants.ASCENDING_ORDER;
-    List<String> sortOrder = new ArrayList<String>();
-    sortOrder.add(order1);
-    sortOrder.add(order2);
-    searchCriteria.addParam(LogSearchConstants.SORT, sortOrder);
-    queryGenerator.setMultipleSortOrder(solrQuery, searchCriteria);
-
-    return getLogAsPaginationProvided(solrQuery, serviceLogsSolrDao);
-  }
-
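
whenScrollUp and whenScrollDown above implement the before/after scrolling by range-filtering on the anchor row's timestamp and sequence id and flipping the sort direction. A rough sketch of the "after" direction with plain SolrJ filter queries follows; the concrete field names (logtime, seq_num) are placeholders standing in for LogSearchConstants.LOGTIME and LogSearchConstants.SEQUNCE_ID, whose literal values do not appear in this hunk.

  import org.apache.solr.client.solrj.SolrQuery;

  public class ScrollQuerySketch {
    // Everything at or after the anchor timestamp with a higher sequence id,
    // oldest first, capped at maxRows; the "before" direction is the mirror
    // image with upper-bounded ranges and descending sort.
    public static SolrQuery afterAnchor(String anchorLogtime, long anchorSeqId, int maxRows) {
      SolrQuery query = new SolrQuery("*:*");
      query.addFilterQuery("logtime:[" + anchorLogtime + " TO *]");
      query.addFilterQuery("seq_num:[" + (anchorSeqId + 1) + " TO *]");
      query.setRows(maxRows);
      query.addSort("logtime", SolrQuery.ORDER.asc);
      query.addSort("seq_num", SolrQuery.ORDER.asc);
      return query;
    }
  }
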
-  @Scheduled(cron = "${logsearch.solr.warming.cron}")
-  public void warmingSolrServer(){
-    logger.info("solr warming triggered.");
-    SolrQuery solrQuery = new SolrQuery();
-    TimeZone gmtTimeZone = TimeZone.getTimeZone("GMT");
-    GregorianCalendar utc = new GregorianCalendar(gmtTimeZone);
-    utc.setTimeInMillis(new Date().getTime());
-    utc.set(Calendar.HOUR, 0);
-    utc.set(Calendar.MINUTE, 0);
-    utc.set(Calendar.MILLISECOND, 001);
-    utc.set(Calendar.SECOND, 0);
-    DateUtil.convertDateWithMillisecondsToSolrDate(utc.getTime());
-    String from = DateUtil.convertDateWithMillisecondsToSolrDate(utc.getTime());
-    utc.set(Calendar.MILLISECOND, 999);
-    utc.set(Calendar.SECOND, 59);
-    utc.set(Calendar.MINUTE, 59);
-    utc.set(Calendar.HOUR, 23);
-    String to = DateUtil.convertDateWithMillisecondsToSolrDate(utc.getTime());
-    queryGenerator.setSingleRangeFilter(solrQuery,
-        LogSearchConstants.LOGTIME, from,to);
-    String level = LogSearchConstants.FATAL+","+LogSearchConstants.ERROR+","+LogSearchConstants.WARN;
-    queryGenerator.setFilterClauseWithFieldName(solrQuery, level,
-        LogSearchConstants.SOLR_LEVEL, "", QueryGenerationBase.Condition.OR);
-    try {
-      serviceLogsSolrDao.process(solrQuery);
-    } catch (SolrServerException | IOException e) {
-      logger.error("Error while warming solr server",e);
-    }
-  }
-
-
-}
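
The warmingSolrServer method just above builds the current day's UTC window with a GregorianCalendar; note that Calendar.HOUR is the 12-hour field and Calendar.HOUR_OF_DAY the 24-hour one, which is easy to trip over. Purely as an illustration, the same window can be expressed with java.time on Java 8; the Solr date pattern below is an assumption about what DateUtil.convertDateWithMillisecondsToSolrDate emits, and the sketch is not part of the project code.

  import java.time.LocalDate;
  import java.time.ZoneOffset;
  import java.time.format.DateTimeFormatter;

  public class WarmingWindowSketch {
    // Assumed Solr date layout; the real formatting lives in DateUtil.
    private static final DateTimeFormatter SOLR_DATE =
        DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");

    public static void main(String[] args) {
      LocalDate today = LocalDate.now(ZoneOffset.UTC);
      String from = today.atStartOfDay().plusNanos(1_000_000).format(SOLR_DATE);   // 00:00:00.001
      String to = today.atTime(23, 59, 59, 999_000_000).format(SOLR_DATE);         // 23:59:59.999
      System.out.println("logtime:[" + from + " TO " + to + "]");
    }
  }
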


[17/50] [abbrv] ambari git commit: AMBARI-18227. Add unit tests for Log Search components and refactor them as needed - Vol 1. (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
index e612475..a0c1134 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
@@ -21,6 +21,7 @@ package org.apache.ambari.logsearch.dao;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -40,46 +41,40 @@ import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 import com.google.gson.JsonParseException;
 
-import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
+import org.apache.ambari.logsearch.manager.MgrBase.LogType;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.log4j.Logger;
 import org.springframework.stereotype.Component;
+import org.springframework.util.CollectionUtils;
 
 @Component
 public class UserConfigSolrDao extends SolrDaoBase {
 
-  static private Logger logger = Logger.getLogger(UserConfigSolrDao.class);
+  private static final Logger logger = Logger.getLogger(UserConfigSolrDao.class);
   private static final String DEFAULT_LEVELS = "FATAL,ERROR,WARN,INFO,DEBUG,TRACE";
 
   public UserConfigSolrDao() {
-    super(LOG_TYPE.SERVICE);
+    super(LogType.SERVICE);
   }
 
   @PostConstruct
   public void postConstructor() {
-
     String solrUrl = PropertiesUtil.getProperty("logsearch.solr.url");
     String zkConnectString = PropertiesUtil.getProperty("logsearch.solr.zk_connect_string");
-    String collection = PropertiesUtil.getProperty("logsearch.solr.collection.history",
-      "history");
-    String configName = PropertiesUtil.getProperty(
-      "logsearch.solr.history.config.name", "history");
-    int replicationFactor = PropertiesUtil.getIntProperty(
-      "logsearch.collection.history.replication.factor", 2);
+    String collection = PropertiesUtil.getProperty("logsearch.solr.collection.history", "history");
+    String configName = PropertiesUtil.getProperty("logsearch.solr.history.config.name", "history");
+    int replicationFactor = PropertiesUtil.getIntProperty("logsearch.collection.history.replication.factor", 2);
     String splitInterval = "none";
     int numberOfShards = 1;
 
     try {
       connectToSolr(solrUrl, zkConnectString, collection);
-      setupCollections(splitInterval, configName, numberOfShards,
-        replicationFactor,true);
+      setupCollections(splitInterval, configName, numberOfShards, replicationFactor, true);
       intializeLogFeederFilter();
 
     } catch (Exception e) {
-      logger.error(
-        "error while connecting to Solr for history logs : solrUrl="
-          + solrUrl + ", zkConnectString=" + zkConnectString
-          + ", collection=" + collection, e);
+      logger.error("error while connecting to Solr for history logs : solrUrl=" + solrUrl + ", zkConnectString=" + zkConnectString +
+          ", collection=" + collection, e);
     }
   }
 
@@ -91,8 +86,7 @@ public class UserConfigSolrDao extends SolrDaoBase {
     }
   }
 
-  public void saveUserFiter(VLogfeederFilterWrapper logfeederFilterWrapper) throws SolrException,
-      SolrServerException, IOException {
+  public void saveUserFilter(VLogfeederFilterWrapper logfeederFilterWrapper) throws SolrException, SolrServerException, IOException {
     String filterName = LogSearchConstants.LOGFEEDER_FILTER_NAME;
     String json = jsonUtil.objToJson(logfeederFilterWrapper);
     SolrInputDocument configDocument = new SolrInputDocument();
@@ -104,47 +98,32 @@ public class UserConfigSolrDao extends SolrDaoBase {
     addDocs(configDocument);
   }
 
-  public void deleteUserConfig(String id) throws SolrException,
-      SolrServerException, IOException {
+  public void deleteUserConfig(String id) throws SolrException, SolrServerException, IOException {
     removeDoc("id:" + id);
   }
 
 	@SuppressWarnings("unchecked")
-  public VLogfeederFilterWrapper getUserFilter() throws SolrServerException,
-      IOException {
+  public VLogfeederFilterWrapper getUserFilter() throws SolrServerException, IOException {
 
-    String filterName = LogSearchConstants.LOGFEEDER_FILTER_NAME;
     SolrQuery solrQuery = new SolrQuery();
     solrQuery.setQuery("*:*");
-    String fq = LogSearchConstants.ROW_TYPE + ":" + filterName;
+    String fq = LogSearchConstants.ROW_TYPE + ":" + LogSearchConstants.LOGFEEDER_FILTER_NAME;
     solrQuery.setFilterQueries(fq);
 
     QueryResponse response = process(solrQuery);
     SolrDocumentList documentList = response.getResults();
     VLogfeederFilterWrapper logfeederFilterWrapper = null;
-    if (documentList != null && documentList.size() > 0) {
+    if (!CollectionUtils.isEmpty(documentList)) {
       SolrDocument configDoc = documentList.get(0);
       String configJson = jsonUtil.objToJson(configDoc);
-      HashMap<String, Object> configMap = (HashMap<String, Object>) jsonUtil
-          .jsonToMapObject(configJson);
+      HashMap<String, Object> configMap = (HashMap<String, Object>) jsonUtil.jsonToMapObject(configJson);
       String json = (String) configMap.get(LogSearchConstants.VALUES);
-      logfeederFilterWrapper = (VLogfeederFilterWrapper) jsonUtil.jsonToObj(
-          json, VLogfeederFilterWrapper.class);
+      logfeederFilterWrapper = (VLogfeederFilterWrapper) jsonUtil.jsonToObj(json, VLogfeederFilterWrapper.class);
       logfeederFilterWrapper.setId("" + configDoc.get(LogSearchConstants.ID));
 
     } else {
-      String logfeederDefaultLevels = PropertiesUtil.getProperty(
-          "logsearch.logfeeder.include.default.level", DEFAULT_LEVELS);
-      JSONArray levelJsonArray = new JSONArray();
-      try {
-        String levelArray[] = logfeederDefaultLevels.split(",");
-        for (String level : levelArray) {
-          levelJsonArray.put(level.toUpperCase());
-        }
-      } catch (Exception e) {
-        logger.error("Error spliting logfeederDefaultLevels="
-            + logfeederDefaultLevels, e);
-      }
+      String logfeederDefaultLevels = PropertiesUtil.getProperty("logsearch.logfeeder.include.default.level", DEFAULT_LEVELS);
+      JSONArray levelJsonArray = new JSONArray(Arrays.asList(logfeederDefaultLevels.split(",")));
 
       String hadoopServiceString = getHadoopServiceConfigJSON();
       String key = null;
@@ -153,13 +132,11 @@ public class UserConfigSolrDao extends SolrDaoBase {
         JSONObject componentList = new JSONObject();
         JSONObject jsonValue = new JSONObject();
 
-        JSONObject hadoopServiceJsonObject = new JSONObject(hadoopServiceString)
-            .getJSONObject("service");
+        JSONObject hadoopServiceJsonObject = new JSONObject(hadoopServiceString).getJSONObject("service");
         Iterator<String> hadoopSerivceKeys = hadoopServiceJsonObject.keys();
         while (hadoopSerivceKeys.hasNext()) {
           key = hadoopSerivceKeys.next();
-          componentArray = hadoopServiceJsonObject.getJSONObject(key)
-              .getJSONArray("components");
+          componentArray = hadoopServiceJsonObject.getJSONObject(key).getJSONArray("components");
           for (int i = 0; i < componentArray.length(); i++) {
             JSONObject compJsonObject = (JSONObject) componentArray.get(i);
             String componentName = compJsonObject.getString("name");
@@ -171,27 +148,24 @@ public class UserConfigSolrDao extends SolrDaoBase {
           }
         }
         jsonValue.put("filter", componentList);
-        logfeederFilterWrapper = (VLogfeederFilterWrapper) jsonUtil
-            .jsonToObj(jsonValue.toString(), VLogfeederFilterWrapper.class);
+        logfeederFilterWrapper = (VLogfeederFilterWrapper) jsonUtil.jsonToObj(jsonValue.toString(), VLogfeederFilterWrapper.class);
         logfeederFilterWrapper.setId(""+new Date().getTime());
-        saveUserFiter(logfeederFilterWrapper);
+        saveUserFilter(logfeederFilterWrapper);
 
       } catch (JsonParseException | JSONException je) {
-        logger.error("Error parsing JSON. key=" + key + ", componentArray="
-            + componentArray, je);
+        logger.error("Error parsing JSON. key=" + key + ", componentArray=" + componentArray, je);
         logfeederFilterWrapper = new VLogfeederFilterWrapper();
       }
     }
     return logfeederFilterWrapper;
   }
 
-  public String getHadoopServiceConfigJSON() {
+  private String getHadoopServiceConfigJSON() {
     StringBuilder result = new StringBuilder("");
 
     // Get file from resources folder
     ClassLoader classLoader = getClass().getClassLoader();
-    File file = new File(classLoader.getResource("HadoopServiceConfig.json")
-        .getFile());
+    File file = new File(classLoader.getResource("HadoopServiceConfig.json").getFile());
 
     try (Scanner scanner = new Scanner(file)) {
 

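The getUserFilter() change above collapses the split-and-put loop into Jettison's collection constructor. One behavioural detail visible in the diff: the removed loop upper-cased each level before adding it, while the new one-liner keeps the property value as given (the shipped default, FATAL,ERROR,WARN,INFO,DEBUG,TRACE, is already upper case, so the default path is unaffected). A tiny standalone sketch of the new construction:

  import java.util.Arrays;

  import org.codehaus.jettison.json.JSONArray;

  public class DefaultLevelsSketch {
    public static void main(String[] args) throws Exception {
      // Same construction as the refactored getUserFilter(): split the property
      // value and hand the resulting list to Jettison's collection constructor.
      String levels = "FATAL,ERROR,WARN,INFO,DEBUG,TRACE";
      JSONArray levelJsonArray = new JSONArray(Arrays.asList(levels.split(",")));
      System.out.println(levelJsonArray);   // ["FATAL","ERROR","WARN","INFO","DEBUG","TRACE"]
    }
  }
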
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
index 6b2f049..b7853df 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
@@ -20,46 +20,43 @@ package org.apache.ambari.logsearch.dao;
 
 import java.io.File;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
-import java.util.List;
 
 import javax.annotation.PostConstruct;
 
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.security.authentication.encoding.Md5PasswordEncoder;
 import org.springframework.security.core.GrantedAuthority;
 import org.springframework.stereotype.Repository;
+
+import org.apache.ambari.logsearch.util.CommonUtil;
 import org.apache.ambari.logsearch.util.FileUtil;
 import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
-import org.apache.ambari.logsearch.util.StringUtil;
 import org.apache.ambari.logsearch.web.model.Privilege;
 import org.apache.ambari.logsearch.web.model.Role;
 import org.apache.ambari.logsearch.web.model.User;
 import org.apache.ambari.logsearch.web.security.LogsearchFileAuthenticationProvider;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.collections.Predicate;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 
 @Repository
 public class UserDao {
-
   private static final Logger logger = Logger.getLogger(UserDao.class);
-  private static final Md5PasswordEncoder md5Encoder = new Md5PasswordEncoder();
 
-  @Autowired
-  JSONUtil jsonUtil;
+  private static final String USER_NAME = "username";
+  private static final String PASSWORD = "password";
+  private static final String ENC_PASSWORD = "en_password";
+  private static final String NAME = "name";
 
   @Autowired
-  StringUtil stringUtil;
-
+  private JSONUtil jsonUtil;
   @Autowired
-  FileUtil fileUtil;
-
+  private FileUtil fileUtil;
   @Autowired
-  LogsearchFileAuthenticationProvider fileAuthenticationProvider;
-
-  private HashMap<String, Object> userInfos = null;
+  private LogsearchFileAuthenticationProvider fileAuthenticationProvider;
 
   private ArrayList<HashMap<String, String>> userList = null;
 
@@ -68,23 +65,17 @@ public class UserDao {
   public void initialization() {
     if (fileAuthenticationProvider.isEnable()) {
       try {
-        String USER_PASS_JSON_FILE_NAME = PropertiesUtil
-          .getProperty("logsearch.login.credentials.file");
-        logger.info("USER PASS JSON  file NAME:" + USER_PASS_JSON_FILE_NAME);
-        File jsonFile = fileUtil
-          .getFileFromClasspath(USER_PASS_JSON_FILE_NAME);
+        String userPassJsonFileName = PropertiesUtil.getProperty("logsearch.login.credentials.file");
+        logger.info("USER PASS JSON  file NAME:" + userPassJsonFileName);
+        File jsonFile = fileUtil.getFileFromClasspath(userPassJsonFileName);
         if (jsonFile == null || !jsonFile.exists()) {
-          logger.fatal("user_pass json file not found in classpath :"
-            + USER_PASS_JSON_FILE_NAME);
+          logger.fatal("user_pass json file not found in classpath :" + userPassJsonFileName);
           System.exit(1);
         }
-        userInfos = jsonUtil.readJsonFromFile(jsonFile);
-        userList = (ArrayList<HashMap<String, String>>) userInfos
-          .get("users");
+        HashMap<String, Object> userInfos = jsonUtil.readJsonFromFile(jsonFile);
+        userList = (ArrayList<HashMap<String, String>>) userInfos.get("users");
         if (userList != null) {
-          // encrypting password using MD5 algo with salt username
           boolean isUpdated = this.encryptAllPassword();
-          // updating json
           userInfos.put("users", userList);
           if (isUpdated) {
             String jsonStr = jsonUtil.mapToJSON(userInfos);
@@ -95,108 +86,78 @@ public class UserDao {
         }
 
       } catch (Exception exception) {
-        logger.error("Error while reading user prop file :"
-          + exception.getMessage());
-        userInfos = new HashMap<String, Object>();
+        logger.error("Error while reading user prop file :" + exception.getMessage());
         userList = new ArrayList<HashMap<String, String>>();
       }
     } else {
       logger.info("File auth is disabled.");
     }
-
   }
 
-  /**
-   * @param username
-   * @return
-   */
   public User loadUserByUsername(final String username) {
     logger.debug(" loadUserByUsername username" + username);
-    HashMap<String, Object> userInfo = this.findByusername(username);
+    HashMap<String, String> userInfo = findByusername(username);
     User user = new User();
 
     if (userInfo != null) {
-      user.setFirstName(userInfo.get(UserInfoAttributes.NAME) != null ? (String) userInfo
-        .get(UserInfoAttributes.NAME) : "Unknown");
-      user.setLastName(userInfo.get(UserInfoAttributes.NAME) != null ? (String) userInfo
-        .get(UserInfoAttributes.NAME) : "Unknown");
-      user.setUsername(userInfo.get(UserInfoAttributes.USER_NAME) != null ? (String) userInfo
-        .get(UserInfoAttributes.USER_NAME) : "");
-      user.setPassword(userInfo.get(UserInfoAttributes.ENC_PASSWORD) != null ? (String) userInfo
-        .get(UserInfoAttributes.ENC_PASSWORD) : "");
+      user.setFirstName(userInfo.get(NAME) != null ? userInfo.get(NAME) : "Unknown");
+      user.setLastName(userInfo.get(NAME) != null ? userInfo.get(NAME) : "Unknown");
+      user.setUsername(userInfo.get(USER_NAME) != null ? userInfo.get(USER_NAME) : "");
+      user.setPassword(userInfo.get(ENC_PASSWORD) != null ? userInfo.get(ENC_PASSWORD) : "");
     }
 
     Role r = new Role();
     r.setName("ROLE_USER");
     Privilege priv = new Privilege();
     priv.setName("READ_PRIVILEGE");
-    ArrayList<Privilege> plist = new ArrayList<Privilege>();
-    plist.add(priv);
-    r.setPrivileges(plist);
-    List<GrantedAuthority> roles = new ArrayList<GrantedAuthority>();
-    roles.add(r);
-    user.setAuthorities(roles);
+    r.setPrivileges(Arrays.asList(priv));
+    user.setAuthorities(Arrays.asList((GrantedAuthority)r));
+    
     return user;
   }
 
-  /**
-   * @param username
-   * @return
-   */
-  public HashMap<String, Object> findByusername(final String username) {
-    if (this.userList == null) {
+  private HashMap<String, String> findByusername(final String username) {
+    if (userList == null) {
       return null;
     }
     @SuppressWarnings("unchecked")
-    HashMap<String, Object> userInfo = (HashMap<String, Object>) CollectionUtils
-      .find(this.userList, new Predicate() {
-        @Override
-        public boolean evaluate(Object args) {
-          HashMap<String, Object> tmpuserInfo = (HashMap<String, Object>) args;
-          String objUsername = (String) tmpuserInfo
-            .get(UserInfoAttributes.USER_NAME);
-          if (objUsername != null && username != null) {
-            return username.equalsIgnoreCase(objUsername);
+    HashMap<String, String> userInfo = (HashMap<String, String>) CollectionUtils.find(userList,
+        new Predicate() {
+          @Override
+          public boolean evaluate(Object args) {
+            HashMap<String, String> tmpUserInfo = (HashMap<String, String>) args;
+            String objUsername = tmpUserInfo.get(USER_NAME);
+            return (objUsername != null && username != null && username.equalsIgnoreCase(objUsername));
           }
-          return false;
-        }
-      });
+        });
+    
     return userInfo;
   }
 
   private boolean encryptAllPassword() {
     boolean isUpdated = false;
     for (HashMap<String, String> user : userList) {
-      // user
-      String encPassword = user.get(UserInfoAttributes.ENC_PASSWORD);
-      String username = user.get(UserInfoAttributes.USER_NAME);
-      String password = user.get(UserInfoAttributes.PASSWORD);
-      if (!stringUtil.isEmpty(password)) {
-        encPassword = encryptPassword(username, password);
-        user.put(UserInfoAttributes.PASSWORD, "");
-        user.put(UserInfoAttributes.ENC_PASSWORD, encPassword);
+      String encPassword = user.get(ENC_PASSWORD);
+      String username = user.get(USER_NAME);
+      String password = user.get(PASSWORD);
+      if (!StringUtils.isBlank(password)) {
+        encPassword = CommonUtil.encryptPassword(username, password);
+        user.put(PASSWORD, "");
+        user.put(ENC_PASSWORD, encPassword);
         isUpdated = true;
       }
-      if (stringUtil.isEmpty(password) && stringUtil.isEmpty(encPassword)) {
-        // log error
-        logger.error("Password is empty or null for username : "
-          + username);
+      if (StringUtils.isBlank(password) && StringUtils.isBlank(encPassword)) {
+        logger.error("Password is empty or null for username : " + username);
       }
     }
     return isUpdated;
   }
-
-  /**
-   * @param username
-   * @param password
-   * @return
-   */
+  
   public String encryptPassword(String username, String password) {
-    if (!stringUtil.isEmpty(username)) {
+    if (!StringUtils.isEmpty(username)) {
       username = username.toLowerCase();
     }
-    String saltEncodedpasswd = md5Encoder
-      .encodePassword(password, username);
+    String saltEncodedpasswd = CommonUtil.encryptPassword(password, username);
     return saltEncodedpasswd;
   }
 }

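The UserDao change above routes password hashing through CommonUtil.encryptPassword, whose body is not part of this diff. Judging from the Md5PasswordEncoder it replaces, it presumably still produces an MD5 hash salted with the lower-cased user name; the sketch below is a hypothetical stand-in for that helper (the name, parameter order and behaviour are assumptions), built on the deprecated Spring Security class the old code imported.

  import org.springframework.security.authentication.encoding.Md5PasswordEncoder;

  public class PasswordEncodingSketch {
    // Hypothetical stand-in for CommonUtil.encryptPassword; the real helper is
    // not shown in this diff. It reproduces what the removed UserDao code did:
    // MD5-encode the raw password using the lower-cased user name as the salt.
    public static String encryptPassword(String username, String password) {
      String salt = (username == null) ? null : username.toLowerCase();
      return new Md5PasswordEncoder().encodePassword(password, salt);
    }

    public static void main(String[] args) {
      System.out.println(encryptPassword("admin", "secret"));
    }
  }
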
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserInfoAttributes.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserInfoAttributes.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserInfoAttributes.java
deleted file mode 100644
index 7bc3555..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserInfoAttributes.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.dao;
-
-public interface UserInfoAttributes {
-
-  public static final String USER_NAME = "username";
-  public static final String PASSWORD = "password";
-  public static final String ENC_PASSWORD = "en_password";
-  public static final String NAME = "name";
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
index d3975b3..3793f50 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
@@ -29,14 +29,12 @@ import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
 import org.apache.ambari.logsearch.query.QueryGeneration;
-import org.apache.ambari.logsearch.util.ConfigUtil;
-import org.apache.ambari.logsearch.util.DateUtil;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.ambari.logsearch.util.SolrUtil;
-import org.apache.ambari.logsearch.util.StringUtil;
 import org.apache.ambari.logsearch.view.VBarDataList;
 import org.apache.ambari.logsearch.view.VBarGraphData;
 import org.apache.ambari.logsearch.view.VNameValue;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -46,7 +44,6 @@ import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.client.solrj.response.RangeFacet;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.schema.TextField;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
@@ -55,32 +52,16 @@ import org.springframework.stereotype.Component;
 @Component
 public class GraphDataGenerator extends GraphDataGeneratorBase {
 
-  @Autowired
-  StringUtil stringUtil;
+  private static final Logger logger = Logger.getLogger(GraphDataGenerator.class);
 
   @Autowired
-  QueryGeneration queryGenerator;
-
+  private QueryGeneration queryGenerator;
   @Autowired
-  RESTErrorUtil restErrorUtil;
-
+  private RESTErrorUtil restErrorUtil;
   @Autowired
-  DateUtil dateUtil;
-  
-  @Autowired
-  SolrUtil solrUtil;
-
-  private static Logger logger = Logger.getLogger(GraphDataGenerator.class);
+  private SolrUtil solrUtil;
 
-  /**
-   *
-   * @param searchCriteria
-   * @param solrDaoBase
-   * @param solrQuery
-   * @return
-   */
-  public VBarDataList getAnyGraphData(SearchCriteria searchCriteria,
-      SolrDaoBase solrDaoBase, SolrQuery solrQuery) {
+  public VBarDataList getAnyGraphData(SearchCriteria searchCriteria, SolrDaoBase solrDaoBase, SolrQuery solrQuery) {
     // X axis credentials
     String xAxisField = (String) searchCriteria.getParamValue("xAxis");
     String stackField = (String) searchCriteria.getParamValue("stackBy");
@@ -88,7 +69,7 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
     String to = (String) searchCriteria.getParamValue("to");
     String unit = (String) searchCriteria.getParamValue("unit");
     String typeXAxis = solrDaoBase.schemaFieldsNameMap.get(xAxisField);
-    typeXAxis = (stringUtil.isEmpty(typeXAxis)) ? "string" : typeXAxis;
+    typeXAxis = (StringUtils.isBlank(typeXAxis)) ? "string" : typeXAxis;
 
     // Y axis credentials
     String yAxisField = (String) searchCriteria.getParamValue("yAxis");
@@ -96,17 +77,14 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
     searchCriteria.addParam("type", typeXAxis);
     String fieldTime = (String) searchCriteria.getParamValue("fieldTime");
     // decide graph type based on user request parameter
-    GRAPH_TYPE garphType = getGraphType(searchCriteria);
+    GraphType garphType = getGraphType(searchCriteria);
     switch (garphType) {
     case NORMAL_GRAPH:
-      return normalGraph(xAxisField, yAxisField, from, to, solrDaoBase,
-          typeXAxis, fieldTime, solrQuery);
+      return normalGraph(xAxisField, yAxisField, from, to, solrDaoBase, typeXAxis, fieldTime, solrQuery);
     case RANGE_NON_STACK_GRAPH:
-      return rangeNonStackGraph(xAxisField, yAxisField, from, to, unit,
-          solrDaoBase, typeXAxis, fieldTime, solrQuery);
+      return rangeNonStackGraph(xAxisField, yAxisField, from, to, unit, solrDaoBase, typeXAxis, fieldTime, solrQuery);
     case NON_RANGE_STACK_GRAPH:
-      return nonRangeStackGraph(xAxisField, yAxisField, stackField, from, to,
-          solrDaoBase, typeXAxis, fieldTime, solrQuery);
+      return nonRangeStackGraph(xAxisField, yAxisField, stackField, from, to, solrDaoBase, typeXAxis, fieldTime, solrQuery);
     case RANGE_STACK_GRAPH:
       return rangeStackGraph(xAxisField, stackField, from, to, unit, solrDaoBase, solrQuery);
     default:
@@ -115,9 +93,9 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
     }
   }
 
-  private GRAPH_TYPE getGraphType(SearchCriteria searchCriteria) {
+  private GraphType getGraphType(SearchCriteria searchCriteria) {
     // default graph type is unknown
-    GRAPH_TYPE graphType = GRAPH_TYPE.UNKNOWN;
+    GraphType graphType = GraphType.UNKNOWN;
     // X axis credentials
     String xAxisField = (String) searchCriteria.getParamValue("xAxis");
     String stackField = (String) searchCriteria.getParamValue("stackBy");
@@ -127,46 +105,35 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
     if (xType != null) {
       // Y axis credentials
       String yAxisField = (String) searchCriteria.getParamValue("yAxis");
-      if (stringUtil.isEmpty(xAxisField) || stringUtil.isEmpty(yAxisField)) {
-        graphType = GRAPH_TYPE.UNKNOWN;
-      } else if (stringUtil.isEmpty(stackField) && !stringUtil.isEmpty(to)
-          && !stringUtil.isEmpty(from)
+      if (StringUtils.isBlank(xAxisField) || StringUtils.isBlank(yAxisField)) {
+        graphType = GraphType.UNKNOWN;
+      } else if (StringUtils.isBlank(stackField) && !StringUtils.isBlank(to) && !StringUtils.isBlank(from)
           && !(xType.contains("date") || xType.contains("time"))) {
-        // Normal Graph Type
-        graphType = GRAPH_TYPE.NORMAL_GRAPH;
-      } else if (stringUtil.isEmpty(stackField) && !stringUtil.isEmpty(to)
-          && !stringUtil.isEmpty(from)
+        graphType = GraphType.NORMAL_GRAPH;
+      } else if (StringUtils.isBlank(stackField) && !StringUtils.isBlank(to) && !StringUtils.isBlank(from)
           && (xType.contains("date") || xType.contains("time"))) {
-        // Range(Non-Stack) Graph Type
-        graphType = GRAPH_TYPE.RANGE_NON_STACK_GRAPH;
-      } else if (!stringUtil.isEmpty(stackField) && !stringUtil.isEmpty(to)
-          && !stringUtil.isEmpty(from)
+        graphType = GraphType.RANGE_NON_STACK_GRAPH;
+      } else if (!StringUtils.isBlank(stackField) && !StringUtils.isBlank(to) && !StringUtils.isBlank(from)
           && !(xType.contains("date") || xType.contains("time"))) {
-        // Non-Range Stack Graph Type
-        graphType = GRAPH_TYPE.NON_RANGE_STACK_GRAPH;
-      } else if (!stringUtil.isEmpty(stackField) && !stringUtil.isEmpty(to)
-          && !stringUtil.isEmpty(from)
+        graphType = GraphType.NON_RANGE_STACK_GRAPH;
+      } else if (!StringUtils.isBlank(stackField) && !StringUtils.isBlank(to) && !StringUtils.isBlank(from)
           && (xType.contains("date") || xType.contains("time"))) {
-        // Range Stack GraphType
-        graphType = GRAPH_TYPE.RANGE_STACK_GRAPH;
+        graphType = GraphType.RANGE_STACK_GRAPH;
       }
     }
     return graphType;
   }
 
   @SuppressWarnings("unchecked")
-  private VBarDataList normalGraph(String xAxisField, String yAxisField, String from,
-      String to, SolrDaoBase solrDaoBase, String typeXAxis, String fieldTime,
-      SolrQuery solrQuery) {
+  private VBarDataList normalGraph(String xAxisField, String yAxisField, String from, String to, SolrDaoBase solrDaoBase,
+      String typeXAxis, String fieldTime, SolrQuery solrQuery) {
     VBarDataList dataList = new VBarDataList();
     Collection<VBarGraphData> vBarGraphDatas = new ArrayList<VBarGraphData>();
     VBarGraphData vBarGraphData = new VBarGraphData();
     Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
     queryGenerator.setMainQuery(solrQuery, null);
-    queryGenerator.setSingleIncludeFilter(solrQuery, fieldTime, "[" + from
-        + " TO " + to + "]");
-    if (typeXAxis.contains("string") || typeXAxis.contains("key_lower_case")
-        || typeXAxis.contains("text")) {
+    queryGenerator.setSingleIncludeFilter(solrQuery, fieldTime, "[" + from + " TO " + to + "]");
+    if (typeXAxis.contains("string") || typeXAxis.contains("key_lower_case") || typeXAxis.contains("text")) {
       queryGenerator.setFacetField(solrQuery, xAxisField);
       try {
         QueryResponse response = solrDaoBase.process(solrQuery);
@@ -214,20 +181,17 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
         return dataList;
       } catch (SolrException | SolrServerException | IOException e) {
         String query = solrQuery != null ? solrQuery.toQueryString() : "";
-        logger.error("Got exception for solr query :" + query,
-            e.getCause());
+        logger.error("Got exception for solr query :" + query, e.getCause());
       }
     } else {
       queryGenerator.setRowCount(solrQuery, 0);
       String yAxis = yAxisField.contains("count") ? "sum" : yAxisField;
-      String jsonQuery = queryGenerator.buildJSONFacetAggregatedFuncitonQuery(
-          yAxis, xAxisField);
+      String jsonQuery = queryGenerator.buildJSONFacetAggregatedFuncitonQuery(yAxis, xAxisField);
       queryGenerator.setJSONFacet(solrQuery, jsonQuery);
       try {
         QueryResponse response = solrDaoBase.process(solrQuery);
-        SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
-            .getResponse().get("facets");
-        if (jsonFacetResponse.toString().equals("{count=0}")){
+        SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response.getResponse().get("facets");
+        if (jsonFacetResponse.toString().equals("{count=0}")) {
           return dataList;
         }
         VNameValue value = new VNameValue();
@@ -242,31 +206,26 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
         return dataList;
       } catch (SolrException | SolrServerException | IOException e) {
         String query = solrQuery != null ? solrQuery.toQueryString() : "";
-        logger.error("Got exception for solr query :" + query,
-            e.getCause());
+        logger.error("Got exception for solr query :" + query, e.getCause());
       }
     }
     return null;
   }
 
   @SuppressWarnings("unchecked")
-  private VBarDataList nonRangeStackGraph(String xAxisField, String yAxisField,
-      String stackField, String from, String to, SolrDaoBase solrDaoBase,
-      String typeXAxis, String fieldTime, SolrQuery solrQuery) {
+  private VBarDataList nonRangeStackGraph(String xAxisField, String yAxisField, String stackField, String from, String to,
+      SolrDaoBase solrDaoBase, String typeXAxis, String fieldTime, SolrQuery solrQuery) {
     VBarDataList dataList = new VBarDataList();
     Collection<VBarGraphData> vGraphData = new ArrayList<VBarGraphData>();
-    String mainQuery = queryGenerator.buildInclusiveRangeFilterQuery(fieldTime,
-        from, to);
+    String mainQuery = queryGenerator.buildInclusiveRangeFilterQuery(fieldTime, from, to);
     queryGenerator.setMainQuery(solrQuery, mainQuery);
     queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
     String jsonQuery = "";
     if (solrUtil.isSolrFieldNumber(typeXAxis,solrDaoBase)) {
       String function = (yAxisField.contains("count")) ? "sum" : yAxisField;
-      jsonQuery = queryGenerator.buidlJSONFacetRangeQueryForNumber(stackField,
-          xAxisField, function);
+      jsonQuery = queryGenerator.buidlJSONFacetRangeQueryForNumber(stackField, xAxisField, function);
     } else {
-      jsonQuery = queryGenerator.buildJsonFacetTermsRangeQuery(stackField,
-          xAxisField);
+      jsonQuery = queryGenerator.buildJsonFacetTermsRangeQuery(stackField, xAxisField);
     }
     try {
       queryGenerator.setJSONFacet(solrQuery, jsonQuery);
@@ -279,16 +238,12 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
       if (count <= 0) {
         return dataList;
       }
-      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
-          .getResponse().get("facets");
-      if (jsonFacetResponse == null
-          || jsonFacetResponse.toString().equals("{count=0}")) {
+      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response.getResponse().get("facets");
+      if (jsonFacetResponse == null || jsonFacetResponse.toString().equals("{count=0}")) {
         return dataList;
       }
-      extractNonRangeStackValuesFromBucket(jsonFacetResponse, stackField,
-          vGraphData, typeXAxis);
-      if (LogSearchConstants.SOLR_LEVEL.equalsIgnoreCase(stackField)
-          && LogSearchConstants.SOLR_LEVEL.equalsIgnoreCase(xAxisField)) {
+      extractNonRangeStackValuesFromBucket(jsonFacetResponse, stackField, vGraphData, typeXAxis);
+      if (LogSearchConstants.SOLR_LEVEL.equalsIgnoreCase(stackField) && LogSearchConstants.SOLR_LEVEL.equalsIgnoreCase(xAxisField)) {
         Collection<VBarGraphData> levelVGraphData = dataList.getGraphData();
         for (VBarGraphData garphData : levelVGraphData) {
           Collection<VNameValue> valueList = garphData.getDataCount();
@@ -312,17 +267,14 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
       return dataList;
     } catch (SolrException | IOException | SolrServerException e) {
       String query = solrQuery != null ? solrQuery.toQueryString() : "";
-      logger.error("Got exception for solr query :" + query,
-          e.getCause());
-      throw restErrorUtil.createRESTException(MessageEnums.DATA_NOT_FOUND
-          .getMessage().getMessage(), MessageEnums.DATA_NOT_FOUND);
+      logger.error("Got exception for solr query :" + query, e.getCause());
+      throw restErrorUtil.createRESTException(MessageEnums.DATA_NOT_FOUND.getMessage().getMessage(), MessageEnums.DATA_NOT_FOUND);
     }
   }
 
   @SuppressWarnings("unchecked")
-  private VBarDataList rangeNonStackGraph(String xAxisField, String yAxisField,
-      String from, String to, String unit, SolrDaoBase solrDaoBase,
-      String typeXAxis, String fieldTime, SolrQuery solrQuery) {
+  private VBarDataList rangeNonStackGraph(String xAxisField, String yAxisField, String from, String to, String unit,
+      SolrDaoBase solrDaoBase, String typeXAxis, String fieldTime, SolrQuery solrQuery) {
     VBarDataList dataList = new VBarDataList();
     Collection<VBarGraphData> vBarGraphDatas = new ArrayList<VBarGraphData>();
     VBarGraphData vBarGraphData = new VBarGraphData();
@@ -330,8 +282,7 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
     queryGenerator.setMainQuery(solrQuery, null);
     if (solrUtil.isSolrFieldNumber(typeXAxis,solrDaoBase)) {
       queryGenerator.setSingleRangeFilter(solrQuery, fieldTime, from, to);
-      return normalGraph(xAxisField, yAxisField, from, to, solrDaoBase,
-          typeXAxis, fieldTime, solrQuery);
+      return normalGraph(xAxisField, yAxisField, from, to, solrDaoBase, typeXAxis, fieldTime, solrQuery);
     } else {
       try {
         queryGenerator.setFacetRange(solrQuery, xAxisField, from, to, unit);
@@ -360,44 +311,36 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
         }
         return dataList;
       } catch (SolrException | SolrServerException | IOException e) {
-        logger.error("Got exception for solr query :" + solrQuery,
-            e.getCause());
+        logger.error("Got exception for solr query :" + solrQuery, e.getCause());
       }
     }
     return null;
   }
 
   @SuppressWarnings("unchecked")
-  private VBarDataList rangeStackGraph(String xAxisField, String stackField,
-      String from, String to, String unit, SolrDaoBase solrDaoBase,
-      SolrQuery solrQuery) {
+  private VBarDataList rangeStackGraph(String xAxisField, String stackField, String from, String to, String unit,
+      SolrDaoBase solrDaoBase, SolrQuery solrQuery) {
     VBarDataList dataList = new VBarDataList();
     List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();
     queryGenerator.setMainQuery(solrQuery, null);
     queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
-    String jsonHistogramQuery = queryGenerator
-        .buildJSONFacetTermTimeRangeQuery(stackField, xAxisField, from, to,
-            unit).replace("\\", "");
+    String jsonHistogramQuery =
+        queryGenerator.buildJSONFacetTermTimeRangeQuery(stackField, xAxisField, from, to, unit).replace("\\", "");
     try {
       solrQuery.set("json.facet", jsonHistogramQuery);
       queryGenerator.setRowCount(solrQuery, 0);
       QueryResponse response = solrDaoBase.process(solrQuery);
       if (response != null) {
-        SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
-            .getResponse().get("facets");
-        if (jsonFacetResponse == null
-            || jsonFacetResponse.toString().equals("{count=0}")) {
-          // return
+        SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response.getResponse().get("facets");
+        if (jsonFacetResponse == null || jsonFacetResponse.toString().equals("{count=0}")) {
           return dataList;
         }
-        extractRangeStackValuesFromBucket(jsonFacetResponse, "x", "y",
-            histogramData);
+        extractRangeStackValuesFromBucket(jsonFacetResponse, "x", "y", histogramData);
         dataList.setGraphData(histogramData);
       }
       return dataList;
     } catch (SolrException | IOException | SolrServerException e) {
-      logger.error("Got exception for solr query :" + solrQuery,
-          e.getCause());
+      logger.error("Got exception for solr query :" + solrQuery, e.getCause());
     }
     return null;
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
index a813e96..49006e2 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
@@ -25,95 +25,50 @@ import java.util.List;
 
 import org.apache.ambari.logsearch.manager.MgrBase;
 import org.apache.ambari.logsearch.util.DateUtil;
-import org.apache.ambari.logsearch.util.StringUtil;
 import org.apache.ambari.logsearch.view.VBarGraphData;
 import org.apache.ambari.logsearch.view.VNameValue;
-import org.apache.solr.client.solrj.response.RangeFacet;
+import org.apache.commons.lang.StringUtils;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.springframework.beans.factory.annotation.Autowired;
 
-public class GraphDataGeneratorBase extends MgrBase {
-
-
-  @Autowired
-  DateUtil dateUtil;
+class GraphDataGeneratorBase extends MgrBase {
 
   @Autowired
-  StringUtil stringUtil;
+  private DateUtil dateUtil;
 
-  private static String BUCKETS = "buckets";
+  private static final String BUCKETS = "buckets";
   
-  public static enum DATA_TYPE {
-    LONG {
-      @Override
-      String getType() {
-        return "long";
-      }
-    },
-    DOUBLE {
-      @Override
-      String getType() {
-        return "double";
-      }
-    },
-    FLOAT {
-      @Override
-      String getType() {
-        return "long";
-      }
-    },
-    INT {
-      @Override
-      String getType() {
-        return "long";
-      }
-
-    };
-    abstract String getType();
+  private static enum DataType {
+    LONG("long"),
+    DOUBLE("double"),
+    FLOAT("long"),
+    INT("long");
+    
+    private String type;
+    
+    DataType(String type) {
+      this.type = type;
+    }
+    
+    String getType() {
+      return type;
+    }
   }
 
-  public static enum GRAPH_TYPE {
-    UNKNOWN {
-      @Override
-      int getType() {
-        return 0;
-      }
-    },
-    NORMAL_GRAPH {
-      @Override
-      int getType() {
-        return 1;
-      }
-    },
-    RANGE_NON_STACK_GRAPH {
-      @Override
-      int getType() {
-        return 2;
-      }
-    },
-    NON_RANGE_STACK_GRAPH {
-      @Override
-      int getType() {
-        return 3;
-      }
-    },
-    RANGE_STACK_GRAPH {
-      @Override
-      int getType() {
-        return 4;
-      }
-    };
-    abstract int getType();
+  protected static enum GraphType {
+    UNKNOWN,
+    NORMAL_GRAPH,
+    RANGE_NON_STACK_GRAPH,
+    NON_RANGE_STACK_GRAPH,
+    RANGE_STACK_GRAPH;
   }
 
   @SuppressWarnings("unchecked")
-  protected void extractRangeStackValuesFromBucket(
-      SimpleOrderedMap<Object> jsonFacetResponse, String outerField,
+  protected void extractRangeStackValuesFromBucket(SimpleOrderedMap<Object> jsonFacetResponse, String outerField,
       String innerField, List<VBarGraphData> histogramData) {
     if (jsonFacetResponse != null) {
-      NamedList<Object> stack = (NamedList<Object>) jsonFacetResponse
-          .get(outerField);
+      NamedList<Object> stack = (NamedList<Object>) jsonFacetResponse.get(outerField);
       if (stack != null) {
         ArrayList<Object> stackBuckets = (ArrayList<Object>) stack.get(BUCKETS);
         if (stackBuckets != null) {
@@ -121,26 +76,19 @@ public class GraphDataGeneratorBase extends MgrBase {
             VBarGraphData vBarGraphData = new VBarGraphData();
             SimpleOrderedMap<Object> level = (SimpleOrderedMap<Object>) stackBucket;
             if (level != null) {
-              String name = level.getVal(0) != null ? level.getVal(0)
-                  .toString().toUpperCase() : "";
+              String name = level.getVal(0) != null ? level.getVal(0).toString().toUpperCase() : "";
               vBarGraphData.setName(name);
               Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
-              NamedList<Object> innerFiledValue = (NamedList<Object>) level
-                  .get(innerField);
+              NamedList<Object> innerFiledValue = (NamedList<Object>) level.get(innerField);
               if (innerFiledValue != null) {
-                ArrayList<Object> levelBuckets = (ArrayList<Object>) innerFiledValue
-                    .get(BUCKETS);
+                ArrayList<Object> levelBuckets = (ArrayList<Object>) innerFiledValue.get(BUCKETS);
                 if (levelBuckets != null) {
                   for (Object levelBucket : levelBuckets) {
                     SimpleOrderedMap<Object> countValue = (SimpleOrderedMap<Object>) levelBucket;
                     if (countValue != null) {
-                      String innerName = dateUtil
-                          .convertDateWithMillisecondsToSolrDate((Date) countValue
-                              .getVal(0));
-                      String innerValue = countValue.getVal(1) != null ? countValue
-                          .getVal(1).toString() : "";
-                      VNameValue vNameValue = new VNameValue(innerName,
-                          innerValue);
+                      String innerName = dateUtil.convertDateWithMillisecondsToSolrDate((Date) countValue.getVal(0));
+                      String innerValue = countValue.getVal(1) != null ? countValue.getVal(1).toString() : "";
+                      VNameValue vNameValue = new VNameValue(innerName, innerValue);
                       vNameValues.add(vNameValue);
                     }
                   }
@@ -156,27 +104,23 @@ public class GraphDataGeneratorBase extends MgrBase {
   }
 
   @SuppressWarnings("unchecked")
-  protected boolean extractNonRangeStackValuesFromBucket(
-      SimpleOrderedMap<Object> jsonFacetResponse, String level,
+  protected boolean extractNonRangeStackValuesFromBucket(SimpleOrderedMap<Object> jsonFacetResponse, String level,
       Collection<VBarGraphData> vGraphDatas, String typeXAxis) {
     boolean zeroFlag = true;
     if (jsonFacetResponse == null || jsonFacetResponse.get(level) == null
         || jsonFacetResponse.get(level).toString().equals("{count=0}")) {
       return false;
     }
-    NamedList<Object> levelList = (NamedList<Object>) jsonFacetResponse
-        .get(level);
+    NamedList<Object> levelList = (NamedList<Object>) jsonFacetResponse.get(level);
     if (levelList != null) {
       ArrayList<Object> bucketList = (ArrayList<Object>) levelList.get(BUCKETS);
       if (bucketList != null) {
         for (int index = 0; index < bucketList.size(); index++) {
-          SimpleOrderedMap<Object> valueCount = (SimpleOrderedMap<Object>) bucketList
-              .get(index);
+          SimpleOrderedMap<Object> valueCount = (SimpleOrderedMap<Object>) bucketList.get(index);
           if (valueCount != null && valueCount.size() > 2) {
             VBarGraphData vGraphData = new VBarGraphData();
             Collection<VNameValue> levelCounts = new ArrayList<VNameValue>();
-            String name = valueCount.getVal(0) != null ? valueCount.getVal(0)
-                .toString().trim() : "";
+            String name = valueCount.getVal(0) != null ? valueCount.getVal(0).toString().trim() : "";
             if (isTypeNumber(typeXAxis)) {
               VNameValue nameValue = new VNameValue();
               Double sumValue = (Double) valueCount.getVal(2);
@@ -188,24 +132,16 @@ public class GraphDataGeneratorBase extends MgrBase {
               nameValue.setValue(value);
               levelCounts.add(nameValue);
             } else {
-              SimpleOrderedMap<Object> valueCountMap = (SimpleOrderedMap<Object>) valueCount
-                  .getVal(2);
+              SimpleOrderedMap<Object> valueCountMap = (SimpleOrderedMap<Object>) valueCount.getVal(2);
               if (valueCountMap != null) {
-                ArrayList<Object> buckets = (ArrayList<Object>) valueCountMap
-                    .get(BUCKETS);
+                ArrayList<Object> buckets = (ArrayList<Object>) valueCountMap.get(BUCKETS);
                 if (buckets != null) {
                   for (int innerIndex = 0; innerIndex < buckets.size(); innerIndex++) {
-                    SimpleOrderedMap<Object> innerValueCount = (SimpleOrderedMap<Object>) buckets
-                        .get(innerIndex);
+                    SimpleOrderedMap<Object> innerValueCount = (SimpleOrderedMap<Object>) buckets.get(innerIndex);
                     if (innerValueCount != null) {
-                      String innerName = innerValueCount.getVal(0) != null ? innerValueCount
-                          .getVal(0).toString().trim()
-                          : "";
-                      String innerValue = innerValueCount.getVal(1) != null ? innerValueCount
-                          .getVal(1).toString().trim()
-                          : "";
-                      VNameValue nameValue = new VNameValue(innerName,
-                          innerValue);
+                      String innerName = innerValueCount.getVal(0) != null ? innerValueCount.getVal(0).toString().trim() : "";
+                      String innerValue = innerValueCount.getVal(1) != null ? innerValueCount.getVal(1).toString().trim() : "";
+                      VNameValue nameValue = new VNameValue(innerName, innerValue);
                       levelCounts.add(nameValue);
                     }
                   }
@@ -222,59 +158,12 @@ public class GraphDataGeneratorBase extends MgrBase {
     return zeroFlag;
   }
 
-  @SuppressWarnings("unchecked")
-  protected boolean extractValuesFromJson(
-      SimpleOrderedMap<Object> jsonFacetResponse, String level,
-      VBarGraphData histogramData, List<RangeFacet.Count> counts) {
-    histogramData.setName(level);
-    Collection<VNameValue> levelCounts = new ArrayList<VNameValue>();
-    histogramData.setDataCounts(levelCounts);
-    boolean zeroFlag = true;
-    if (jsonFacetResponse.get(level).toString().equals("{count=0}")) {
-      if (counts != null) {
-        for (RangeFacet.Count date : counts) {
-          VNameValue nameValue = new VNameValue();
-          nameValue.setName(date.getValue());
-          nameValue.setValue("0");
-          levelCounts.add(nameValue);
-        }
-      }
-      return false;
-    }
-    NamedList<Object> levelList = (NamedList<Object>) jsonFacetResponse
-        .get(level);
-    if (levelList != null && counts != null && levelList.size() > 1) {
-      NamedList<Object> levelValues = (NamedList<Object>) levelList.getVal(1);
-      if (levelValues != null) {
-        ArrayList<Object> bucketList = (ArrayList<Object>) levelValues
-            .get(BUCKETS);
-        int i = 0;
-        for (RangeFacet.Count date : counts) {
-          SimpleOrderedMap<Object> valueCount = (SimpleOrderedMap<Object>) bucketList
-              .get(i);
-          if (valueCount != null) {
-            Double count = (Double) valueCount.getVal(1);
-            if (count != null && !count.equals(0D)) {
-              zeroFlag = false;
-            }
-            String name = date.getValue();
-            String value = count != null ? "" + count.longValue() : "0";
-            VNameValue nameValue = new VNameValue(name, value);
-            levelCounts.add(nameValue);
-          }
-          i++;
-        }
-      }
-    }
-    return zeroFlag;
-  }
-
   protected boolean isTypeNumber(String typeXAxis) {
-    if (stringUtil.isEmpty(typeXAxis)) {
+    if (StringUtils.isBlank(typeXAxis)) {
       return false;
     } else {
-      return typeXAxis.contains(DATA_TYPE.LONG.getType()) || typeXAxis.contains(DATA_TYPE.INT.getType())
-          || typeXAxis.contains(DATA_TYPE.FLOAT.getType()) || typeXAxis.contains(DATA_TYPE.DOUBLE.getType());
+      return typeXAxis.contains(DataType.LONG.getType()) || typeXAxis.contains(DataType.INT.getType())
+          || typeXAxis.contains(DataType.FLOAT.getType()) || typeXAxis.contains(DataType.DOUBLE.getType());
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
index ab287bc..947fdbb 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
@@ -41,10 +41,8 @@ import org.apache.ambari.logsearch.graph.GraphDataGenerator;
 import org.apache.ambari.logsearch.util.BizUtil;
 import org.apache.ambari.logsearch.util.ConfigUtil;
 import org.apache.ambari.logsearch.util.DateUtil;
-import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
-import org.apache.ambari.logsearch.util.StringUtil;
 import org.apache.ambari.logsearch.view.VBarDataList;
 import org.apache.ambari.logsearch.view.VBarGraphData;
 import org.apache.ambari.logsearch.view.VGroupList;
@@ -69,54 +67,41 @@ import org.springframework.stereotype.Component;
 
 @Component
 public class AuditMgr extends MgrBase {
-  static Logger logger = Logger.getLogger(AuditMgr.class); 
+  private static final Logger logger = Logger.getLogger(AuditMgr.class); 
 
   @Autowired
-  AuditSolrDao auditSolrDao;
-
+  private AuditSolrDao auditSolrDao;
   @Autowired
-  RESTErrorUtil restErrorUtil;
-
+  private RESTErrorUtil restErrorUtil;
   @Autowired
-  JSONUtil jsonUtil;
-
+  private BizUtil bizUtil;
   @Autowired
-  StringUtil stringUtil;
-
+  private DateUtil dateUtil;
   @Autowired
-  BizUtil bizUtil;
-
-  @Autowired
-  DateUtil dateUtil;
-
-  @Autowired
-  GraphDataGenerator graphDataGenerator;
+  private GraphDataGenerator graphDataGenerator;
 
   public String getLogs(SearchCriteria searchCriteria) {
     String lastPage = (String)  searchCriteria.getParamValue("isLastPage");
     Boolean isLastPage = Boolean.parseBoolean(lastPage);
      if (isLastPage) {
        SolrQuery lastPageQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-      VSolrLogList collection = getLastPage(searchCriteria,LogSearchConstants.AUDIT_EVTTIME,auditSolrDao,lastPageQuery);
+      VSolrLogList collection = getLastPage(searchCriteria, LogSearchConstants.AUDIT_EVTTIME, auditSolrDao, lastPageQuery);
       if(collection == null){
         collection = new VSolrLogList();
       }
       return convertObjToString(collection);
     }
     SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-    VSolrLogList collection = getLogAsPaginationProvided(solrQuery,
-        auditSolrDao);
+    VSolrLogList collection = getLogAsPaginationProvided(solrQuery, auditSolrDao);
     return convertObjToString(collection);
 
   }
 
-  public SolrDocumentList getComponents(SearchCriteria searchCriteria) {
-    SolrQuery solrQuery = queryGenerator
-      .commonAuditFilterQuery(searchCriteria);
+  private SolrDocumentList getComponents(SearchCriteria searchCriteria) {
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
     SolrDocumentList docList = new SolrDocumentList();
     try {
-      queryGenerator.setFacetField(solrQuery,
-        LogSearchConstants.AUDIT_COMPONENT);
+      queryGenerator.setFacetField(solrQuery, LogSearchConstants.AUDIT_COMPONENT);
       queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
       List<FacetField> facetFields = null;
       List<Count> componentsCount = new ArrayList<Count>();
@@ -146,8 +131,7 @@ public class AuditMgr extends MgrBase {
       return docList;
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -167,14 +151,10 @@ public class AuditMgr extends MgrBase {
     String from = getFrom((String) searchCriteria.getParamValue("startTime"));
     String to = getTo((String) searchCriteria.getParamValue("endTime"));
     String unit = getUnit((String) searchCriteria.getParamValue("unit"));
-    
-    
 
     List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();
-    String jsonHistogramQuery = queryGenerator.buildJSONFacetTermTimeRangeQuery(
-      LogSearchConstants.AUDIT_COMPONENT,
-      LogSearchConstants.AUDIT_EVTTIME, from, to, unit).replace("\\",
-      "");
+    String jsonHistogramQuery = queryGenerator.buildJSONFacetTermTimeRangeQuery(LogSearchConstants.AUDIT_COMPONENT,
+      LogSearchConstants.AUDIT_EVTTIME, from, to, unit).replace("\\", "");
 
     try {
       queryGenerator.setJSONFacet(solrQuery, jsonHistogramQuery);
@@ -183,24 +163,20 @@ public class AuditMgr extends MgrBase {
       if (response == null){
         return convertObjToString(dataList);
       }
-      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
-        .getResponse().get("facets");
+      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response.getResponse().get("facets");
 
-      if (jsonFacetResponse == null
-        || jsonFacetResponse.toString().equals("{count=0}")){
+      if (jsonFacetResponse == null || jsonFacetResponse.toString().equals("{count=0}")) {
         return convertObjToString(dataList);
       }
 
-      extractValuesFromBucket(jsonFacetResponse, "x", "y",
-        histogramData);
+      extractValuesFromBucket(jsonFacetResponse, "x", "y", histogramData);
 
       dataList.setGraphData(histogramData);
       return convertObjToString(dataList);
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error(e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
 
     }
   }
@@ -212,8 +188,7 @@ public class AuditMgr extends MgrBase {
     if (top == null){
       top = new Integer(topCounts);
     }
-    SolrQuery solrQuery = queryGenerator
-      .commonAuditFilterQuery(searchCriteria);
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
     try {
 
       List<VNameValue> nameValues = new ArrayList<VNameValue>();
@@ -226,13 +201,12 @@ public class AuditMgr extends MgrBase {
 
       List<Count> countList = new ArrayList<FacetField.Count>();
       QueryResponse queryResponse = auditSolrDao.process(solrQuery);
-      if(queryResponse == null){
+      if (queryResponse == null) {
         return convertObjToString(nameValueList);
       }
       
       if (queryResponse.getFacetField(facetField) != null) {
-        FacetField queryFacetField = queryResponse
-          .getFacetField(facetField);
+        FacetField queryFacetField = queryResponse.getFacetField(facetField);
         if (queryFacetField != null) {
           countList = queryFacetField.getValues();
         }
@@ -249,8 +223,7 @@ public class AuditMgr extends MgrBase {
 
     } catch (SolrException | IOException | SolrServerException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -260,16 +233,12 @@ public class AuditMgr extends MgrBase {
     SolrQuery solrQuery = new SolrQuery();
     solrQuery.setParam("event", "/audit/logs/live/count");
     try {
-      String startDate = dateUtil
-        .convertGivenDateFormatToSolrDateFormat(ManageStartEndTime.startDate);
-
-      String endDate = dateUtil
-        .convertGivenDateFormatToSolrDateFormat(ManageStartEndTime.endDate);
+      Date[] timeRange = ManageStartEndTime.getStartEndTime();
+      String startDate = dateUtil.convertGivenDateFormatToSolrDateFormat(timeRange[0]);
+      String endDate = dateUtil.convertGivenDateFormatToSolrDateFormat(timeRange[1]);
 
       queryGenerator.setMainQuery(solrQuery, null);
-      queryGenerator.setFacetRange(solrQuery,
-        LogSearchConstants.AUDIT_EVTTIME, startDate, endDate,
-        "+2MINUTE");
+      queryGenerator.setFacetRange(solrQuery, LogSearchConstants.AUDIT_EVTTIME, startDate, endDate, "+2MINUTE");
       List<RangeFacet.Count> listCount;
 
       QueryResponse response = auditSolrDao.process(solrQuery);
@@ -301,22 +270,21 @@ public class AuditMgr extends MgrBase {
     } catch (SolrException | SolrServerException | ParseException
       | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
   public String topTenUsers(SearchCriteria searchCriteria) {
 
-    String jsonUserQuery = "{Users:{type:terms, field:reqUser, facet:{ Repo:{ type:terms, field:repo, facet:{eventCount:\"sum(event_count)\"}}}}}";
-    SolrQuery solrQuery = queryGenerator
-      .commonAuditFilterQuery(searchCriteria);
+    String jsonUserQuery =
+        "{Users:{type:terms, field:reqUser, facet:{ Repo:{ type:terms, field:repo, facet:{eventCount:\"sum(event_count)\"}}}}}";
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
     queryGenerator.setJSONFacet(solrQuery, jsonUserQuery);
     queryGenerator.setRowCount(solrQuery, 0);
     try {
       VBarDataList vBarDataList = new VBarDataList();
       QueryResponse queryResponse = auditSolrDao.process(solrQuery);
-      if(queryResponse == null){
+      if (queryResponse == null) {
         return convertObjToString(vBarDataList);
       }
 
@@ -327,12 +295,11 @@ public class AuditMgr extends MgrBase {
       }
 
       @SuppressWarnings("unchecked")
-      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList
-        .get("facets");
-      if(jsonFacetResponse == null){
+      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList.get("facets");
+      if (jsonFacetResponse == null) {
         return convertObjToString(vBarDataList);
       }
-      if(jsonFacetResponse.toString().equals("{count=0}")){
+      if (jsonFacetResponse.toString().equals("{count=0}")) {
         return convertObjToString(vBarDataList);
       }
       vBarDataList = bizUtil.buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
@@ -340,16 +307,15 @@ public class AuditMgr extends MgrBase {
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error("Error during solrQuery=" + e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
   public String topTenResources(SearchCriteria searchCriteria) {
 
-    String jsonUserQuery = "{Users:{type:terms,field:resource,facet:{Repo:{type:terms,field:repo,facet:{eventCount:\"sum(event_count)\"}}}}}";
-    SolrQuery solrQuery = queryGenerator
-      .commonAuditFilterQuery(searchCriteria);
+    String jsonUserQuery =
+        "{Users:{type:terms,field:resource,facet:{Repo:{type:terms,field:repo,facet:{eventCount:\"sum(event_count)\"}}}}}";
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
     queryGenerator.setJSONFacet(solrQuery, jsonUserQuery);
     queryGenerator.setRowCount(solrQuery, 0);
     try {
@@ -365,16 +331,14 @@ public class AuditMgr extends MgrBase {
       }
 
       @SuppressWarnings("unchecked")
-      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList
-        .get("facets");
+      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList.get("facets");
 
       vBarDataList = bizUtil.buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
       return convertObjToString(vBarDataList);
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -385,19 +349,15 @@ public class AuditMgr extends MgrBase {
     String to = getTo((String) searchCriteria.getParamValue("endTime"));
     String unit = getUnit((String) searchCriteria.getParamValue("unit"));
     
-    SolrQuery solrQuery = queryGenerator
-      .commonAuditFilterQuery(searchCriteria);
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
 
     VBarDataList dataList = new VBarDataList();
     List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();
 
     queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
 
-    String jsonHistogramQuery = queryGenerator
-      .buildJSONFacetTermTimeRangeQuery(
-        LogSearchConstants.AUDIT_REQUEST_USER,
-        LogSearchConstants.AUDIT_EVTTIME, from, to, unit)
-      .replace("\\", "");
+    String jsonHistogramQuery = queryGenerator.buildJSONFacetTermTimeRangeQuery(LogSearchConstants.AUDIT_REQUEST_USER,
+        LogSearchConstants.AUDIT_EVTTIME, from, to, unit).replace("\\", "");
 
     try {
       queryGenerator.setJSONFacet(solrQuery, jsonHistogramQuery);
@@ -406,11 +366,9 @@ public class AuditMgr extends MgrBase {
       if (response == null){
         return convertObjToString(dataList);
       }
-      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
-        .getResponse().get("facets");
+      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response.getResponse().get("facets");
 
-      if (jsonFacetResponse == null
-        || jsonFacetResponse.toString().equals("{count=0}")){
+      if (jsonFacetResponse == null || jsonFacetResponse.toString().equals("{count=0}")) {
         return convertObjToString(dataList);
       }
       extractValuesFromBucket(jsonFacetResponse, "x", "y", histogramData);
@@ -420,63 +378,19 @@ public class AuditMgr extends MgrBase {
 
     } catch (SolrException | IOException | SolrServerException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
 
   }
 
-  public SolrDocumentList getRequestUser(SearchCriteria searchCriteria) {
-    SolrDocumentList docList = new SolrDocumentList();
-    SolrQuery solrQuery = queryGenerator
-      .commonAuditFilterQuery(searchCriteria);
-    try {
-      queryGenerator.setFacetField(solrQuery,
-        LogSearchConstants.AUDIT_REQUEST_USER);
-      queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
-      List<FacetField> facetFields = null;
-      List<Count> componentsCount = new ArrayList<Count>();
-      FacetField facetField = null;
-
-      QueryResponse queryResponse = auditSolrDao.process(solrQuery);
-      if (queryResponse == null) {
-        return docList;
-      }
-
-      facetFields = queryResponse.getFacetFields();
-      if (facetFields == null) {
-        return docList;
-      }
-      if (!facetFields.isEmpty()) {
-        facetField = facetFields.get(0);
-      }
-      if (facetField != null) {
-        componentsCount = facetField.getValues();
-      }
-     
-      for (Count compnonet : componentsCount) {
-        SolrDocument solrDocument = new SolrDocument();
-        solrDocument.addField("type", compnonet.getName());
-        docList.add(solrDocument);
-      }
-      return docList;
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-  }
-
   public String getAuditLogsSchemaFieldsName() {
-    String excludeArray[] = PropertiesUtil
-        .getPropertyStringList("logsearch.solr.audit.logs.exclude.columnlist");
+    String excludeArray[] = PropertiesUtil.getPropertyStringList("logsearch.solr.audit.logs.exclude.columnlist");
     List<String> fieldNames = new ArrayList<String>();
     HashMap<String, String> uiFieldColumnMapping = new HashMap<String, String>();
     ConfigUtil.getSchemaFieldsName(excludeArray, fieldNames,auditSolrDao);
 
     for (String fieldName : fieldNames) {
-      String uiField = ConfigUtil.auditLogsColumnMapping.get(fieldName
-          + LogSearchConstants.SOLR_SUFFIX);
+      String uiField = ConfigUtil.auditLogsColumnMapping.get(fieldName + LogSearchConstants.SOLR_SUFFIX);
       if (uiField == null) {
         uiFieldColumnMapping.put(fieldName, fieldName);
       } else {
@@ -492,8 +406,7 @@ public class AuditMgr extends MgrBase {
   public String getAnyGraphData(SearchCriteria searchCriteria) {
     searchCriteria.addParam("fieldTime", LogSearchConstants.AUDIT_EVTTIME);
     SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-    VBarDataList result = graphDataGenerator.getAnyGraphData(searchCriteria,
-        auditSolrDao, solrQuery);
+    VBarDataList result = graphDataGenerator.getAnyGraphData(searchCriteria, auditSolrDao, solrQuery);
     if (result == null) {
       result = new VBarDataList();
     }
@@ -502,13 +415,10 @@ public class AuditMgr extends MgrBase {
   }
 
   @SuppressWarnings("unchecked")
-  public void extractValuesFromBucket(
-    SimpleOrderedMap<Object> jsonFacetResponse, String outerField,
-    String innerField, List<VBarGraphData> histogramData) {
-    NamedList<Object> stack = (NamedList<Object>) jsonFacetResponse
-      .get(outerField);
-    ArrayList<Object> stackBuckets = (ArrayList<Object>) stack
-      .get("buckets");
+  private void extractValuesFromBucket(SimpleOrderedMap<Object> jsonFacetResponse, String outerField, String innerField,
+      List<VBarGraphData> histogramData) {
+    NamedList<Object> stack = (NamedList<Object>) jsonFacetResponse.get(outerField);
+    ArrayList<Object> stackBuckets = (ArrayList<Object>) stack.get("buckets");
     for (Object temp : stackBuckets) {
       VBarGraphData vBarGraphData = new VBarGraphData();
 
@@ -518,13 +428,10 @@ public class AuditMgr extends MgrBase {
 
       Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
       vBarGraphData.setDataCounts(vNameValues);
-      ArrayList<Object> levelBuckets = (ArrayList<Object>) ((NamedList<Object>) level
-        .get(innerField)).get("buckets");
+      ArrayList<Object> levelBuckets = (ArrayList<Object>) ((NamedList<Object>) level.get(innerField)).get("buckets");
       for (Object temp1 : levelBuckets) {
         SimpleOrderedMap<Object> countValue = (SimpleOrderedMap<Object>) temp1;
-        String value = dateUtil
-          .convertDateWithMillisecondsToSolrDate((Date) countValue
-            .getVal(0));
+        String value = dateUtil.convertDateWithMillisecondsToSolrDate((Date) countValue.getVal(0));
 
         String count = "" + countValue.getVal(1);
         VNameValue vNameValue = new VNameValue();
@@ -536,12 +443,12 @@ public class AuditMgr extends MgrBase {
     }
   }
 
-  @SuppressWarnings({"unchecked", "resource"})
+  @SuppressWarnings({"unchecked"})
   public Response exportUserTableToTextFile(SearchCriteria searchCriteria) {
-    String jsonUserQuery = "{ Users: { type: terms, field: reqUser, facet:  {Repo: {  type: terms, field: repo, facet: {  eventCount: \"sum(event_count)\"}}}},x:{ type: terms,field: resource, facet: {y: {  type: terms, field: repo,facet: {  eventCount: \"sum(event_count)\"}}}}}";
+    String jsonUserQuery =
+        "{ Users: { type: terms, field: reqUser, facet:  {Repo: {  type: terms, field: repo, facet: {  eventCount: \"sum(event_count)\"}}}},x:{ type: terms,field: resource, facet: {y: {  type: terms, field: repo,facet: {  eventCount: \"sum(event_count)\"}}}}}";
 
-    SolrQuery solrQuery = queryGenerator
-      .commonAuditFilterQuery(searchCriteria);
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
     String startTime = (String) searchCriteria.getParamValue("startTime");
     String endTime = (String) searchCriteria.getParamValue("endTime");
 
@@ -562,7 +469,7 @@ public class AuditMgr extends MgrBase {
       }
 
       NamedList<Object> namedList = queryResponse.getResponse();
-      if(namedList == null){
+      if (namedList == null) {
         VResponse response = new VResponse();
         response.setMsgDesc("Query was not able to execute "+solrQuery);
         throw restErrorUtil.createRESTException(response);
@@ -570,12 +477,9 @@ public class AuditMgr extends MgrBase {
       VBarDataList vBarUserDataList = new VBarDataList();
       VBarDataList vBarResourceDataList = new VBarDataList();
 
-      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList
-        .get("facets");
-      vBarUserDataList = bizUtil
-        .buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
-      vBarResourceDataList = bizUtil
-        .buildSummaryForTopCounts(jsonFacetResponse,"y","x");
+      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList.get("facets");
+      vBarUserDataList = bizUtil.buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
+      vBarResourceDataList = bizUtil.buildSummaryForTopCounts(jsonFacetResponse,"y","x");
       String data = "";
       String summary = "";
       if ("text".equals(dataFormat)) {
@@ -584,8 +488,7 @@ public class AuditMgr extends MgrBase {
         summary += "\n\n\n\n";
         data += addBlank("Users") + "Components/Access" + "\n";
         data += "--------------------------------------------------------------------------\n";
-        Collection<VBarGraphData> tableUserData = vBarUserDataList
-          .getGraphData();
+        Collection<VBarGraphData> tableUserData = vBarUserDataList.getGraphData();
         for (VBarGraphData graphData : tableUserData) {
           String userName = graphData.getName();
           String largeUserName = "";
@@ -596,13 +499,11 @@ public class AuditMgr extends MgrBase {
           } else
             data += addBlank(userName);
 
-          Collection<VNameValue> vnameValueList = graphData
-            .getDataCount();
+          Collection<VNameValue> vnameValueList = graphData.getDataCount();
           int count = 0;
           String blank = "";
           for (VNameValue vNameValue : vnameValueList) {
-            data += blank + vNameValue.getName() + " "
-              + vNameValue.getValue() + "\n";
+            data += blank + vNameValue.getName() + " " + vNameValue.getValue() + "\n";
             if (count == 0)
               blank = addBlank(blank);
             count++;
@@ -617,8 +518,7 @@ public class AuditMgr extends MgrBase {
         data += "\n\n\n\n\n\n";
         data += addBlank("Resources") + "Components/Access" + "\n";
         data += "--------------------------------------------------------------------------\n";
-        Collection<VBarGraphData> tableResourceData = vBarResourceDataList
-          .getGraphData();
+        Collection<VBarGraphData> tableResourceData = vBarResourceDataList.getGraphData();
         for (VBarGraphData graphData : tableResourceData) {
           String resourceName = graphData.getName();
           String largeResourceName = resourceName;
@@ -631,13 +531,11 @@ public class AuditMgr extends MgrBase {
 
           //resourceName = resourceName.replaceAll("(.{45})", resourceName.substring(0, 45)+"\n");
           data += addBlank(resourceName);
-          Collection<VNameValue> vnameValueList = graphData
-            .getDataCount();
+          Collection<VNameValue> vnameValueList = graphData.getDataCount();
           int count = 0;
           String blank = "";
           for (VNameValue vNameValue : vnameValueList) {
-            data += blank + vNameValue.getName() + " "
-              + vNameValue.getValue() + "\n";
+            data += blank + vNameValue.getName() + " " + vNameValue.getValue() + "\n";
             if (count == 0)
               blank = addBlank(blank);
             count++;
@@ -660,22 +558,19 @@ public class AuditMgr extends MgrBase {
         data = "{" + convertObjToString(vBarUserDataList) + "," + convertObjToString(vBarResourceDataList) + "}";
         dataFormat = "json";
       }
-      String fileName = "Users_Resource" + startTime + endTime
-        + ".";
+      String fileName = "Users_Resource" + startTime + endTime + ".";
       File file = File.createTempFile(fileName, dataFormat);
 
       fis = new FileOutputStream(file);
       fis.write(data.getBytes());
       return Response
         .ok(file, MediaType.APPLICATION_OCTET_STREAM)
-        .header("Content-Disposition",
-          "attachment;filename=" + fileName + dataFormat)
+        .header("Content-Disposition", "attachment;filename=" + fileName + dataFormat)
         .build();
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error("Error during solrQuery=" + e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     } finally {
       if (fis != null) {
         try {
@@ -704,21 +599,18 @@ public class AuditMgr extends MgrBase {
     SolrQuery serivceLoadQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
 
     try {
-      queryGenerator.setFacetField(serivceLoadQuery,
-        LogSearchConstants.AUDIT_COMPONENT);
-      QueryResponse serviceLoadResponse = auditSolrDao
-        .process(serivceLoadQuery);
+      queryGenerator.setFacetField(serivceLoadQuery, LogSearchConstants.AUDIT_COMPONENT);
+      QueryResponse serviceLoadResponse = auditSolrDao.process(serivceLoadQuery);
       if (serviceLoadResponse == null){
         return convertObjToString(dataList);
       }
-      FacetField serviceFacetField =serviceLoadResponse.getFacetField(
-          LogSearchConstants.AUDIT_COMPONENT);
-      if(serviceFacetField == null){
+      FacetField serviceFacetField =serviceLoadResponse.getFacetField(LogSearchConstants.AUDIT_COMPONENT);
+      if (serviceFacetField == null) {
         return convertObjToString(dataList);
       }
       
       List<Count> serviceLoadFacets = serviceFacetField.getValues();
-      if(serviceLoadFacets == null){
+      if (serviceLoadFacets == null) {
         return convertObjToString(dataList);
       }
       for (Count cnt : serviceLoadFacets) {
@@ -733,70 +625,11 @@ public class AuditMgr extends MgrBase {
         vBarGraphData.setDataCounts(valueList);
       }
 
-
       return convertObjToString(dataList);
 
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
-    
-  /*  
-    String preDefinedJSON = getHadoopServiceConfigJSON();
-    try {
-      JSONObject serviceJSON = new JSONObject(preDefinedJSON).getJSONObject("service");
-      HashMap<String, Object> serviceMap = jsonUtil.jsonToMapObject(serviceJSON.toString());
-      Iterator<Entry<String, Object>> serviceMapIterator= serviceMap.entrySet().iterator();
-      List<VNameValue> newValueList = new ArrayList<VNameValue>();
-      for (VNameValue vNameValue : valueList) {
-        String name=vNameValue.getName();
-        while (serviceMapIterator.hasNext()) {
-          Map.Entry<String, Object> tempMap = serviceMapIterator
-              .next();
-          
-          String keyName = tempMap.getKey();
-          
-          JSONObject valueObj = new JSONObject(tempMap.toString().replace(keyName+"=", ""));
-          if(name.contains(keyName.toLowerCase())){
-            vNameValue.setName(valueObj.getString("label"));
-            break;
-          }
-          JSONArray componentsArray = valueObj.getJSONArray("components");
-          
-          for(int i =0;i< componentsArray.length();i++){
-            JSONObject jObj = componentsArray.getJSONObject(i);
-            String jsonName = jObj.getString("name");
-            if(name.contains(jsonName.toLowerCase())){
-              vNameValue.setName(valueObj.getString("label"));
-              break;
-            }
-          }
-          
-        }
-        if(newValueList.isEmpty()){
-          newValueList.add(vNameValue);
-        }else{
-          boolean isMatch = false;
-          for(VNameValue vValue: newValueList){
-            if(vValue.getName().equalsIgnoreCase(vNameValue.getName())){
-              isMatch =true;
-              Integer cnt1 = Integer.parseInt(vValue.getValue());
-              Integer cnt2 = Integer.parseInt(vNameValue.getValue());
-              vValue.setValue((cnt1+cnt2)+"");
-            }
-          }
-          if(!isMatch)
-            newValueList.add(vNameValue);
-        }
-      }
-      vBarGraphData.setDataCounts(newValueList);
-      vBarGraphData.setName("ServiceList");
-      return convertObjToString(dataList);
-      
-    } catch (Exception e) {
-      throw restErrorUtil.createRESTException(e.getMessage(),
-          MessageEnums.ERROR_SYSTEM);
-    }*/
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
index 1bd9a78..8badb61 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileMgr.java
@@ -32,6 +32,7 @@ import org.apache.ambari.logsearch.view.VLogFile;
 import org.apache.ambari.logsearch.view.VLogFileList;
 import org.apache.ambari.logsearch.view.VSolrLogList;
 import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -46,24 +47,13 @@ import org.springframework.stereotype.Component;
 @Component
 public class LogFileMgr extends MgrBase {
 
-  private static Logger logger = Logger.getLogger(LogFileMgr.class);
-
+  private static final Logger logger = Logger.getLogger(LogFileMgr.class);
 
   @Autowired
-  ServiceLogsSolrDao serviceLogsSolrDao;
-
+  private ServiceLogsSolrDao serviceLogsSolrDao;
   @Autowired
-  AuditSolrDao auditSolrDao;
+  private AuditSolrDao auditSolrDao;
 
-  @Autowired
-  LogsMgr logMgr;
-
-  /**
-   * Search logFiles
-   *
-   * @param searchCriteria
-   * @return
-   */
   public String searchLogFiles(SearchCriteria searchCriteria) {
     VLogFileList logFileList = new VLogFileList();
     List<VLogFile> logFiles = new ArrayList<VLogFile>();
@@ -72,31 +62,26 @@ public class LogFileMgr extends MgrBase {
     int minCount = 1;// to remove zero count facet
     SolrQuery solrQuery = new SolrQuery();
     queryGenerator.setMainQuery(solrQuery, null);
-    queryGenerator.setFacetFieldWithMincount(solrQuery, LogSearchConstants.SOLR_PATH,
-        minCount);
+    queryGenerator.setFacetFieldWithMincount(solrQuery, LogSearchConstants.SOLR_PATH, minCount);
     // adding filter
-    queryGenerator.setSingleIncludeFilter(solrQuery,
-        LogSearchConstants.SOLR_COMPONENT, componentName);
-    queryGenerator.setSingleIncludeFilter(solrQuery,
-        LogSearchConstants.SOLR_HOST, host);
+    queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_COMPONENT, componentName);
+    queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_HOST, host);
     try {
       String logType = (String) searchCriteria.getParamValue("logType");
-      if (stringUtil.isEmpty(logType)) {
-        logType = LOG_TYPE.SERVICE.name();// default is service Log
+      if (StringUtils.isBlank(logType)) {
+        logType = LogType.SERVICE.name();// default is service Log
       }
       SolrDaoBase daoMgr = null;
-      if (logType.equalsIgnoreCase(LOG_TYPE.SERVICE.name())) {
+      if (logType.equalsIgnoreCase(LogType.SERVICE.name())) {
         daoMgr = serviceLogsSolrDao;
-      } else if (logType.equalsIgnoreCase(LOG_TYPE.AUDIT.name())) {
+      } else if (logType.equalsIgnoreCase(LogType.AUDIT.name())) {
         daoMgr = auditSolrDao;
       } else {
-        throw restErrorUtil.createRESTException(logType
-            + " is not a valid logType", MessageEnums.INVALID_INPUT_DATA);
+        throw restErrorUtil.createRESTException(logType + " is not a valid logType", MessageEnums.INVALID_INPUT_DATA);
       }
       QueryResponse queryResponse = daoMgr.process(solrQuery);
       if (queryResponse.getFacetField(LogSearchConstants.SOLR_PATH) != null) {
-        FacetField queryFacetField = queryResponse
-            .getFacetField(LogSearchConstants.SOLR_PATH);
+        FacetField queryFacetField = queryResponse.getFacetField(LogSearchConstants.SOLR_PATH);
         if (queryFacetField != null) {
           List<Count> countList = queryFacetField.getValues();
           for (Count count : countList) {
@@ -110,10 +95,8 @@ public class LogFileMgr extends MgrBase {
         }
       }
     } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error in solr query  :" + e.getLocalizedMessage()
-          + "\n Query :" + solrQuery.toQueryString(), e.getCause());
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      logger.error("Error in solr query  :" + e.getLocalizedMessage() + "\n Query :" + solrQuery.toQueryString(), e.getCause());
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
     logFileList.setLogFiles(logFiles);
     String jsonStr = "";
@@ -127,28 +110,22 @@ public class LogFileMgr extends MgrBase {
     String logFile = (String) searchCriteria.getParamValue("name");
     String component = (String) searchCriteria.getParamValue("component");
     String tailSize = (String) searchCriteria.getParamValue("tailSize");
-    if (stringUtil.isEmpty(host)) {
-      throw restErrorUtil.createRESTException("missing Host Name",
-        MessageEnums.ERROR_SYSTEM);
+    if (StringUtils.isBlank(host)) {
+      throw restErrorUtil.createRESTException("missing Host Name", MessageEnums.ERROR_SYSTEM);
     }
-    tailSize = (stringUtil.isEmpty(tailSize)) ? "10" : tailSize;
+    tailSize = (StringUtils.isBlank(tailSize)) ? "10" : tailSize;
     SolrQuery logFileTailQuery = new SolrQuery();
     try {
       int tail = Integer.parseInt(tailSize);
       tail = tail > 100 ? 100 : tail;
       queryGenerator.setMainQuery(logFileTailQuery, null);
-      queryGenerator.setSingleIncludeFilter(logFileTailQuery,
-        LogSearchConstants.SOLR_HOST, host);
-      if (!stringUtil.isEmpty(logFile)) {
-        queryGenerator.setSingleIncludeFilter(logFileTailQuery,
-          LogSearchConstants.SOLR_PATH,
-          solrUtil.makeSolrSearchString(logFile));
-      } else if (!stringUtil.isEmpty(component)) {
-        queryGenerator.setSingleIncludeFilter(logFileTailQuery,
-          LogSearchConstants.SOLR_COMPONENT, component);
+      queryGenerator.setSingleIncludeFilter(logFileTailQuery, LogSearchConstants.SOLR_HOST, host);
+      if (!StringUtils.isBlank(logFile)) {
+        queryGenerator.setSingleIncludeFilter(logFileTailQuery, LogSearchConstants.SOLR_PATH, solrUtil.makeSolrSearchString(logFile));
+      } else if (!StringUtils.isBlank(component)) {
+        queryGenerator.setSingleIncludeFilter(logFileTailQuery, LogSearchConstants.SOLR_COMPONENT, component);
       } else {
-        throw restErrorUtil.createRESTException("component or logfile parameter must be present",
-          MessageEnums.ERROR_SYSTEM);
+        throw restErrorUtil.createRESTException("component or logfile parameter must be present", MessageEnums.ERROR_SYSTEM);
       }
 
       queryGenerator.setRowCount(logFileTailQuery, tail);


[25/50] [abbrv] ambari git commit: AMBARI-18279. Use the PropertiesFile python command to create property files instead of jinja templates (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
AMBARI-18279. Use the PropertiesFile python command to create property files instead of jinja templates (Miklos Gergely via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3f790c8f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3f790c8f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3f790c8f

Branch: refs/heads/branch-dev-logsearch
Commit: 3f790c8f2f384a8a4cd4629f5da62f814fcaf65d
Parents: e9e834b
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Wed Sep 7 22:17:05 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:34:00 2016 +0200

----------------------------------------------------------------------
 .../configuration/logsearch-properties.xml      |   6 +-
 .../LOGSEARCH/0.5.0/package/scripts/params.py   | 125 +++++++++----------
 .../0.5.0/package/scripts/setup_logfeeder.py    |  10 +-
 .../0.5.0/package/scripts/setup_logsearch.py    |   7 +-
 .../package/templates/logfeeder.properties.j2   |  32 -----
 .../package/templates/logsearch.properties.j2   |  72 -----------
 .../stacks/2.4/LOGSEARCH/test_logfeeder.py      |  18 ++-
 .../stacks/2.4/LOGSEARCH/test_logsearch.py      |  30 ++++-
 .../test/python/stacks/2.4/configs/default.json |  13 +-
 ambari-web/app/data/HDP2/site_properties.js     |   6 +-
 10 files changed, 120 insertions(+), 199 deletions(-)
----------------------------------------------------------------------
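
The change summarized above drops the two jinja templates and instead feeds a plain Python dict to Ambari's PropertiesFile resource (the setup scripts below call PropertiesFile(<properties file path>, properties=params.logfeeder_properties) and the logsearch equivalent). As an illustration only, and not Ambari's actual PropertiesFile implementation, the key=value rendering that replaces the templates can be sketched in a few lines:

    # Minimal sketch of a key=value properties writer; the path and dict
    # contents are made-up examples, not values from a real cluster.
    def write_properties(path, properties):
        with open(path, 'w') as f:
            for key in sorted(properties):           # sorted for stable output
                f.write('%s=%s\n' % (key, properties[key]))

    write_properties('/tmp/logsearch.properties', {
        'logsearch.protocol': 'http',
        'logsearch.solr.zk_connect_string': 'c6401.ambari.apache.org:2181/infra-solr',
    })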


http://git-wip-us.apache.org/repos/asf/ambari/blob/3f790c8f/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml
index 56ff2d1..e521c8e 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml
@@ -133,7 +133,7 @@
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
-    <name>logsearch.external.auth.enabled</name>
+    <name>logsearch.auth.external_auth.enable</name>
     <value>true</value>
     <display-name>External authentication</display-name>
     <description>Enable external authentication</description>
@@ -143,14 +143,14 @@
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
-    <name>logsearch.external.auth.host_url</name>
+    <name>logsearch.auth.external_auth.host_url</name>
     <value>{ambari_server_auth_host_url}</value>
     <display-name>External authentication url - host and port</display-name>
     <description>The host and port of the external server used to authenticate</description>
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
-    <name>logsearch.external.auth.login_url</name>
+    <name>logsearch.auth.external_auth.login_url</name>
     <value>/api/v1/users/$USERNAME/privileges?fields=*</value>
     <display-name>External authentication url - path and query</display-name>
     <description>The path and query of the external server used to authenticate</description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/3f790c8f/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
index 5c3954f..8b0ce75 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
@@ -75,9 +75,9 @@ if 'metrics_collector_hosts' in config['clusterHostInfo']:
 else:
   metrics_collector_hosts = ''
 
-logsearch_solr_metrics_collector_hosts = format(config['configurations']['logsearch-properties']['logsearch.solr.metrics.collector.hosts'])
-
+#####################################
 # Infra Solr configs
+#####################################
 infra_solr_znode = default('/configurations/infra-solr-env/infra_solr_znode', '/infra-solr')
 infra_solr_instance_count = len(config['clusterHostInfo']['infra_solr_hosts'])
 infra_solr_ssl_enabled = default('configurations/infra-solr-env/infra_solr_ssl_enabled', False)
@@ -108,57 +108,17 @@ if security_enabled:
 #####################################
 logsearch_dir = '/usr/lib/ambari-logsearch-portal'
 
-logsearch_collection_service_logs_numshards_config = config['configurations']['logsearch-properties']['logsearch.collection.service.logs.numshards']
-logsearch_collection_audit_logs_numshards_config = config['configurations']['logsearch-properties']['logsearch.collection.audit.logs.numshards']
-
-if logsearch_collection_service_logs_numshards_config > 0:
-  logsearch_collection_service_logs_numshards = str(logsearch_collection_service_logs_numshards_config)
-else:
-  logsearch_collection_service_logs_numshards = format(str(infra_solr_instance_count))
-
-if logsearch_collection_audit_logs_numshards_config > 0:
-  logsearch_collection_audit_logs_numshards = str(logsearch_collection_audit_logs_numshards_config)
-else:
-  logsearch_collection_audit_logs_numshards = format(str(infra_solr_instance_count))
-
-logsearch_collection_service_logs_replication_factor = str(config['configurations']['logsearch-properties']['logsearch.collection.service.logs.replication.factor'])
-logsearch_collection_audit_logs_replication_factor = str(config['configurations']['logsearch-properties']['logsearch.collection.audit.logs.replication.factor'])
-
-logsearch_solr_collection_service_logs = default('/configurations/logsearch-properties/logsearch.solr.collection.service.logs', 'hadoop_logs')
-logsearch_solr_collection_audit_logs = default('/configurations/logsearch-properties/logsearch.solr.collection.audit.logs','audit_logs')
-
 logsearch_service_logs_max_retention = config['configurations']['logsearch-service_logs-solrconfig']['logsearch_service_logs_max_retention']
 logsearch_service_logs_merge_factor = config['configurations']['logsearch-service_logs-solrconfig']['logsearch_service_logs_merge_factor']
-logsearch_service_logs_fields = config['configurations']['logsearch-properties']['logsearch.service.logs.fields']
-logsearch_service_logs_split_interval_mins = config['configurations']['logsearch-properties']['logsearch.service.logs.split.interval.mins']
 
 logsearch_audit_logs_max_retention = config['configurations']['logsearch-audit_logs-solrconfig']['logsearch_audit_logs_max_retention']
 logsearch_audit_logs_merge_factor = config['configurations']['logsearch-audit_logs-solrconfig']['logsearch_audit_logs_merge_factor']
-logsearch_audit_logs_split_interval_mins = config['configurations']['logsearch-properties']['logsearch.audit.logs.split.interval.mins']
 
-logsearch_logfeeder_include_default_level = default('/configurations/logsearch-properties/logsearch.logfeeder.include.default.level', 'fatal,error,warn')
-
-logsearch_solr_audit_logs_zk_node = default('/configurations/logsearch-env/logsearch_solr_audit_logs_zk_node', zookeeper_quorum)
-logsearch_solr_audit_logs_zk_quorum = default('/configurations/logsearch-env/logsearch_solr_audit_logs_zk_quorum', infra_solr_znode)
+logsearch_solr_audit_logs_zk_node = default('/configurations/logsearch-env/logsearch_solr_audit_logs_zk_node', infra_solr_znode)
+logsearch_solr_audit_logs_zk_quorum = default('/configurations/logsearch-env/logsearch_solr_audit_logs_zk_quorum', zookeeper_quorum)
 logsearch_solr_audit_logs_zk_node = format(logsearch_solr_audit_logs_zk_node)
 logsearch_solr_audit_logs_zk_quorum = format(logsearch_solr_audit_logs_zk_quorum)
 
-# create custom properties - remove defaults
-logsearch_custom_properties = dict(config['configurations']['logsearch-properties'])
-logsearch_custom_properties.pop("logsearch.service.logs.fields", None)
-logsearch_custom_properties.pop("logsearch.audit.logs.split.interval.mins", None)
-logsearch_custom_properties.pop("logsearch.collection.service.logs.replication.factor", None)
-logsearch_custom_properties.pop("logsearch.solr.collection.service.logs", None)
-logsearch_custom_properties.pop("logsearch.solr.metrics.collector.hosts", None)
-logsearch_custom_properties.pop("logsearch.solr.collection.audit.logs", None)
-logsearch_custom_properties.pop("logsearch.logfeeder.include.default.level", None)
-logsearch_custom_properties.pop("logsearch.collection.audit.logs.replication.factor", None)
-logsearch_custom_properties.pop("logsearch.collection.service.logs.numshards", None)
-logsearch_custom_properties.pop("logsearch.service.logs.split.interval.mins", None)
-logsearch_custom_properties.pop("logsearch.collection.audit.logs.numshards", None)
-logsearch_custom_properties.pop("logsearch.external.auth.enabled", None)
-logsearch_custom_properties.pop("logsearch.external.auth.host_url", None)
-logsearch_custom_properties.pop("logsearch.external.auth.login_url", None)
 
 # logsearch-env configs
 logsearch_user = config['configurations']['logsearch-env']['logsearch_user']
@@ -215,9 +175,7 @@ hdfs_user = default('configurations/hadoop-env/hdfs_user', 'hdfs')
 mapred_user =  default('configurations/mapred-env/mapred_user', 'mapred')
 yarn_user =  default('configurations/yarn-env/yarn_user', 'yarn')
 
-#####################################
 # Logsearch auth configs
-#####################################
 
 logsearch_admin_credential_file = 'logsearch-admin.json'
 logsearch_admin_username = default('/configurations/logsearch-admin-json/logsearch_admin_username', "admin")
@@ -236,9 +194,42 @@ if 'ambari_server_host' in config['clusterHostInfo']:
 else:
   ambari_server_auth_host_url = ''
 
-logsearch_auth_external_enabled = str(config['configurations']['logsearch-properties']['logsearch.external.auth.enabled']).lower()
-logsearch_auth_external_host_url = format(config['configurations']['logsearch-properties']['logsearch.external.auth.host_url'])
-logsearch_auth_external_login_url = config['configurations']['logsearch-properties']['logsearch.external.auth.login_url']
+# Logsearch properties
+
+logsearch_properties = dict(config['configurations']['logsearch-properties'])
+
+logsearch_properties['logsearch.solr.zk_connect_string'] = zookeeper_quorum + infra_solr_znode
+logsearch_properties['logsearch.solr.audit.logs.zk_connect_string'] = logsearch_solr_audit_logs_zk_quorum + logsearch_solr_audit_logs_zk_node
+
+logsearch_properties['logsearch.solr.collection.history'] = 'history'
+logsearch_properties['logsearch.solr.history.config.name'] = 'history'
+logsearch_properties['logsearch.collection.history.replication.factor'] = '1'
+
+logsearch_properties['logsearch.solr.metrics.collector.hosts'] = format(logsearch_properties['logsearch.solr.metrics.collector.hosts'])
+
+logsearch_properties['logsearch.solr.jmx.port'] = infra_solr_jmx_port
+
+logsearch_properties['logsearch.login.credentials.file'] = logsearch_admin_credential_file
+logsearch_properties['logsearch.auth.file.enable'] = 'true'
+logsearch_properties['logsearch.auth.ldap.enable'] = 'false'
+logsearch_properties['logsearch.auth.simple.enable'] = 'false'
+logsearch_properties['logsearch.roles.allowed'] = 'AMBARI.ADMINISTRATOR'
+logsearch_properties['logsearch.auth.external_auth.host_url'] = format(logsearch_properties['logsearch.auth.external_auth.host_url'])
+
+logsearch_properties['logsearch.protocol'] = logsearch_ui_protocol
+
+if security_enabled:
+  logsearch_properties['logsearch.solr.kerberos.enable'] = 'true'
+  logsearch_properties['logsearch.solr.jaas.file'] = logsearch_jaas_file
+
+
+logsearch_solr_collection_service_logs = logsearch_properties['logsearch.solr.collection.service.logs']
+logsearch_service_logs_split_interval_mins = logsearch_properties['logsearch.service.logs.split.interval.mins']
+logsearch_collection_service_logs_numshards = logsearch_properties['logsearch.collection.service.logs.numshards']
+
+logsearch_solr_collection_audit_logs = logsearch_properties['logsearch.solr.collection.audit.logs']
+logsearch_audit_logs_split_interval_mins = logsearch_properties['logsearch.audit.logs.split.interval.mins']
+logsearch_collection_audit_logs_numshards = logsearch_properties['logsearch.collection.audit.logs.numshards']
 
 #####################################
 # Logfeeder configs
@@ -268,29 +259,31 @@ logfeeder_truststore_location = config['configurations']['logfeeder-env']['logfe
 logfeeder_truststore_password = config['configurations']['logfeeder-env']['logfeeder_truststore_password']
 logfeeder_truststore_type = config['configurations']['logfeeder-env']['logfeeder_truststore_type']
 
-logfeeder_checkpoint_folder = default('/configurations/logfeeder-env/logfeeder.checkpoint.folder',
-                                      '/etc/ambari-logsearch-logfeeder/conf/checkpoints')
+logfeeder_supported_services = ['accumulo', 'ambari', 'ams', 'atlas', 'falcon', 'flume', 'hbase', 'hdfs', 'hive', 'hst', 'infra',
+                                'kafka', 'knox', 'logsearch', 'nifi', 'oozie', 'ranger', 'spark', 'spark2', 'storm', 'yarn',
+                                'zeppelin', 'zookeeper']
 
-logfeeder_log_filter_enable = str(default('/configurations/logfeeder-properties/logfeeder.log.filter.enable', True)).lower()
-logfeeder_solr_config_interval = default('/configurations/logfeeder-properties/logfeeder.solr.config.interval', 5)
+logfeeder_config_file_names = \
+  ['global.config.json', 'output.config.json'] + ['input.config-%s.json' % (tag) for tag in logfeeder_supported_services]
 
-logfeeder_supported_services = ['accumulo', 'ambari', 'ams', 'atlas', 'falcon', 'flume', 'hbase', 'hdfs', 'hive', 'hst', 'infra', 'kafka',
-                                'knox', 'logsearch', 'nifi', 'oozie', 'ranger', 'spark', 'spark2', 'storm', 'yarn', 'zeppelin', 'zookeeper']
+default_config_files = ','.join(logfeeder_config_file_names)
 
-logfeeder_config_file_names = ['global.config.json', 'output.config.json'] + ['input.config-%s.json' % (tag) for tag in
-                                                                              logfeeder_supported_services]
+logfeeder_properties = dict(config['configurations']['logfeeder-properties'])
 
-default_config_files = ','.join(logfeeder_config_file_names)
+logfeeder_properties['logfeeder.metrics.collector.hosts'] = format(logfeeder_properties['logfeeder.metrics.collector.hosts'])
+logfeeder_properties['logfeeder.config.files'] = format(logfeeder_properties['logfeeder.config.files'])
+logfeeder_properties['logfeeder.solr.core.config.name'] = 'history'
+logfeeder_properties['logfeeder.solr.zk_connect_string'] = zookeeper_quorum + infra_solr_znode
 
-logfeeder_config_files = format(config['configurations']['logfeeder-properties']['logfeeder.config.files'])
-logfeeder_metrics_collector_hosts = format(config['configurations']['logfeeder-properties']['logfeeder.metrics.collector.hosts'])
+if security_enabled:
+  logfeeder_properties['logfeeder.solr.kerberos.enable'] = 'true'
+  logfeeder_properties['logfeeder.solr.jaas.file'] = logfeeder_jaas_file
 
-logfeeder_custom_properties = dict(config['configurations']['logfeeder-properties'])
-logfeeder_custom_properties.pop('logfeeder.config.files', None)
-logfeeder_custom_properties.pop('logfeeder.checkpoint.folder', None)
-logfeeder_custom_properties.pop('logfeeder.metrics.collector.hosts', None)
-logfeeder_custom_properties.pop('logfeeder.log.filter.enable', None)
-logfeeder_custom_properties.pop('logfeeder.solr.config.interval', None)
+logfeeder_checkpoint_folder = logfeeder_properties['logfeeder.checkpoint.folder']
+
+#####################################
+# Smoke command
+#####################################
 
 logsearch_server_hosts = config['clusterHostInfo']['logsearch_server_hosts']
 logsearch_server_host = ""

http://git-wip-us.apache.org/repos/asf/ambari/blob/3f790c8f/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
index d3851d3..688a9b0 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
@@ -20,12 +20,12 @@ limitations under the License.
 from resource_management.core.resources.system import Directory, File
 from resource_management.libraries.functions.format import format
 from resource_management.core.source import InlineTemplate, Template
+from resource_management.libraries.resources.properties_file import PropertiesFile
 
 def setup_logfeeder():
   import params
 
-  Directory([params.logfeeder_log_dir, params.logfeeder_pid_dir,
-             params.logfeeder_checkpoint_folder],
+  Directory([params.logfeeder_log_dir, params.logfeeder_pid_dir, params.logfeeder_checkpoint_folder],
             mode=0755,
             cd_access='a',
             create_parents=True
@@ -43,9 +43,9 @@ def setup_logfeeder():
        content=''
        )
 
-  File(format("{logsearch_logfeeder_conf}/logfeeder.properties"),
-       content=Template("logfeeder.properties.j2"),
-       )
+  PropertiesFile(format("{logsearch_logfeeder_conf}/logfeeder.properties"),
+                 properties = params.logfeeder_properties
+                 )
 
   File(format("{logsearch_logfeeder_conf}/logfeeder-env.sh"),
        content=InlineTemplate(params.logfeeder_env_content),

http://git-wip-us.apache.org/repos/asf/ambari/blob/3f790c8f/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
index aa89609..8d84093 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py
@@ -23,6 +23,7 @@ from resource_management.core.resources.system import Directory, Execute, File
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.decorator import retry
 from resource_management.core.source import InlineTemplate, Template
+from resource_management.libraries.resources.properties_file import PropertiesFile
 
 
 def setup_logsearch():
@@ -51,10 +52,8 @@ def setup_logsearch():
        content=''
        )
 
-  File(format("{logsearch_server_conf}/logsearch.properties"),
-       content=Template("logsearch.properties.j2"),
-       owner=params.logsearch_user,
-       group=params.user_group
+  PropertiesFile(format("{logsearch_server_conf}/logsearch.properties"),
+       properties=params.logsearch_properties
        )
 
   File(format("{logsearch_server_conf}/HadoopServiceConfig.json"),

http://git-wip-us.apache.org/repos/asf/ambari/blob/3f790c8f/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logfeeder.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logfeeder.properties.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logfeeder.properties.j2
deleted file mode 100644
index 86f0be4..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logfeeder.properties.j2
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-logfeeder.checkpoint.folder={{logfeeder_checkpoint_folder}}
-logfeeder.metrics.collector.hosts={{logfeeder_metrics_collector_hosts}}
-logfeeder.config.files={{logfeeder_config_files}}
-logfeeder.log.filter.enable={{logfeeder_log_filter_enable}}
-logfeeder.solr.config.interval={{logfeeder_solr_config_interval}}
-logfeeder.solr.core.config.name=history
-logfeeder.solr.zk_connect_string={{zookeeper_quorum}}{{infra_solr_znode}}
-
-# Custom properties
-{% for key, value in logfeeder_custom_properties.items() %}
-{{key}}={{value}}
-{% endfor %}
-
-{% if security_enabled -%}
-logfeeder.solr.kerberos.enable=true
-logfeeder.solr.jaas.file={{logfeeder_jaas_file}}
-{% endif %}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/3f790c8f/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2
deleted file mode 100644
index 1bcee2d..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2
+++ /dev/null
@@ -1,72 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-logsearch.solr.zk_connect_string={{zookeeper_quorum}}{{infra_solr_znode}}
-
-# Service Logs
-logsearch.solr.collection.service.logs={{logsearch_solr_collection_service_logs}}
-
-logsearch.service.logs.split.interval.mins={{logsearch_service_logs_split_interval_mins}}
-logsearch.collection.service.logs.numshards={{logsearch_collection_service_logs_numshards}}
-logsearch.collection.service.logs.replication.factor={{logsearch_collection_service_logs_replication_factor}}
-
-logsearch.service.logs.fields={{logsearch_service_logs_fields}}
-
-# Audit logs
-logsearch.solr.audit.logs.zk_connect_string={{logsearch_solr_audit_logs_zk_quorum}}{{logsearch_solr_audit_logs_zk_node}}
-logsearch.solr.collection.audit.logs={{logsearch_solr_collection_audit_logs}}
-logsearch.solr.audit.logs.url=
-
-logsearch.audit.logs.split.interval.mins={{logsearch_audit_logs_split_interval_mins}}
-logsearch.collection.audit.logs.numshards={{logsearch_collection_audit_logs_numshards}}
-logsearch.collection.audit.logs.replication.factor={{logsearch_collection_audit_logs_replication_factor}}
-
-# History logs
-logsearch.solr.collection.history=history
-logsearch.solr.history.config.name=history
-logsearch.collection.history.replication.factor=1
-
-# Metrics
-logsearch.solr.metrics.collector.hosts={{logsearch_solr_metrics_collector_hosts}}
-logsearch.solr.jmx.port={{infra_solr_jmx_port}}
-
-# Logfeeder Settings
-
-logsearch.logfeeder.include.default.level={{logsearch_logfeeder_include_default_level}}
-
-# logsearch-admin.json
-logsearch.auth.file.enable=true
-logsearch.login.credentials.file={{logsearch_admin_credential_file}}
-
-logsearch.auth.ldap.enable=false
-logsearch.auth.simple.enable=false
-
-logsearch.auth.external_auth.enable={{logsearch_auth_external_enabled}}
-logsearch.auth.external_auth.host_url={{logsearch_auth_external_host_url}}
-logsearch.auth.external_auth.login_url={{logsearch_auth_external_login_url}}
-logsearch.roles.allowed=AMBARI.ADMINISTRATOR
-
-logsearch.protocol={{logsearch_ui_protocol}}
-
-# Custom properties
-{% for key, value in logsearch_custom_properties.items() %}
-{{key}}={{value}}
-{% endfor %}
-
-{% if security_enabled -%}
-logsearch.solr.kerberos.enable=true
-logsearch.solr.jaas.file={{logsearch_jaas_file}}
-{% endif %}
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/3f790c8f/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
index f4dbd63..7f9acba 100644
--- a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
+++ b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
@@ -60,8 +60,13 @@ class TestLogFeeder(RMFTestCase):
                               mode=0644,
                               content=''
                               )
-    self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/logfeeder.properties',
-                              content=Template('logfeeder.properties.j2')
+    self.assertResourceCalled('PropertiesFile', '/etc/ambari-logsearch-logfeeder/conf/logfeeder.properties',
+                              properties={'logfeeder.checkpoint.folder': '/etc/ambari-logsearch-logfeeder/conf/checkpoints',
+                                          'logfeeder.config.files': 'global.config.json,output.config.json,input.config-accumulo.json,input.config-ambari.json,input.config-ams.json,input.config-atlas.json,input.config-falcon.json,input.config-flume.json,input.config-hbase.json,input.config-hdfs.json,input.config-hive.json,input.config-hst.json,input.config-infra.json,input.config-kafka.json,input.config-knox.json,input.config-logsearch.json,input.config-nifi.json,input.config-oozie.json,input.config-ranger.json,input.config-spark.json,input.config-spark2.json,input.config-storm.json,input.config-yarn.json,input.config-zeppelin.json,input.config-zookeeper.json',
+                                          'logfeeder.metrics.collector.hosts': '',
+                                          'logfeeder.solr.core.config.name': 'history',
+                                          'logfeeder.solr.zk_connect_string': 'c6401.ambari.apache.org:2181/infra-solr'
+                                         }
                               )
     self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/logfeeder-env.sh',
                               mode=0755,
@@ -75,11 +80,12 @@ class TestLogFeeder(RMFTestCase):
                               encoding='utf-8'
                               )
 
-    logfeeder_supported_services = ['accumulo', 'ambari', 'ams', 'atlas', 'falcon', 'flume', 'hbase', 'hdfs', 'hive', 'hst', 'infra', 'kafka',
-                                    'knox', 'logsearch', 'nifi', 'oozie', 'ranger', 'spark', 'spark2', 'storm', 'yarn', 'zeppelin', 'zookeeper']
+    logfeeder_supported_services = ['accumulo', 'ambari', 'ams', 'atlas', 'falcon', 'flume', 'hbase', 'hdfs', 'hive', 'hst',
+                                    'infra', 'kafka', 'knox', 'logsearch', 'nifi', 'oozie', 'ranger', 'spark', 'spark2', 'storm',
+                                    'yarn', 'zeppelin', 'zookeeper']
 
-    logfeeder_config_file_names = ['global.config.json', 'output.config.json'] + ['input.config-%s.json' % (tag) for tag
-                                                                                  in logfeeder_supported_services]
+    logfeeder_config_file_names = ['global.config.json', 'output.config.json'] + \
+                                  ['input.config-%s.json' % (tag) for tag in logfeeder_supported_services]
 
     for file_name in logfeeder_config_file_names:
       self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/' + file_name,
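
The very long logfeeder.config.files value asserted earlier in this test is not written by hand; it is produced by the same list comprehension and join that params.py uses for default_config_files. A short sketch with only a few of the supported services:

    # How the logfeeder.config.files string is built (subset of services only).
    logfeeder_supported_services = ['ambari', 'hdfs', 'zookeeper']
    logfeeder_config_file_names = ['global.config.json', 'output.config.json'] + \
        ['input.config-%s.json' % (tag) for tag in logfeeder_supported_services]
    default_config_files = ','.join(logfeeder_config_file_names)
    print(default_config_files)
    # global.config.json,output.config.json,input.config-ambari.json,input.config-hdfs.json,input.config-zookeeper.json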

http://git-wip-us.apache.org/repos/asf/ambari/blob/3f790c8f/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
index 5a2e6e8..0378a94 100644
--- a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
+++ b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
@@ -70,10 +70,32 @@ class TestLogSearch(RMFTestCase):
                               mode = 0644,
                               content = ''
     )
-    self.assertResourceCalled('File', '/etc/ambari-logsearch-portal/conf/logsearch.properties',
-                              owner = 'logsearch',
-                              group='hadoop',
-                              content = Template('logsearch.properties.j2')
+    self.assertResourceCalled('PropertiesFile', '/etc/ambari-logsearch-portal/conf/logsearch.properties',
+                              properties = {'logsearch.audit.logs.split.interval.mins': '1',
+                                            'logsearch.auth.external_auth.enable': 'false',
+                                            'logsearch.auth.external_auth.host_url': 'http://c6401.ambari.apache.org:8080',
+                                            'logsearch.auth.external_auth.login_url': '/api/v1/users/$USERNAME/privileges?fields=*',
+                                            'logsearch.auth.file.enable': 'true',
+                                            'logsearch.auth.ldap.enable': 'false',
+                                            'logsearch.auth.simple.enable': 'false',
+                                            'logsearch.collection.audit.logs.numshards': '10',
+                                            'logsearch.collection.audit.logs.replication.factor': '1',
+                                            'logsearch.collection.history.replication.factor': '1',
+                                            'logsearch.collection.service.logs.numshards': '10',
+                                            'logsearch.collection.service.logs.replication.factor': '1',
+                                            'logsearch.login.credentials.file': 'logsearch-admin.json',
+                                            'logsearch.protocol': 'http',
+                                            'logsearch.roles.allowed': 'AMBARI.ADMINISTRATOR',
+                                            'logsearch.service.logs.split.interval.mins': '1',
+                                            'logsearch.solr.audit.logs.zk_connect_string': 'c6401.ambari.apache.org:2181/infra-solr',
+                                            'logsearch.solr.collection.audit.logs': 'audit_logs',
+                                            'logsearch.solr.collection.history': 'history',
+                                            'logsearch.solr.collection.service.logs': 'hadoop_logs',
+                                            'logsearch.solr.history.config.name': 'history',
+                                            'logsearch.solr.metrics.collector.hosts': '',
+                                            'logsearch.solr.jmx.port': '1',
+                                            'logsearch.solr.zk_connect_string': 'c6401.ambari.apache.org:2181/infra-solr'
+                              }
     )
     self.assertResourceCalled('File', '/etc/ambari-logsearch-portal/conf/HadoopServiceConfig.json',
                               owner = 'logsearch',

http://git-wip-us.apache.org/repos/asf/ambari/blob/3f790c8f/ambari-server/src/test/python/stacks/2.4/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/configs/default.json b/ambari-server/src/test/python/stacks/2.4/configs/default.json
index a016ce0..8ac6fa7 100644
--- a/ambari-server/src/test/python/stacks/2.4/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.4/configs/default.json
@@ -250,6 +250,7 @@
         "infra_solr_datadir": "/opt/ambari_infra_solr/data",
         "infra_solr_log_dir": "/var/log/ambari-infra-solr",
         "infra_solr_client_log_dir": "/var/log/ambari-infra-solr-client",
+        "infra_solr_jmx_port" : "1",
         "content": "# By default the script will use JAVA_HOME to determine which java\n# to use, but you can set a specific path for Solr to use without\n# affecting other Java applications on your server/workstation.\nSOLR_JAVA_HOME={{java64_home}}\n\n# Increase Java Min/Max Heap as needed to support your indexing / query needs\nSOLR_JAVA_MEM=\"-Xms{{solr_min_mem}} -Xmx{{solr_max_mem}}\"\n\n# Enable verbose GC logging\nGC_LOG_OPTS=\"-verbose:gc -XX:+PrintHeapAtGC -XX:+PrintGCDetails \\\n-XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+PrintTenuringDistribution -XX:+PrintGCApplicationStoppedTime\"\n\n# These GC settings have shown to work well for a number of common Solr workloads\nGC_TUNE=\"-XX:NewRatio=3 \\\n-XX:SurvivorRatio=4 \\\n-XX:TargetSurvivorRatio=90 \\\n-XX:MaxTenuringThreshold=8 \\\n-XX:+UseConcMarkSweepGC \\\n-XX:+UseParNewGC \\\n-XX:ConcGCThreads=4 -XX:ParallelGCThreads=4 \\\n-XX:+CMSScavengeBeforeRemark \\\n-XX:PretenureSizeThreshold=64m \\\n-XX:+UseCMSInitiatingOc
 cupancyOnly \\\n-XX:CMSInitiatingOccupancyFraction=50 \\\n-XX:CMSMaxAbortablePrecleanTime=6000 \\\n-XX:+CMSParallelRemarkEnabled \\\n-XX:+ParallelRefProcEnabled\"\n\n# Set the ZooKeeper connection string if using an external ZooKeeper ensemble\n# e.g. host1:2181,host2:2181/chroot\n# Leave empty if not using SolrCloud\nZK_HOST=\"{{zookeeper_quorum}}{{solr_znode}}\"\n\n# Set the ZooKeeper client timeout (for SolrCloud mode)\nZK_CLIENT_TIMEOUT=\"60000\"\n\n# By default the start script uses \"localhost\"; override the hostname here\n# for production SolrCloud environments to control the hostname exposed to cluster state\n#SOLR_HOST=\"192.168.1.1\"\n\n# By default the start script uses UTC; override the timezone if needed\n#SOLR_TIMEZONE=\"UTC\"\n\n# Set to true to activate the JMX RMI connector to allow remote JMX client applications\n# to monitor the JVM hosting Solr; set to \"false\" to disable that behavior\n# (false is recommended in production environments)\nENABLE_REMOTE_JMX_OPTS
 =\"false\"\n\n# The script will use SOLR_PORT+10000 for the RMI_PORT or you can set it here\n# RMI_PORT=18983\n\n# Anything you add to the SOLR_OPTS variable will be included in the java\n# start command line as-is, in ADDITION to other options. If you specify the\n# -a option on start script, those options will be appended as well. Examples:\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.autoSoftCommit.maxTime=3000\"\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.autoCommit.maxTime=60000\"\n#SOLR_OPTS=\"$SOLR_OPTS -Dsolr.clustering.enabled=true\"\n\n# Location where the bin/solr script will save PID files for running instances\n# If not set, the script will create PID files in $SOLR_TIP/bin\nSOLR_PID_DIR={{solr_piddir}}\n\n# Path to a directory where Solr creates index files, the specified directory\n# must contain a solr.xml; by default, Solr will use server/solr\nSOLR_HOME={{infra_solr_datadir}}\n\n# Solr provides a default Log4J configuration properties file in server/resources\n# however, you may want to cu
 stomize the log settings and file appender location\n# so you can point the script to use a different log4j.properties file\nLOG4J_PROPS={{logsearch_solr_datadir}}/resources/log4j.properties\n\n# Location where Solr should write logs to; should agree with the file appender\n# settings in server/resources/log4j.properties\nSOLR_LOGS_DIR={{solr_log_dir}}\n\n# Sets the port Solr binds to, default is 8983\nSOLR_PORT={{solr_port}}\n\n# Be sure to update the paths to the correct keystore for your environment\n{% if logsearch_solr_ssl_enabled %}\nSOLR_SSL_KEY_STORE={{logsearch_solr_keystore_location}}\nSOLR_SSL_KEY_STORE_PASSWORD={{logsearch_solr_keystore_password}}\nSOLR_SSL_TRUST_STORE={{logsearch_solr_keystore_location}}\nSOLR_SSL_TRUST_STORE_PASSWORD={{logsearch_solr_keystore_password}}\nSOLR_SSL_NEED_CLIENT_AUTH=false\nSOLR_SSL_WANT_CLIENT_AUTH=false\n{% endif %}\n\n# Uncomment to set a specific SSL port (-Djetty.ssl.port=N); if not set\n# and you are using SSL, then the start script 
 will use SOLR_PORT for the SSL port\n#SOLR_SSL_PORT="
       },
       "infra-solr-xml": {
@@ -280,17 +281,21 @@
         "content": "&lt;?xml version=\"1.0\" encoding=\"UTF-8\" ?&gt;\n&lt;!--\n  Licensed to the Apache Software Foundation (ASF) under one or more\n  contributor license agreements.  See the NOTICE file distributed with\n  this work for additional information regarding copyright ownership.\n  The ASF licenses this file to You under the Apache License, Version 2.0\n  (the \"License\"); you may not use this file except in compliance with\n  the License.  You may obtain a copy of the License at\n\n      http://www.apache.org/licenses/LICENSE-2.0\n\n  Unless required by applicable law or agreed to in writing, software\n  distributed under the License is distributed on an \"AS IS\" BASIS,\n  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n  See the License for the specific language governing permissions and\n  limitations under the License.\n--&gt;\n&lt;!DOCTYPE log4j:configuration SYSTEM \"log4j.dtd\"&gt;\n&lt;log4j:configuration xmlns:log4j=\"http://jakarta.
 apache.org/log4j/\"&gt;\n  &lt;appender name=\"console\" class=\"org.apache.log4j.ConsoleAppender\"&gt;\n    &lt;param name=\"Target\" value=\"System.out\" /&gt;\n    &lt;layout class=\"org.apache.log4j.PatternLayout\"&gt;\n      &lt;param name=\"ConversionPattern\" value=\"%d [%t] %-5p %C{6} (%F:%L) - %m%n\" /&gt;\n    &lt;/layout&gt;\n  &lt;/appender&gt;\n\n  &lt;appender name=\"rolling_file\" class=\"org.apache.log4j.RollingFileAppender\"&gt; \n    &lt;param name=\"file\" value=\"{{logsearch_log_dir}}/logsearch.log\" /&gt; \n    &lt;param name=\"append\" value=\"true\" /&gt; \n    &lt;param name=\"maxFileSize\" value=\"10MB\" /&gt; \n    &lt;param name=\"maxBackupIndex\" value=\"10\" /&gt; \n    &lt;layout class=\"org.apache.log4j.PatternLayout\"&gt; \n      &lt;param name=\"ConversionPattern\" value=\"%d [%t] %-5p %C{6} (%F:%L) - %m%n\"/&gt; \n    &lt;/layout&gt; \n  &lt;/appender&gt; \n\n  &lt;appender name=\"performance_analyzer\" class=\"org.apache.log4j.RollingFileAppender\"
 &gt;\n    &lt;param name=\"file\" value=\"{{logsearch_log_dir}}/logsearch-performance.log\" /&gt;\n    &lt;param name=\"Threshold\" value=\"info\" /&gt;\n    &lt;param name=\"append\" value=\"true\" /&gt;\n    &lt;param name=\"maxFileSize\" value=\"10MB\" /&gt; \n    &lt;param name=\"maxBackupIndex\" value=\"10\" /&gt; \n    &lt;layout class=\"org.apache.log4j.PatternLayout\"&gt;\n      &lt;param name=\"ConversionPattern\" value=\"%d [%t] %-5p %C{6} (%F:%L) - %m%n\" /&gt;\n    &lt;/layout&gt;\n  &lt;/appender&gt;\n  \n  &lt;logger name=\"org.apache.ambari.logsearch.perfomance\" additivity=\"false\"&gt;\n   &lt;appender-ref ref=\"performance_analyzer\" /&gt;\n  &lt;/logger&gt;\n\n  &lt;category name=\"org.apache.ambari.logsearch\" additivity=\"false\"&gt;\n    &lt;priority value=\"info\" /&gt;\n    &lt;appender-ref ref=\"rolling_file\" /&gt;\n  &lt;/category&gt;\n\n  &lt;root&gt;\n    &lt;priority value=\"warn\" /&gt;\n    &lt;appender-ref ref=\"rolling_file\" /&gt;\n  &lt;/root&gt;\
 n&lt;/log4j:configuration&gt;"
       },
       "logsearch-properties": {
+        "logsearch.solr.collection.service.logs" : "hadoop_logs",
+        "logsearch.solr.collection.audit.logs" : "audit_logs",
+        "logsearch.service.logs.split.interval.mins": "1",
+        "logsearch.audit.logs.split.interval.mins" : "1",
         "logsearch.collection.service.logs.numshards": "10",
         "logsearch.collection.service.logs.replication.factor": "1",
         "logsearch.collection.audit.logs.numshards": "10",
         "logsearch.collection.audit.logs.replication.factor": "1",
-        "logsearch.app.max.memory": "1g",
         "logsearch.solr.metrics.collector.hosts" : "{metrics_collector_hosts}",
-        "logsearch.external.auth.enabled" : "false",
-        "logsearch.external.auth.host_url" : "{ambari_server_auth_host_url}",
-        "logsearch.external.auth.login_url" : "/api/v1/users/$USERNAME/privileges?fields=*"
+        "logsearch.auth.external_auth.enable" : "false",
+        "logsearch.auth.external_auth.host_url" : "{ambari_server_auth_host_url}",
+        "logsearch.auth.external_auth.login_url" : "/api/v1/users/$USERNAME/privileges?fields=*"
       },
       "logfeeder-properties": {
+        "logfeeder.checkpoint.folder" : "/etc/ambari-logsearch-logfeeder/conf/checkpoints",
         "logfeeder.metrics.collector.hosts" : "{metrics_collector_hosts}",
         "logfeeder.config.files" : "{default_config_files}"
       },

http://git-wip-us.apache.org/repos/asf/ambari/blob/3f790c8f/ambari-web/app/data/HDP2/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/site_properties.js b/ambari-web/app/data/HDP2/site_properties.js
index 1fad1d6..9311e75 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -2215,21 +2215,21 @@ var hdp2properties = [
     "index": 6
   },
   {
-    "name": "logsearch.external.auth.enabled",
+    "name": "logsearch.auth.external_auth.enable",
     "serviceName": "LOGSEARCH",
     "filename": "logsearch-properties.xml",
     "category": "Advanced logsearch-properties",
     "index": 7
   },
   {
-    "name": "logsearch.external.auth.host_url",
+    "name": "logsearch.auth.external_auth.host_url",
     "serviceName": "LOGSEARCH",
     "filename": "logsearch-properties.xml",
     "category": "Advanced logsearch-properties",
     "index": 8
   },
   {
-    "name": "logsearch.external.auth.login_url",
+    "name": "logsearch.auth.external_auth.login_url",
     "serviceName": "LOGSEARCH",
     "filename": "logsearch-properties.xml",
     "category": "Advanced logsearch-properties",


[14/50] [abbrv] ambari git commit: AMBARI-18227. Add unit tests for Log Search components and refactor them as needed - Vol 1. (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/StringUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/StringUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/StringUtil.java
deleted file mode 100644
index de83e7e..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/StringUtil.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.util;
-
-import org.apache.log4j.Logger;
-import org.springframework.stereotype.Component;
-
-@Component
-public class StringUtil {
-  
-  private static  Logger logger = Logger.getLogger(StringUtil.class);
-  
-  public boolean isEmpty(String str) {
-    return str == null || str.trim().length() == 0;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
index 69132e8..9fb285e 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
@@ -29,12 +29,10 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import javax.servlet.http.HttpSession;
 
-import org.apache.ambari.logsearch.common.RequestContext;
-import org.apache.ambari.logsearch.common.UserSessionInfo;
+import org.apache.ambari.logsearch.common.LogSearchContext;
 import org.apache.ambari.logsearch.manager.SessionMgr;
-import org.apache.ambari.logsearch.security.context.LogsearchContextHolder;
-import org.apache.ambari.logsearch.security.context.LogsearchSecurityContext;
 import org.apache.ambari.logsearch.util.CommonUtil;
+import org.apache.ambari.logsearch.web.model.User;
 import org.apache.log4j.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.authentication.AnonymousAuthenticationToken;
@@ -89,31 +87,21 @@ public class LogsearchSecurityContextFormationFilter extends GenericFilterBean {
           httpResponse.addCookie(cookie);
         }
         // [1]get the context from session
-        LogsearchSecurityContext context = (LogsearchSecurityContext) httpSession
+        LogSearchContext context = (LogSearchContext) httpSession
           .getAttribute(LOGSEARCH_SC_SESSION_KEY);
         if (context == null) {
-          context = new LogsearchSecurityContext();
+          context = new LogSearchContext();
           httpSession.setAttribute(LOGSEARCH_SC_SESSION_KEY, context);
         }
-        String userAgent = httpRequest.getHeader(USER_AGENT);
-        // Get the request specific info
-        RequestContext requestContext = new RequestContext();
-        String reqIP = httpRequest.getRemoteAddr();
-        requestContext.setIpAddress(reqIP);
-        requestContext.setMsaCookie(msaCookie);
-        requestContext.setUserAgent(userAgent);
-        requestContext.setServerRequestId(CommonUtil.genGUI());
-        requestContext.setRequestURL(httpRequest.getRequestURI());
-        context.setRequestContext(requestContext);
-        LogsearchContextHolder.setSecurityContext(context);
-        UserSessionInfo userSession = sessionMgr.processSuccessLogin(0, userAgent);
-        context.setUserSession(userSession);
+        LogSearchContext.setContext(context);
+        User user = sessionMgr.processSuccessLogin();
+        context.setUser(user);
       }
       chain.doFilter(request, response);
 
     } finally {
       // [4]remove context from thread-local
-      LogsearchContextHolder.resetSecurityContext();
+      LogSearchContext.resetContext();
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
index f1f2e31..72ee60f 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
@@ -26,8 +26,8 @@ import javax.annotation.PostConstruct;
 import org.apache.ambari.logsearch.util.ExternalServerClient;
 import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
-import org.apache.ambari.logsearch.util.StringUtil;
 import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.authentication.BadCredentialsException;
@@ -99,9 +99,6 @@ public class LogsearchExternalServerAuthenticationProvider extends
   ExternalServerClient externalServerClient;
 
   @Autowired
-  StringUtil stringUtil;
-
-  @Autowired
   JSONUtil jsonUtil;
 
   private String loginAPIURL = "/api/v1/users/$USERNAME/privileges?fields=*";// default
@@ -130,10 +127,10 @@ public class LogsearchExternalServerAuthenticationProvider extends
     }
     String username = authentication.getName();
     String password = (String) authentication.getCredentials();
-    if (stringUtil.isEmpty(username)) {
+    if (StringUtils.isBlank(username)) {
       throw new BadCredentialsException("Username can't be null or empty.");
     }
-    if (stringUtil.isEmpty(password)) {
+    if (StringUtils.isBlank(password)) {
       throw new BadCredentialsException("Password can't be null or empty.");
     }
     // html unescape

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
index a5ff295..44c31c5 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
@@ -20,9 +20,9 @@ package org.apache.ambari.logsearch.web.security;
 
 import java.util.Collection;
 
-import org.apache.ambari.logsearch.dao.UserDao;
-import org.apache.ambari.logsearch.util.StringUtil;
+import org.apache.ambari.logsearch.util.CommonUtil;
 import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.authentication.BadCredentialsException;
@@ -40,12 +40,6 @@ public class LogsearchFileAuthenticationProvider extends LogsearchAbstractAuthen
   private static Logger logger = Logger.getLogger(LogsearchFileAuthenticationProvider.class);
 
   @Autowired
-  UserDao userDao;
-
-  @Autowired
-  StringUtil stringUtil;
-
-  @Autowired
   private UserDetailsService userDetailsService;
 
   @Override
@@ -56,10 +50,10 @@ public class LogsearchFileAuthenticationProvider extends LogsearchAbstractAuthen
     }
     String username = authentication.getName();
     String password = (String) authentication.getCredentials();
-    if (stringUtil.isEmpty(username)) {
+    if (StringUtils.isBlank(username)) {
       throw new BadCredentialsException("Username can't be null or empty.");
     }
-    if (stringUtil.isEmpty(password)) {
+    if (StringUtils.isBlank(password)) {
       throw new BadCredentialsException("Password can't be null or empty.");
     }
     // html unescape
@@ -76,7 +70,7 @@ public class LogsearchFileAuthenticationProvider extends LogsearchAbstractAuthen
       throw new BadCredentialsException("Password can't be null or empty.");
     }
 
-    String encPassword = userDao.encryptPassword(username, password);
+    String encPassword = CommonUtil.encryptPassword(username, password);
     if (!encPassword.equals(user.getPassword())) {
       logger.error("Wrong password for user=" + username);
       throw new BadCredentialsException("Wrong password");

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java
index 7e0546e..ec2516c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java
@@ -18,11 +18,10 @@
  */
 package org.apache.ambari.logsearch.web.security;
 
-import org.apache.ambari.logsearch.util.StringUtil;
 import org.apache.ambari.logsearch.web.model.User;
 import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.authentication.BadCredentialsException;
 import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
 import org.springframework.security.core.Authentication;
@@ -34,9 +33,6 @@ public class LogsearchSimpleAuthenticationProvider extends LogsearchAbstractAuth
 
   private static Logger logger = Logger.getLogger(LogsearchSimpleAuthenticationProvider.class);
 
-  @Autowired
-  StringUtil stringUtil;
-
   @Override
   public Authentication authenticate(Authentication authentication) throws AuthenticationException {
     if (!this.isEnable()) {
@@ -46,7 +42,7 @@ public class LogsearchSimpleAuthenticationProvider extends LogsearchAbstractAuth
     String username = authentication.getName();
     String password = (String) authentication.getCredentials();
     username = StringEscapeUtils.unescapeHtml(username);
-    if (stringUtil.isEmpty(username)) {
+    if (StringUtils.isBlank(username)) {
       throw new BadCredentialsException("Username can't be null or empty.");
     }
     User user = new User();
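
Aside, not part of the patch: the replacement of the injected stringUtil.isEmpty helper with commons-lang's StringUtils.isBlank, repeated across the three authentication providers above, also rejects whitespace-only usernames and passwords, which a bare null/empty check would let through. A minimal sketch of the difference, assuming commons-lang 2.x is on the classpath (the class name below is hypothetical, for illustration only):

    // Illustration only: isBlank treats null, empty, and whitespace-only strings as blank.
    import org.apache.commons.lang.StringUtils;

    public class IsBlankSketch {
      public static void main(String[] args) {
        System.out.println(StringUtils.isBlank(null));    // true
        System.out.println(StringUtils.isBlank(""));      // true
        System.out.println(StringUtils.isBlank("   "));   // true  -> rejected as a credential
        System.out.println(StringUtils.isBlank("admin")); // false -> accepted
      }
    }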

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/common/LogSearchContextUtilTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/common/LogSearchContextUtilTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/common/LogSearchContextUtilTest.java
new file mode 100644
index 0000000..cd33741
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/common/LogSearchContextUtilTest.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.common;
+
+import org.apache.ambari.logsearch.web.model.User;
+import org.junit.Before;
+import org.junit.Test;
+
+import junit.framework.Assert;
+
+public class LogSearchContextUtilTest {
+
+  @Before
+  public void resetContext() {
+    LogSearchContext.resetContext();
+  }
+  
+  @Test
+  public void testNoContext() {
+    Assert.assertNull(LogSearchContext.getCurrentUsername());
+  }
+  
+  @Test
+  public void testUserSession() {
+    User user = new User("UserName", "Password", null);
+    
+    LogSearchContext context = new LogSearchContext();
+    context.setUser(user);
+    
+    LogSearchContext.setContext(context);
+    
+    Assert.assertEquals(LogSearchContext.getCurrentUsername(), "UserName");
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/common/ManageStartEndTimeTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/common/ManageStartEndTimeTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/common/ManageStartEndTimeTest.java
new file mode 100644
index 0000000..6b75d87
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/common/ManageStartEndTimeTest.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.common;
+
+import java.util.Date;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class ManageStartEndTimeTest {
+
+  @Test
+  public void testManageStartEndTime() {
+    ManageStartEndTime.manage();
+    Date[] range = ManageStartEndTime.getStartEndTime();
+    Assert.assertEquals(range[1].getTime() - range[0].getTime(), 60*60*1000);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/AuditSolrDaoTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/AuditSolrDaoTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/AuditSolrDaoTest.java
new file mode 100644
index 0000000..0b94b60
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/AuditSolrDaoTest.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.dao;
+
+import java.util.ArrayList;
+
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.common.util.NamedList;
+import org.easymock.Capture;
+import org.easymock.CaptureType;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import junit.framework.Assert;
+
+public class AuditSolrDaoTest {
+
+  @Test
+  public void testAuditSolrDaoPostConstructor() throws Exception {
+    SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
+    
+    NamedList<Object> response = new NamedList<Object>();
+    NamedList<Object> header = new NamedList<Object>();
+    header.add("status", 0);
+    response.add("responseHeader", header);
+    response.add("collections", new ArrayList<String>());
+    
+    Capture<CollectionAdminRequest.Create> captureCreateRequest = EasyMock.newCapture(CaptureType.LAST);
+    
+    EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.List.class), EasyMock.anyString())).andReturn(response);
+    
+    mockSolrClient.request(EasyMock.capture(captureCreateRequest), EasyMock.anyString());
+    EasyMock.expectLastCall().andReturn(response);
+    
+    EasyMock.replay(mockSolrClient);
+    
+    AuditSolrDao dao = new AuditSolrDao();
+    dao.solrClient = mockSolrClient;
+    dao.isZkConnectString = true;
+    
+    dao.postConstructor();
+    EasyMock.verify(mockSolrClient);
+    
+    CollectionAdminRequest.Create createRequest = captureCreateRequest.getValue();
+    Assert.assertEquals(createRequest.getConfigName(), "test_audit_logs_config_name");
+    Assert.assertEquals(createRequest.getNumShards().intValue(), 123);
+    Assert.assertEquals(createRequest.getReplicationFactor().intValue(), 456);
+    Assert.assertEquals(createRequest.getCollectionName(), "test_audit_logs_collection");
+  }
+}
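
Aside, not part of the patch: the new DAO tests above (and the SolrDaoBase/ServiceLogsSolrDao/UserConfigSolrDao tests that follow) all use EasyMock's record/replay/verify cycle with argument captures. A minimal, self-contained sketch of that pattern, using a hypothetical Indexer interface in place of SolrClient purely for illustration:

    // Sketch only, hypothetical Indexer interface (not Ambari code):
    // record an expectation, capture the argument, replay, then verify.
    import org.easymock.Capture;
    import org.easymock.CaptureType;
    import org.easymock.EasyMock;

    public class EasyMockCaptureSketch {

      // Stand-in collaborator for SolrClient, introduced only for this sketch.
      interface Indexer {
        String createCollection(String name);
      }

      public static void main(String[] args) {
        Indexer mock = EasyMock.strictMock(Indexer.class);

        // Record phase: expect one call and capture whatever argument it receives.
        Capture<String> captured = EasyMock.newCapture(CaptureType.LAST);
        EasyMock.expect(mock.createCollection(EasyMock.capture(captured))).andReturn("ok");
        EasyMock.replay(mock);

        // Replay phase: the call the code under test would normally make.
        mock.createCollection("test_audit_logs_collection");

        // Verify phase: fails if an expected call never happened; the captured
        // argument can then be asserted on, as the tests do with
        // CollectionAdminRequest.Create built inside the DAO.
        EasyMock.verify(mock);
        System.out.println(captured.getValue()); // test_audit_logs_collection
      }
    }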

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDaoTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDaoTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDaoTest.java
new file mode 100644
index 0000000..2985a62
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDaoTest.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.dao;
+
+import java.util.ArrayList;
+
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.common.util.NamedList;
+import org.easymock.Capture;
+import org.easymock.CaptureType;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import junit.framework.Assert;
+
+public class ServiceLogsSolrDaoTest {
+
+  @Test
+  public void testServiceLogsSolrDaoPostConstructor() throws Exception {
+    SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
+    
+    NamedList<Object> response = new NamedList<Object>();
+    NamedList<Object> header = new NamedList<Object>();
+    header.add("status", 0);
+    response.add("responseHeader", header);
+    response.add("collections", new ArrayList<String>());
+    
+    Capture<CollectionAdminRequest.Create> captureCreateRequest = EasyMock.newCapture(CaptureType.LAST);
+    
+    EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.List.class), EasyMock.anyString())).andReturn(response);
+    mockSolrClient.request(EasyMock.capture(captureCreateRequest), EasyMock.anyString()); EasyMock.expectLastCall().andReturn(response);
+    
+    EasyMock.replay(mockSolrClient);
+    
+    ServiceLogsSolrDao dao = new ServiceLogsSolrDao();
+    dao.solrClient = mockSolrClient;
+    dao.isZkConnectString = true;
+    
+    dao.postConstructor();
+    EasyMock.verify(mockSolrClient);
+    
+    CollectionAdminRequest.Create createRequest = captureCreateRequest.getValue();
+    Assert.assertEquals(createRequest.getConfigName(), "test_service_logs_config_name");
+    Assert.assertEquals(createRequest.getNumShards().intValue(), 789);
+    Assert.assertEquals(createRequest.getReplicationFactor().intValue(), 987);
+    Assert.assertEquals(createRequest.getCollectionName(), "test_service_logs_collection");
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/SolrDaoBaseTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/SolrDaoBaseTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/SolrDaoBaseTest.java
new file mode 100644
index 0000000..0ded95d
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/SolrDaoBaseTest.java
@@ -0,0 +1,286 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.dao;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+
+import javax.ws.rs.WebApplicationException;
+
+import org.apache.ambari.logsearch.manager.MgrBase.LogType;
+import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.SolrRequest.METHOD;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.client.solrj.response.UpdateResponse;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.util.NamedList;
+import org.easymock.EasyMock;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import junit.framework.Assert;
+
+public class SolrDaoBaseTest {
+  @Rule
+  public ExpectedException expectedException = ExpectedException.none();
+  
+  // ----------------------------------------------------------- connectToSolr -----------------------------------------------------------
+  
+  @Test
+  public void testConnectToSolrWithConnectString() throws Exception {
+    SolrDaoBase dao = new SolrDaoBase(null) {};
+    SolrClient solrClient = dao.connectToSolr(null, "zk_connect_string", "collection");
+    
+    Assert.assertEquals(solrClient.getClass(), CloudSolrClient.class);
+  }
+  
+  @Test
+  public void testConnectToSolrWithUrl() throws Exception {
+    SolrDaoBase dao = new SolrDaoBase(null) {};
+    SolrClient solrClient = dao.connectToSolr("url", null, "collection");
+    
+    Assert.assertEquals(solrClient.getClass(), HttpSolrClient.class);
+  }
+  
+  @Test
+  public void testConnectToSolrWithBoth() throws Exception {
+    SolrDaoBase dao = new SolrDaoBase(null) {};
+    SolrClient solrClient = dao.connectToSolr("url", "zk_connect_string", "collection");
+    
+    Assert.assertEquals(solrClient.getClass(), CloudSolrClient.class);
+  }
+  
+  @Test
+  public void testConnectToSolrWithNeither() throws Exception {
+    expectedException.expect(Exception.class);
+    expectedException.expectMessage("Both zkConnectString and URL are empty. zkConnectString=null, collection=collection, url=null");
+
+    SolrDaoBase dao = new SolrDaoBase(null) {};
+    dao.connectToSolr(null, null, "collection");
+  }
+  
+  @Test
+  public void testConnectToSolrWithoutCollection() throws Exception {
+    expectedException.expect(Exception.class);
+    expectedException.expectMessage("For solr, collection name is mandatory. zkConnectString=zk_connect_string, collection=null, url=url");
+
+    SolrDaoBase dao = new SolrDaoBase(null) {};
+    dao.connectToSolr("url", "zk_connect_string", null);
+  }
+  
+  // ---------------------------------------------------------- checkSolrStatus ----------------------------------------------------------
+  
+  @Test
+  public void testCheckSolrStatus() throws Exception {
+    SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
+    
+    NamedList<Object> response = new NamedList<Object>();
+    NamedList<Object> header = new NamedList<Object>();
+    header.add("status", 0);
+    response.add("responseHeader", header);
+    response.add("collections", new ArrayList<String>());
+    
+    EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.List.class), EasyMock.anyString())).andReturn(response);
+    EasyMock.replay(mockSolrClient);
+    
+    SolrDaoBase dao = new SolrDaoBase(null) {};
+    dao.solrClient = mockSolrClient;
+    
+    boolean status = dao.checkSolrStatus(10000);
+    Assert.assertTrue(status);
+    
+    EasyMock.verify(mockSolrClient);
+  }
+  
+  @Test
+  public void testCheckSolrStatusNotSuccessful() throws Exception {
+    SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
+    EasyMock.replay(mockSolrClient);
+    
+    SolrDaoBase dao = new SolrDaoBase(null) {};
+    dao.solrClient = mockSolrClient;
+    
+    boolean status = dao.checkSolrStatus(10000);
+    Assert.assertFalse(status);
+    
+    EasyMock.verify(mockSolrClient);
+  }
+  
+  // ------------------------------------------------------------- setupAlias ------------------------------------------------------------
+  
+  @Test
+  public void testSetupAlias() throws Exception {
+    SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
+    CloudSolrClient mockSolrClouldClient = EasyMock.strictMock(CloudSolrClient.class);
+    
+    NamedList<Object> response = new NamedList<Object>();
+    NamedList<Object> header = new NamedList<Object>();
+    header.add("status", 0);
+    response.add("responseHeader", header);
+    response.add("collections", Arrays.asList("collection1", "collection2"));
+    
+    EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.List.class), EasyMock.anyString())).andReturn(response);
+    EasyMock.expect(mockSolrClouldClient.request(EasyMock.anyObject(CollectionAdminRequest.CreateAlias.class), EasyMock.anyString())).andReturn(response);
+    mockSolrClouldClient.setDefaultCollection("alias_name"); EasyMock.expectLastCall();
+    
+    EasyMock.replay(mockSolrClient, mockSolrClouldClient);
+    
+    SolrDaoBase dao = new SolrDaoBase(null) {};
+    dao.isZkConnectString = true;
+    dao.solrClient = mockSolrClient;
+    dao.solrClouldClient = mockSolrClouldClient;
+    dao.collectionName = "test_collection";
+    
+    dao.setupAlias("alias_name", Arrays.asList("collection1", "collection2"));
+    
+    Thread.sleep(1000);
+    
+    EasyMock.verify(mockSolrClient, mockSolrClouldClient);
+  }
+  
+  // ---------------------------------------------------------- setupCollections ---------------------------------------------------------
+  
+  @Test
+  public void testCreateCollectionsDontSplitPopulate() throws Exception {
+    SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
+    
+    NamedList<Object> response = new NamedList<Object>();
+    NamedList<Object> header = new NamedList<Object>();
+    header.add("status", 0);
+    response.add("responseHeader", header);
+    response.add("collections", new ArrayList<String>());
+    
+    EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.List.class), EasyMock.anyString())).andReturn(response);
+    EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.Create.class), EasyMock.anyString())).andReturn(response);
+    EasyMock.replay(mockSolrClient);
+    
+    SolrDaoBase dao = new SolrDaoBase(null) {};
+    dao.isZkConnectString = true;
+    dao.solrClient = mockSolrClient;
+    dao.collectionName = "test_collection";
+    
+    dao.setupCollections("none", "configName", 1, 1, true);
+    
+    EasyMock.verify(mockSolrClient);
+  }
+  
+  @Test
+  public void testCreateCollectionsSplitDontPopulate() throws Exception {
+    SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
+    
+    NamedList<Object> response = new NamedList<Object>();
+    NamedList<Object> header = new NamedList<Object>();
+    header.add("status", 0);
+    response.add("responseHeader", header);
+    response.add("collections", new ArrayList<String>());
+    
+    EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.List.class), EasyMock.anyString())).andReturn(response);
+    EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.Create.class), EasyMock.anyString())).andReturn(response);
+    EasyMock.replay(mockSolrClient);
+    
+    SolrDaoBase dao = new SolrDaoBase(null) {};
+    dao.isZkConnectString = true;
+    dao.solrClient = mockSolrClient;
+    
+    dao.setupCollections("1", "configName", 3, 1, false);
+    
+    EasyMock.verify(mockSolrClient);
+  }
+  
+  // -------------------------------------------------------------- process --------------------------------------------------------------
+  
+  @Test
+  public void testProcess() throws Exception {
+    SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
+    EasyMock.expect(mockSolrClient.query(EasyMock.anyObject(SolrQuery.class), EasyMock.eq(METHOD.POST))).andReturn(new QueryResponse());
+    EasyMock.replay(mockSolrClient);
+    
+    SolrDaoBase dao = new SolrDaoBase(null) {};
+    dao.solrClient = mockSolrClient;
+    
+    dao.process(new SolrQuery());
+    
+    EasyMock.verify(mockSolrClient);
+  }
+  
+  @Test
+  public void testProcessNoConnection() throws Exception {
+    expectedException.expect(WebApplicationException.class);
+    
+    SolrDaoBase dao = new SolrDaoBase(LogType.SERVICE) {};
+    dao.restErrorUtil = new RESTErrorUtil();
+    dao.process(new SolrQuery());
+  }
+  
+  // ----------------------------------------------------------- add/removeDoc -----------------------------------------------------------
+  
+  @Test
+  public void testAddDoc() throws Exception {
+    SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
+    
+    UpdateResponse updateResponse = new UpdateResponse();
+    NamedList<Object> response = new NamedList<Object>();
+    NamedList<Object> header = new NamedList<Object>();
+    header.add("QTime", 1);
+    response.add("responseHeader", header);
+    updateResponse.setResponse(response);
+    
+    EasyMock.expect(mockSolrClient.add(EasyMock.anyObject(SolrInputDocument.class))).andReturn(updateResponse);
+    EasyMock.expect(mockSolrClient.commit()).andReturn(updateResponse);
+    EasyMock.replay(mockSolrClient);
+    
+    SolrDaoBase dao = new SolrDaoBase(null) {};
+    dao.solrClient = mockSolrClient;
+    
+    dao.addDocs(new SolrInputDocument());
+    
+    EasyMock.verify(mockSolrClient);
+  }
+  
+  @Test
+  public void testRemoveDoc() throws Exception {
+    SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
+    
+    UpdateResponse updateResponse = new UpdateResponse();
+    NamedList<Object> response = new NamedList<Object>();
+    NamedList<Object> header = new NamedList<Object>();
+    header.add("QTime", 1);
+    response.add("responseHeader", header);
+    updateResponse.setResponse(response);
+    
+    EasyMock.expect(mockSolrClient.deleteByQuery(EasyMock.anyString())).andReturn(updateResponse);
+    EasyMock.expect(mockSolrClient.commit()).andReturn(updateResponse);
+    EasyMock.replay(mockSolrClient);
+    
+    SolrDaoBase dao = new SolrDaoBase(null) {};
+    dao.solrClient = mockSolrClient;
+    
+    dao.removeDoc("query");
+    
+    EasyMock.verify(mockSolrClient);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserConfigSolrDaoTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserConfigSolrDaoTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserConfigSolrDaoTest.java
new file mode 100644
index 0000000..5ef286f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserConfigSolrDaoTest.java
@@ -0,0 +1,129 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.dao;
+
+import java.util.ArrayList;
+
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrRequest.METHOD;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.client.solrj.response.UpdateResponse;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.easymock.Capture;
+import org.easymock.CaptureType;
+import org.easymock.EasyMock;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+
+import junit.framework.Assert;
+
+@RunWith(SpringJUnit4ClassRunner.class)
+@ContextConfiguration(locations = { "/applicationContext.xml" })
+public class UserConfigSolrDaoTest {
+  
+  @Autowired
+  private UserConfigSolrDao dao;
+  
+  @Test
+  public void testUserConfigDaoPostConstructor() throws Exception {
+    SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
+    
+    NamedList<Object> requestResponse = new NamedList<Object>();
+    NamedList<Object> requestResponseHeader = new NamedList<Object>();
+    requestResponseHeader.add("status", 0);
+    requestResponse.add("responseHeader", requestResponseHeader);
+    requestResponse.add("collections", new ArrayList<String>());
+    
+    QueryResponse queryResponse = new QueryResponse();
+    
+    UpdateResponse updateResponse = new UpdateResponse();
+    NamedList<Object> updateResponseContent = new NamedList<Object>();
+    NamedList<Object> updateResponseHeader = new NamedList<Object>();
+    updateResponseHeader.add("QTime", 1);
+    updateResponseContent.add("responseHeader", updateResponseHeader);
+    updateResponse.setResponse(updateResponseContent);
+    
+    Capture<CollectionAdminRequest.Create> captureCreateRequest = EasyMock.newCapture(CaptureType.LAST);
+    Capture<SolrParams> captureSolrParams = EasyMock.newCapture(CaptureType.LAST);
+    Capture<METHOD> captureMethod = EasyMock.newCapture(CaptureType.LAST);
+    Capture<SolrInputDocument> captureSolrInputDocument = EasyMock.newCapture(CaptureType.LAST);
+    
+    EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.List.class), EasyMock.anyString())).andReturn(requestResponse);
+    mockSolrClient.request(EasyMock.capture(captureCreateRequest), EasyMock.anyString()); EasyMock.expectLastCall().andReturn(requestResponse);
+    mockSolrClient.query(EasyMock.capture(captureSolrParams), EasyMock.capture(captureMethod)); EasyMock.expectLastCall().andReturn(queryResponse);
+    mockSolrClient.add(EasyMock.capture(captureSolrInputDocument)); EasyMock.expectLastCall().andReturn(updateResponse);
+    EasyMock.expect(mockSolrClient.commit()).andReturn(updateResponse);
+    EasyMock.replay(mockSolrClient);
+    
+    dao.solrClient = mockSolrClient;
+    dao.isZkConnectString = true;
+    
+    dao.postConstructor();
+    EasyMock.verify(mockSolrClient);
+    
+    CollectionAdminRequest.Create createRequest = captureCreateRequest.getValue();
+    Assert.assertEquals(createRequest.getConfigName(), "test_history_logs_config_name");
+    Assert.assertEquals(createRequest.getReplicationFactor().intValue(), 234);
+    Assert.assertEquals(createRequest.getCollectionName(), "test_history_logs_collection");
+    
+    SolrParams solrParams = captureSolrParams.getValue();
+    Assert.assertEquals(solrParams.get("q"), "*:*");
+    Assert.assertEquals(solrParams.get("fq"), "rowtype:log_feeder_config");
+    
+    METHOD method = captureMethod.getValue();
+    Assert.assertEquals(method, METHOD.POST);
+    
+    SolrInputDocument solrInputDocument = captureSolrInputDocument.getValue();
+    Assert.assertNotNull(solrInputDocument.getFieldValue("id"));
+    Assert.assertEquals(solrInputDocument.getFieldValue("rowtype"), "log_feeder_config");
+    Assert.assertEquals(solrInputDocument.getFieldValue("jsons"), "{\"filter\":{\"test_component2\":{\"label\":\"test_component2\",\"hosts\":[],\"defaultLevels\":[\"FATAL\",\"ERROR\",\"WARN\",\"INFO\",\"DEBUG\",\"TRACE\"],\"overrideLevels\":[]},\"test_component1\":{\"label\":\"test_component1\",\"hosts\":[],\"defaultLevels\":[\"FATAL\",\"ERROR\",\"WARN\",\"INFO\",\"DEBUG\",\"TRACE\"],\"overrideLevels\":[]}},\"id\":\"" + solrInputDocument.getFieldValue("id") + "\"}");
+    Assert.assertEquals(solrInputDocument.getFieldValue("username"), "log_feeder_config");
+    Assert.assertEquals(solrInputDocument.getFieldValue("filtername"), "log_feeder_config");
+  }
+  
+  @Test
+  public void testDeleteUserConfig() throws Exception {
+    SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
+    
+    UpdateResponse updateResponse = new UpdateResponse();
+    NamedList<Object> response = new NamedList<Object>();
+    NamedList<Object> header = new NamedList<Object>();
+    header.add("QTime", 1);
+    response.add("responseHeader", header);
+    updateResponse.setResponse(response);
+    
+    EasyMock.expect(mockSolrClient.deleteByQuery("id:test_id")).andReturn(updateResponse);
+    EasyMock.expect(mockSolrClient.commit()).andReturn(updateResponse);
+    EasyMock.replay(mockSolrClient);
+    
+    dao.solrClient = mockSolrClient;
+    dao.isZkConnectString = true;
+    
+    dao.deleteUserConfig("test_id");
+    
+    EasyMock.verify(mockSolrClient);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserDaoTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserDaoTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserDaoTest.java
new file mode 100644
index 0000000..703d877
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserDaoTest.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.dao;
+
+import java.util.Collection;
+
+import org.apache.ambari.logsearch.web.model.Role;
+import org.apache.ambari.logsearch.web.model.User;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertTrue;
+
+@RunWith(SpringJUnit4ClassRunner.class)
+@ContextConfiguration(locations = { "/applicationContext.xml" })
+public class UserDaoTest {
+
+  @Autowired
+  private UserDao dao;
+  
+  @Test
+  public void testUserDaoInitAndFindUser() throws Exception {
+    User user = dao.loadUserByUsername("testUserName");
+    assertEquals(user.getUsername(), "testUserName");
+    assertEquals(user.getFirstName(), "Test User Name");
+    assertEquals(user.getLastName(), "Test User Name");
+    
+    Collection<? extends GrantedAuthority> authorities = user.getAuthorities();
+    assertTrue(authorities.size() == 1);
+    
+    Role authority = (Role)authorities.iterator().next();
+    assertEquals(authority.getName(), "ROLE_USER");
+    assertTrue(authority.getPrivileges().size() == 1);
+    assertEquals(authority.getPrivileges().get(0).getName(), "READ_PRIVILEGE");
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/test/resources/HadoopServiceConfig.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/resources/HadoopServiceConfig.json b/ambari-logsearch/ambari-logsearch-portal/src/test/resources/HadoopServiceConfig.json
new file mode 100644
index 0000000..344dc3d
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/resources/HadoopServiceConfig.json
@@ -0,0 +1,17 @@
+{
+  "service": {
+    "accumulo": {
+      "label": "TestService",
+      "components": [
+        {
+          "name": "test_component1"
+        },
+        {
+          "name": "test_component2"
+        }
+      ],
+      "dependencies": [
+      ]
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext.xml b/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext.xml
new file mode 100644
index 0000000..5e24d88
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext.xml
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<beans xmlns="http://www.springframework.org/schema/beans"
+xmlns:aop="http://www.springframework.org/schema/aop" xmlns:jee="http://www.springframework.org/schema/jee"
+xmlns:tx="http://www.springframework.org/schema/tx" xmlns:context="http://www.springframework.org/schema/context"
+xmlns:task="http://www.springframework.org/schema/task" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+xmlns:util="http://www.springframework.org/schema/util"
+xsi:schemaLocation="http://www.springframework.org/schema/aop
+http://www.springframework.org/schema/aop/spring-aop-4.2.xsd
+http://www.springframework.org/schema/beans
+http://www.springframework.org/schema/beans/spring-beans-4.2.xsd
+http://www.springframework.org/schema/context
+http://www.springframework.org/schema/context/spring-context-4.2.xsd
+http://www.springframework.org/schema/jee
+http://www.springframework.org/schema/jee/spring-jee-4.2.xsd
+http://www.springframework.org/schema/tx
+http://www.springframework.org/schema/tx/spring-tx-4.2.xsd
+http://www.springframework.org/schema/task
+http://www.springframework.org/schema/task/spring-task-4.2.xsd
+http://www.springframework.org/schema/util
+http://www.springframework.org/schema/util/spring-util.xsd">
+
+	<context:component-scan base-package="org.apache.ambari.logsearch" />
+	<task:annotation-driven />
+	<bean id="xmlPropertyConfigurer" class="org.apache.ambari.logsearch.util.XMLPropertiesUtil" />
+	
+	<bean id="propertyConfigurer" class="org.apache.ambari.logsearch.util.PropertiesUtil">
+		<property name="locations">
+			<list>
+				<value>classpath:default.properties</value>
+				<value>classpath:logsearch.properties</value>
+				<value>classpath:logsearch-admin-site.xml</value>
+			</list>
+		</property>
+		<property name="propertiesPersister" ref="xmlPropertyConfigurer" />
+	</bean>
+	
+</beans>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext_testManagers.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext_testManagers.xml b/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext_testManagers.xml
new file mode 100644
index 0000000..f1d1dbe
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext_testManagers.xml
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<beans xmlns="http://www.springframework.org/schema/beans"
+xmlns:aop="http://www.springframework.org/schema/aop" xmlns:jee="http://www.springframework.org/schema/jee"
+xmlns:tx="http://www.springframework.org/schema/tx" xmlns:context="http://www.springframework.org/schema/context"
+xmlns:task="http://www.springframework.org/schema/task" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+xmlns:util="http://www.springframework.org/schema/util"
+xsi:schemaLocation="http://www.springframework.org/schema/aop
+http://www.springframework.org/schema/aop/spring-aop-4.2.xsd
+http://www.springframework.org/schema/beans
+http://www.springframework.org/schema/beans/spring-beans-4.2.xsd
+http://www.springframework.org/schema/context
+http://www.springframework.org/schema/context/spring-context-4.2.xsd
+http://www.springframework.org/schema/jee
+http://www.springframework.org/schema/jee/spring-jee-4.2.xsd
+http://www.springframework.org/schema/tx
+http://www.springframework.org/schema/tx/spring-tx-4.2.xsd
+http://www.springframework.org/schema/task
+http://www.springframework.org/schema/task/spring-task-4.2.xsd
+http://www.springframework.org/schema/util
+http://www.springframework.org/schema/util/spring-util.xsd">
+
+	<context:component-scan base-package="org.apache.ambari.logsearch.manager.dao, org.apache.ambari.logsearch.util" />
+	<task:annotation-driven />
+	<bean id="xmlPropertyConfigurer" class="org.apache.ambari.logsearch.util.XMLPropertiesUtil" />
+	
+	<bean id="propertyConfigurer" class="org.apache.ambari.logsearch.util.PropertiesUtil">
+		<property name="locations">
+			<list>
+				<value>classpath:default.properties</value>
+				<value>classpath:logsearch.properties</value>
+				<value>classpath:logsearch-admin-site.xml</value>
+			</list>
+		</property>
+		<property name="propertiesPersister" ref="xmlPropertyConfigurer" />
+	</bean>
+	
+</beans>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/test/resources/logsearch.properties
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/resources/logsearch.properties b/ambari-logsearch/ambari-logsearch-portal/src/test/resources/logsearch.properties
new file mode 100755
index 0000000..fa3efb8
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/resources/logsearch.properties
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#	
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+logsearch.solr.audit.logs.config.name=test_audit_logs_config_name
+logsearch.collection.audit.logs.numshards=123
+logsearch.collection.audit.logs.replication.factor=456
+logsearch.solr.collection.audit.logs=test_audit_logs_collection
+
+logsearch.solr.service.logs.config.name=test_service_logs_config_name
+logsearch.collection.service.logs.numshards=789
+logsearch.collection.service.logs.replication.factor=987
+logsearch.solr.collection.service.logs=test_service_logs_collection
+logsearch.service.logs.split.interval.mins=1
+
+logsearch.solr.history.config.name=test_history_logs_config_name
+logsearch.collection.history.replication.factor=234
+logsearch.solr.collection.history=test_history_logs_collection
+
+logsearch.auth.file.enable=true
+logsearch.login.credentials.file=user_pass.json
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/test/resources/user_pass.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/resources/user_pass.json b/ambari-logsearch/ambari-logsearch-portal/src/test/resources/user_pass.json
new file mode 100644
index 0000000..0a04afe
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/resources/user_pass.json
@@ -0,0 +1,8 @@
+{
+  "users": [{
+    "name": "Test User Name",
+    "username": "testUserName",
+    "password": "testUserPassword",
+    "en_password": ""
+  }]
+}
\ No newline at end of file


[10/50] [abbrv] ambari git commit: AMBARI-18214. Restify Log Search endpoints (oleewere)

Posted by ol...@apache.org.
AMBARI-18214. Restify Log Search endpoints (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f8cf23e4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f8cf23e4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f8cf23e4

Branch: refs/heads/branch-dev-logsearch
Commit: f8cf23e429f0f084822eafe63fed0a3ffb9e77f2
Parents: dff48f0
Author: oleewere <ol...@gmail.com>
Authored: Fri Aug 19 17:31:55 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:33:58 2016 +0200

----------------------------------------------------------------------
 .../ambari/logsearch/dao/SolrDaoBase.java       |   2 +-
 .../ambari/logsearch/doc/DocConstants.java      |   1 +
 .../ambari/logsearch/manager/AuditMgr.java      |   2 +-
 .../ambari/logsearch/manager/LogsMgr.java       | 119 ++--
 .../ambari/logsearch/rest/AuditLogsREST.java    | 283 +++++++++
 .../apache/ambari/logsearch/rest/AuditREST.java | 312 ----------
 .../ambari/logsearch/rest/DashboardREST.java    | 575 ------------------
 .../ambari/logsearch/rest/LogFileREST.java      |   5 +-
 .../ambari/logsearch/rest/PublicREST.java       |   2 +-
 .../ambari/logsearch/rest/ServiceLogsREST.java  | 578 +++++++++++++++++++
 .../ambari/logsearch/rest/UserConfigREST.java   |  17 +-
 .../src/main/webapp/login.html                  |  28 +-
 .../collection_bases/VAuditLogListBase.js       |  16 +-
 .../collection_bases/VEventHistoryListBase.js   |  14 +-
 .../scripts/collection_bases/VGroupListBase.js  |  16 +-
 .../collection_bases/VLogLevelListBase.js       |   2 +-
 .../scripts/collection_bases/VLogListBase.js    |  10 +-
 .../collection_bases/VNameValueListBase.js      |  14 +-
 .../scripts/collection_bases/VNodeListBase.js   |   4 +-
 .../webapp/scripts/model_bases/VAuditLogBase.js |   2 +-
 .../scripts/model_bases/VCommonModelBase.js     |   2 +-
 .../scripts/model_bases/VEventHistoryBase.js    |   2 +-
 .../scripts/model_bases/VGraphInfoBase.js       |   2 +-
 .../webapp/scripts/model_bases/VLogLevelBase.js |   2 +-
 .../scripts/model_bases/VUserFilterBase.js      |   2 +-
 .../src/main/webapp/scripts/utils/ViewUtils.js  |   2 +-
 .../scripts/views/audit/AuditAggregatedView.js  |   6 +-
 .../scripts/views/audit/AuditTabLayoutView.js   |  22 +-
 .../dashboard/BubbleGraphTableLayoutView.js     |  30 +-
 .../views/dashboard/ComponentListView.js        |   4 +-
 .../scripts/views/dashboard/ComponentsView.js   |   2 +-
 .../scripts/views/dashboard/DashboardView.js    |  12 +-
 .../views/dashboard/GridTableLayoutView.js      |   2 +-
 .../webapp/scripts/views/dashboard/HostsView.js |   2 +-
 .../scripts/views/dashboard/LogLevelBoxView.js  |   2 +-
 .../scripts/views/dashboard/MainLayoutView.js   |  44 +-
 .../dialog/GlobalExclusionCompositeView.js      |   2 +-
 .../views/filter/CreateLogfeederFilterView.js   |  20 +-
 .../scripts/views/graphs/GraphLayoutView.js     |  12 +-
 .../scripts/views/graphs/GridGraphLayoutView.js |   2 +-
 .../webapp/scripts/views/tabs/ComparisonView.js |   2 +-
 .../views/tabs/HierarchyTabLayoutView.js        |  36 +-
 .../webapp/scripts/views/tabs/LogFileView.js    |  12 +-
 .../main/webapp/scripts/views/tabs/TreeView.js  |   2 +-
 .../troubleshoot/TroubleShootLayoutView.js      |   6 +-
 .../src/main/webapp/templates/graphs/backup.js  |  12 +-
 .../logging/LoggingRequestHelperImpl.java       |   4 +-
 47 files changed, 1107 insertions(+), 1143 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
index 672507a..91c4a26 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
@@ -547,7 +547,7 @@ public abstract class SolrDaoBase {
       QueryResponse queryResponse = solrClient.query(solrQuery,
         METHOD.POST);
 
-      if (event != null && !"/getLiveLogsCount".equalsIgnoreCase(event)) {
+      if (event != null && !"/audit/logs/live/count".equalsIgnoreCase(event)) {
         logPerformance.info("\n Username :- "
           + LogsearchContextUtil.getCurrentUsername()
           + " Event :- " + event + " SolrQuery :- " + solrQuery

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
index c1572b7..c14fc08 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
@@ -105,6 +105,7 @@ public class DocConstants {
     public static final String HOST_D = "";
     public static final String COMPONENT_D = "";
     public static final String LOG_TYPE_D = "";
+    public static final String TAIL_SIZE_D = "";
   }
 
   public class LogFileOperationDescriptions {

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
index d4f2986..58c3a4d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
@@ -258,7 +258,7 @@ public class AuditMgr extends MgrBase {
   public String getLiveLogCounts() {
     VNameValueList nameValueList = new VNameValueList();
     SolrQuery solrQuery = new SolrQuery();
-    solrQuery.setParam("event", "/getLiveLogsCount");
+    solrQuery.setParam("event", "/audit/logs/live/count");
     try {
       String startDate = dateUtil
         .convertGivenDateFormatToSolrDateFormat(ManageStartEndTime.startDate);

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
index b03a643..451fac1 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -88,7 +88,7 @@ public class LogsMgr extends MgrBase {
   public static List<String> cancelByDate = new CopyOnWriteArrayList<String>();
 
   public static Map<String, String> mapUniqueId = new ConcurrentHashMap<String, String>();
-  
+
   public static enum CONDITION {
     OR, AND
   }
@@ -140,7 +140,7 @@ public class LogsMgr extends MgrBase {
       SolrQuery solrQuery = queryGenerator
           .commonServiceFilterQuery(searchCriteria);
 
-      solrQuery.setParam("event", "/solr/logs_search");
+      solrQuery.setParam("event", "/service/logs");
 
       VSolrLogList collection = getLogAsPaginationProvided(solrQuery,
           serviceLogsSolrDao);
@@ -151,7 +151,7 @@ public class LogsMgr extends MgrBase {
   public String getHosts(SearchCriteria searchCriteria) {
     return getFields(searchCriteria, LogSearchConstants.SOLR_HOST);
   }
-  
+
   public String getFields(SearchCriteria searchCriteria,String field){
 
     SolrQuery solrQuery = new SolrQuery();
@@ -185,7 +185,7 @@ public class LogsMgr extends MgrBase {
         solrDoc.put(field, temp);
         docList.add(solrDoc);
       }
-      
+
       collection.setGroupDocuments(docList);
       if(!docList.isEmpty()){
         collection.setStartIndex((int) docList.getStart());
@@ -197,7 +197,7 @@ public class LogsMgr extends MgrBase {
       throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
           .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
-  
+
   }
 
   public String getComponents(SearchCriteria searchCriteria) {
@@ -274,11 +274,11 @@ public class LogsMgr extends MgrBase {
         return collection;
       }
       List<Count> fieldList = facetFields.getValues();
-      
+
       if(fieldList == null){
         return collection;
       }
-      
+
       for (Count cnt : fieldList) {
         if (cnt != null) {
           VCount vCount = new VCount();
@@ -293,11 +293,11 @@ public class LogsMgr extends MgrBase {
       throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
           .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
-    
+
     collection.setCounts(vCounts);
     return collection;
   }
-  
+
   public VCountList getLogLevelCount(SearchCriteria searchCriteria) {
     return getFieldCount(searchCriteria, LogSearchConstants.SOLR_LEVEL);
   }
@@ -331,7 +331,7 @@ public class LogsMgr extends MgrBase {
           if(!stringUtil.isEmpty(firstPriority)){
             hostNode.setType(firstPriority);
           }
-          
+
           hostNode.setParent(true);
           hostNode.setRoot(true);
           PivotField hostPivot = null;
@@ -467,7 +467,7 @@ public class LogsMgr extends MgrBase {
 
   public String getHostListByComponent(SearchCriteria searchCriteria) {
     SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-    solrQuery.setParam("event", "/getHostListByComponent");
+    solrQuery.setParam("event", "/service/hosts/component");
 
     VNodeList list = new VNodeList();
     if (searchCriteria.getSortBy() == null) {
@@ -484,10 +484,10 @@ public class LogsMgr extends MgrBase {
     } else {
       return convertObjToString(list);
     }
-    
+
     String firstHirarchy = "type,host,level";
     String secondHirarchy = "type,level";
-   
+
     try {
       queryGenerator.setFacetPivot(solrQuery, 1, firstHirarchy,
         secondHirarchy);
@@ -528,7 +528,7 @@ public class LogsMgr extends MgrBase {
   public VNameValueList getLogsLevelCount(SearchCriteria sc) {
     VNameValueList nameValueList = new VNameValueList();
     SolrQuery query = queryGenerator.commonServiceFilterQuery(sc);
-    query.setParam("event", "/getLogLevelCounts");
+    query.setParam("event", "/service/logs/levels/counts/namevalues");
     List<VNameValue> logsCounts = getLogLevelFacets(query);
     nameValueList.setVNameValues(logsCounts);
 
@@ -628,40 +628,40 @@ public class LogsMgr extends MgrBase {
   public List<Count> getFacetCounts(SolrQuery solrQuery, String facetField)
     throws SolrServerException, IOException, SolrException {
     List<Count> list = new ArrayList<FacetField.Count>();
-    
+
     QueryResponse response = serviceLogsSolrDao.process(solrQuery);
     if(response == null){
       return list;
     }
-    
+
     FacetField field = response.getFacetField(facetField);
     if (field == null) {
       return list;
     }
     list = field.getValues();
-    
-    
+
+
     return list;
   }
 
   public String getPageByKeyword(SearchCriteria searchCriteria)
     throws SolrServerException {
     String defaultChoice = "0";
-    
+
     String key = (String) searchCriteria.getParamValue("keyword");
     if(stringUtil.isEmpty(key)){
       throw restErrorUtil.createRESTException("Keyword was not given",
           MessageEnums.DATA_NOT_FOUND);
     }
-    
+
     String keyword = solrUtil.escapeForStandardTokenizer(key);
-    
+
     if(keyword.startsWith("\"") && keyword.endsWith("\"")){
       keyword = keyword.substring(1);
       keyword = keyword.substring(0, keyword.length()-1);
     }
     keyword = "*" + keyword + "*";
-   
+
 
     String keyType = (String) searchCriteria.getParamValue("keywordType");
     QueryResponse queryResponse = null;
@@ -684,7 +684,7 @@ public class LogsMgr extends MgrBase {
         nextPageLogTimeQuery.remove("rows");
         nextPageLogTimeQuery.setStart(lastLogIndexNumber);
         nextPageLogTimeQuery.setRows(1);
-        
+
         queryResponse = serviceLogsSolrDao.process(
             nextPageLogTimeQuery);
         if(queryResponse == null){
@@ -697,7 +697,7 @@ public class LogsMgr extends MgrBase {
           throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
-        
+
         SolrDocument solrDoc = docList.get(0);
 
         Date logDate = (Date) solrDoc.get(LogSearchConstants.LOGTIME);
@@ -765,7 +765,7 @@ public class LogsMgr extends MgrBase {
 
         if (!stringUtil.isEmpty(sortByType) && sortByType
           .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
-          
+
           queryGenerator.setSingleRangeFilter(logTimeThroughRangeQuery,
             LogSearchConstants.LOGTIME, nextPageLogTime,
             endTime);
@@ -774,7 +774,7 @@ public class LogsMgr extends MgrBase {
               + LogSearchConstants.ASCENDING_ORDER);
 
         } else {
-          
+
           queryGenerator.setSingleRangeFilter(logTimeThroughRangeQuery,
             LogSearchConstants.LOGTIME, startTime,
             nextPageLogTime);
@@ -803,7 +803,7 @@ public class LogsMgr extends MgrBase {
         if (!documentList.isEmpty()){
           solrDocument = documentList.get(0);
         }
-        
+
         Date keywordLogDate = (Date) solrDocument.get(LogSearchConstants.LOGTIME);
         if(keywordLogDate == null){
           throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
@@ -837,9 +837,9 @@ public class LogsMgr extends MgrBase {
 
 
         long countNumberLogs = countQuery(rangeLogQuery,serviceLogsSolrDao) - 1;
-      
 
-        //Adding numbers on 
+
+        //Adding numbers on
 
 
         try {
@@ -854,7 +854,7 @@ public class LogsMgr extends MgrBase {
             String id = (String) solrDocumenent
               .getFieldValue(LogSearchConstants.ID);
             countNumberLogs++;
-           
+
             if (stringUtil.isEmpty(id) && id.equals(keywordId)){
               break;
             }
@@ -980,8 +980,8 @@ public class LogsMgr extends MgrBase {
           logTimeThroughRangeQuery.set(LogSearchConstants.SORT,
             LogSearchConstants.LOGTIME + " "
               + LogSearchConstants.DESCENDING_ORDER);
-          
-          
+
+
           queryGenerator.setSingleRangeFilter(
             logTimeThroughRangeQuery,
             LogSearchConstants.LOGTIME, startTime,
@@ -993,7 +993,7 @@ public class LogsMgr extends MgrBase {
           logTimeThroughRangeQuery.set(LogSearchConstants.SORT,
             LogSearchConstants.LOGTIME + " "
               + LogSearchConstants.ASCENDING_ORDER);
-          
+
 
           queryGenerator.setSingleRangeFilter(logTimeThroughRangeQuery,
             LogSearchConstants.LOGTIME, lastLogsLogTime, endTime);
@@ -1054,7 +1054,7 @@ public class LogsMgr extends MgrBase {
 
 
         long countNumberLogs = countQuery(rangeLogQuery,serviceLogsSolrDao) - 1;
-        
+
         //Adding numbers on
         try {
           SolrQuery sameIdQuery = queryGenerator
@@ -1123,9 +1123,9 @@ public class LogsMgr extends MgrBase {
           .process(logTimeByIdQuery);
 
       if(queryResponse == null){
-        return convertObjToString(new VSolrLogList()); 
+        return convertObjToString(new VSolrLogList());
       }
-      
+
       SolrDocumentList docList = queryResponse.getResults();
       Date dateOfLogId = null;
       if (docList != null && !docList.isEmpty()) {
@@ -1162,10 +1162,10 @@ public class LogsMgr extends MgrBase {
       queryGenerator.setSingleIncludeFilter(sameIdQuery,
           LogSearchConstants.LOGTIME, "\"" + logTime + "\"");
       sameIdQuery.set("fl", LogSearchConstants.ID);
-      
+
       QueryResponse sameIdResponse = serviceLogsSolrDao.process(sameIdQuery);
       SolrDocumentList docList = sameIdResponse.getResults();
-      
+
       for (SolrDocument solrDocumenent : docList) {
         String id = (String) solrDocumenent
             .getFieldValue(LogSearchConstants.ID);
@@ -1216,14 +1216,14 @@ public class LogsMgr extends MgrBase {
         return logsCounts;
 
       }
-      
+
       @SuppressWarnings("rawtypes")
       RangeFacet rangeFacet=rangeFacetList.get(0);
       if (rangeFacet == null) {
         return logsCounts;
       }
       logLevelCounts = rangeFacet.getCounts();
-      
+
       if(logLevelCounts == null){
         return logsCounts;
       }
@@ -1254,7 +1254,7 @@ public class LogsMgr extends MgrBase {
     String deafalutValue = "0";
     VBarDataList dataList = new VBarDataList();
     SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-    solrQuery.set("event", "/getHistogramData");
+    solrQuery.set("event", "/audit/logs/histogram");
     String from = getFrom((String) searchCriteria.getParamValue("from"));
     String to = getTo((String) searchCriteria.getParamValue("to"));
     String unit = getUnit((String) searchCriteria.getParamValue("unit"));
@@ -1339,10 +1339,7 @@ public class LogsMgr extends MgrBase {
     }
   }
 
-  public String cancelFindRequestByDate(HttpServletRequest request) {
-    String uniqueId = null;
-
-    uniqueId = (String) request.getParameter("token");
+  public String cancelFindRequestByDate(String uniqueId) {
     if (stringUtil.isEmpty(uniqueId)) {
       logger.error("Unique id is Empty");
       throw restErrorUtil.createRESTException("Unique id is Empty",
@@ -1377,21 +1374,21 @@ public class LogsMgr extends MgrBase {
     String to = (String) searchCriteria.getParamValue("to");
     String utcOffset = (String) searchCriteria.getParamValue("utcOffset");
     String format = (String) searchCriteria.getParamValue("format");
-    
+
     format = defaultFormat.equalsIgnoreCase(format) && format != null ? ".txt"
         : ".json";
-    
+
     if(stringUtil.isEmpty(utcOffset)){
       utcOffset = "0";
     }
-    
+
     if (!dateUtil.isDateValid(from) || !dateUtil.isDateValid(to)) {
       logger.error("Not valid date format. Valid format should be"
           + LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z);
       throw restErrorUtil.createRESTException("Not valid date format. Valid format should be"
           + LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z,
           MessageEnums.INVALID_INPUT_DATA);
-      
+
     } else {
       from = from.replace("T", " ");
       from = from.replace(".", ",");
@@ -1411,7 +1408,7 @@ public class LogsMgr extends MgrBase {
       fileName = searchCriteria.getParamValue("hostLogFile") + "_"
         + searchCriteria.getParamValue("compLogFile");
     }
-    
+
     String textToSave = "";
     try {
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
@@ -1516,7 +1513,7 @@ public class LogsMgr extends MgrBase {
 
   public String getComponentListWithLevelCounts(SearchCriteria searchCriteria) {
     SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-    solrQuery.setParam("event", "/getComponentListWithLevelCounts");
+    solrQuery.setParam("event", "/service/logs/components/level/counts");
 
     if (searchCriteria.getSortBy() == null) {
       searchCriteria.setSortBy(LogSearchConstants.SOLR_COMPONENT);
@@ -1586,10 +1583,10 @@ public class LogsMgr extends MgrBase {
       queryGenerator.setMainQuery(solrQuery, null);
       solrQuery.setSort(LogSearchConstants.LOGTIME, SolrQuery.ORDER.asc);
       queryGenerator.setRowCount(solrQuery, 1);
-     
+
       List<VNameValue> vNameValues = new ArrayList<VNameValue>();
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      
+
       if(response == null){
         return convertObjToString(nameValueList);
       }
@@ -1635,7 +1632,7 @@ public class LogsMgr extends MgrBase {
         }
       }
       nameValueList.setVNameValues(vNameValues);
-      
+
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error(e.getMessage() + "SolrQuery"+solrQuery);
@@ -1789,7 +1786,7 @@ public class LogsMgr extends MgrBase {
     if(stringUtil.isEmpty(scrollType)){
       scrollType = "";
     }
-    
+
     String logTime = null;
     String sequenceId = null;
     try {
@@ -1827,7 +1824,7 @@ public class LogsMgr extends MgrBase {
       }
       vSolrLogList.setSolrDocuments(solrDocList);
         return convertObjToString(vSolrLogList);
-     
+
     } else if (LogSearchConstants.SCROLL_TYPE_AFTER.equals(scrollType)) {
       SolrDocumentList solrDocList = new SolrDocumentList();
       vSolrLogList = new VSolrLogList();
@@ -1859,9 +1856,9 @@ public class LogsMgr extends MgrBase {
       }
 
       vSolrLogList.setSolrDocuments(initial);
-     
+
         return convertObjToString(vSolrLogList);
-      
+
     }
   }
 
@@ -1964,4 +1961,4 @@ public class LogsMgr extends MgrBase {
   }
 
 
-}
\ No newline at end of file
+}
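
Apart from the whitespace cleanup and the event-name renames above (for example "/getHostListByComponent" becoming "/service/hosts/component"), the notable change in LogsMgr is that cancelFindRequestByDate now takes the unique id directly instead of pulling the "token" parameter out of the HttpServletRequest itself. A minimal caller-side sketch of that adaptation, not taken from this commit; the resource class and its paths are assumptions, only the parameter name "token" comes from the removed code:

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;

import org.apache.ambari.logsearch.manager.LogsMgr;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

@Path("service/logs")            // assumed resource path
@Component
public class CancelRequestSketch {

  @Autowired
  LogsMgr logsMgr;

  @GET
  @Path("/request/cancel")       // assumed sub-path
  @Produces({"application/json"})
  public String cancelFindRequestByDate(@Context HttpServletRequest request) {
    // The manager no longer reads the request; the caller extracts the
    // "token" parameter (the name used by the removed code) and passes it on.
    return logsMgr.cancelFindRequestByDate(request.getParameter("token"));
  }
}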

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsREST.java
new file mode 100644
index 0000000..3d99dc0
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsREST.java
@@ -0,0 +1,283 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.rest;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
+import org.apache.ambari.logsearch.common.SearchCriteria;
+import org.apache.ambari.logsearch.manager.AuditMgr;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Scope;
+import org.springframework.stereotype.Component;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.*;
+import static org.apache.ambari.logsearch.doc.DocConstants.AuditOperationDescriptions.*;
+
+@Api(value = "audit/logs", description = "Audit log operations")
+@Path("audit/logs")
+@Component
+@Scope("request")
+public class AuditLogsREST {
+
+  @Autowired
+  AuditMgr auditMgr;
+
+  @GET
+  @Path("/schema/fields")
+  @Produces({"application/json"})
+  @ApiOperation(GET_AUDIT_SCHEMA_FIELD_LIST_OD)
+  public String getSolrFieldList(@Context HttpServletRequest request) {
+    return auditMgr.getAuditLogsSchemaFieldsName();
+  }
+
+  @GET
+  @Produces({"application/json"})
+  @ApiOperation(GET_AUDIT_LOGS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = IS_LAST_PAGE_D, name = "isLastPage", dataType = "boolean", paramType = "query")
+  })
+  public String getAuditLogs(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredAuditLogsParams(request);
+    searchCriteria.addParam("isLastPage", request.getParameter("isLastPage"));
+    return auditMgr.getLogs(searchCriteria);
+  }
+
+  @GET
+  @Path("/components")
+  @Produces({"application/json"})
+  @ApiOperation(GET_AUDIT_COMPONENTS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+  })
+  public String getAuditComponents(@Context HttpServletRequest request) {
+
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addParam("q", request.getParameter("q"));
+    return auditMgr.getAuditComponents(searchCriteria);
+  }
+
+  @GET
+  @Path("/linegraph")
+  @Produces({"application/json"})
+  @ApiOperation(GET_AUDIT_LINE_GRAPH_DATA_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query")
+  })
+  public String getAuditLineGraphData(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredAuditLogsParams(request);
+    searchCriteria.addParam("unit", request.getParameter("unit"));
+    return auditMgr.getAuditLineGraphData(searchCriteria);
+  }
+
+  @GET
+  @Path("/users")
+  @Produces({"application/json"})
+  @ApiOperation(GET_TOP_AUDIT_USERS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query")
+  })
+  public String getTopAuditUsers(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredAuditLogsParams(request);
+    searchCriteria.addParam("field", request.getParameter("field"));
+    return auditMgr.topTenUsers(searchCriteria);
+  }
+
+  @GET
+  @Path("/resources")
+  @Produces({"application/json"})
+  @ApiOperation(GET_TOP_AUDIT_RESOURCES_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query")
+  })
+  public String getTopAuditResources(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredAuditLogsParams(request);
+    searchCriteria.addParam("field", request.getParameter("field"));
+    //return auditMgr.getTopAuditFieldCount(searchCriteria);
+    return auditMgr.topTenResources(searchCriteria);
+
+  }
+
+  @GET
+  @Path("/live/count")
+  @Produces({"application/json"})
+  @ApiOperation(GET_LIVE_LOGS_COUNT_OD)
+  public String getLiveLogsCount() {
+    return auditMgr.getLiveLogCounts();
+  }
+
+  @GET
+  @Path("/request/user/linegraph")
+  @Produces({"application/json"})
+  @ApiOperation(GET_REQUEST_USER_LINE_GRAPH_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query")
+  })
+  public String getRequestUserLineGraph(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredAuditLogsParams(request);
+    searchCriteria.addParam("field", request.getParameter("field"));
+    searchCriteria.addParam("unit", request.getParameter("unit"));
+    return auditMgr.getRequestUserLineGraph(searchCriteria);
+  }
+
+  @GET
+  @Path("/anygraph")
+  @Produces({"application/json"})
+  @ApiOperation(GET_ANY_GRAPH_DATA_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = X_AXIS_D, name = "xAxis", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = Y_AXIS_D, name = "yAxis", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = STACK_BY_D, name = "stackBy", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query")
+  })
+  public String getAnyGraphData(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addParam("xAxis", request.getParameter("xAxis"));
+    searchCriteria.addParam("yAxis", request.getParameter("yAxis"));
+    searchCriteria.addParam("stackBy", request.getParameter("stackBy"));
+    searchCriteria.addParam("from", request.getParameter("from"));
+    searchCriteria.addParam("to", request.getParameter("to"));
+    searchCriteria.addParam("unit", request.getParameter("unit"));
+    return auditMgr.getAnyGraphData(searchCriteria);
+  }
+
+  @GET
+  @Path("/users/export")
+  @Produces({"application/json"})
+  @ApiOperation(EXPORT_USER_TALBE_TO_TEXT_FILE_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FORMAT_D, name = "format", dataType = "string", paramType = "query")
+  })
+  public Response exportUserTableToTextFile(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredAuditLogsParams(request);
+    searchCriteria.addParam("field", request.getParameter("field"));
+    searchCriteria.addParam("format", request.getParameter("format"));
+    return auditMgr.exportUserTableToTextFile(searchCriteria);
+  }
+
+  @GET
+  @Path("/serviceload")
+  @Produces({"application/json"})
+  @ApiOperation(GET_SERVICE_LOAD_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+  })
+  public String getServiceLoad(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredAuditLogsParams(request);
+    return auditMgr.getServiceLoad(searchCriteria);
+  }
+
+}
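
The class-level @Path moves from "audit" with verb-style sub-paths to the resource-style "audit/logs", so for instance the old "audit/getAuditLogs" URL becomes plain "audit/logs". A minimal JAX-RS 2.x client sketch, not part of the commit, showing a call against the relocated endpoint; the base URL and the query-parameter values are placeholders:

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;

public class AuditLogsClientSketch {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    String json = client
        .target("http://logsearch-host:61888")          // placeholder host/port for the Log Search portal
        .path("audit/logs")                             // replaces the old audit/getAuditLogs path
        .queryParam("from", "2016-09-08T00:00:00,000")  // illustrative values only
        .queryParam("to", "2016-09-08T01:00:00,000")
        .request("application/json")
        .get(String.class);
    System.out.println(json);
    client.close();
  }
}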

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditREST.java
deleted file mode 100644
index 5ed49fd..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditREST.java
+++ /dev/null
@@ -1,312 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.rest;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiImplicitParam;
-import io.swagger.annotations.ApiImplicitParams;
-import io.swagger.annotations.ApiOperation;
-import org.apache.ambari.logsearch.common.SearchCriteria;
-import org.apache.ambari.logsearch.manager.AuditMgr;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.annotation.Scope;
-import org.springframework.stereotype.Component;
-
-import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.*;
-import static org.apache.ambari.logsearch.doc.DocConstants.AuditOperationDescriptions.*;
-
-@Api(value = "audit", description = "Audit operations")
-@Path("audit")
-@Component
-@Scope("request")
-public class AuditREST {
-
-  @Autowired
-  AuditMgr auditMgr;
-
-  @GET
-  @Path("/getAuditSchemaFieldsName")
-  @Produces({"application/json"})
-  @ApiOperation(GET_AUDIT_SCHEMA_FIELD_LIST_OD)
-  public String getSolrFieldList(@Context HttpServletRequest request) {
-    return auditMgr.getAuditLogsSchemaFieldsName();
-  }
-
-  @GET
-  @Path("/getAuditLogs")
-  @Produces({"application/json"})
-  @ApiOperation(GET_AUDIT_LOGS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = IS_LAST_PAGE_D, name = "isLastPage", dataType = "boolean", paramType = "query")
-  })
-  public String getAuditLogs(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    searchCriteria.addParam("isLastPage", request.getParameter("isLastPage"));
-    return auditMgr.getLogs(searchCriteria);
-  }
-
-  @GET
-  @Path("/getAuditComponents")
-  @Produces({"application/json"})
-  @ApiOperation(GET_AUDIT_COMPONENTS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-  })
-  public String getAuditComponents(@Context HttpServletRequest request) {
-
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addParam("q", request.getParameter("q"));
-    return auditMgr.getAuditComponents(searchCriteria);
-  }
-
-  @GET
-  @Path("/getAuditLineGraphData")
-  @Produces({"application/json"})
-  @ApiOperation(GET_AUDIT_LINE_GRAPH_DATA_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query")
-  })
-  public String getAuditLineGraphData(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    searchCriteria.addParam("unit", request.getParameter("unit"));
-    return auditMgr.getAuditLineGraphData(searchCriteria);
-  }
-
-  @GET
-  @Path("/getTopAuditUsers")
-  @Produces({"application/json"})
-  @ApiOperation(GET_TOP_AUDIT_USERS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query")
-  })
-  public String getTopAuditUsers(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    searchCriteria.addParam("field", request.getParameter("field"));
-    return auditMgr.topTenUsers(searchCriteria);
-  }
-
-  @GET
-  @Path("/getTopAuditResources")
-  @Produces({"application/json"})
-  @ApiOperation(GET_TOP_AUDIT_RESOURCES_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query")
-  })
-  public String getTopAuditResources(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    searchCriteria.addParam("field", request.getParameter("field"));
-    //return auditMgr.getTopAuditFieldCount(searchCriteria);
-    return auditMgr.topTenResources(searchCriteria);
-
-
-  }
-
-  @GET
-  @Path("/getTopAuditComponents")
-  @Produces({"application/json"})
-  @ApiOperation(GET_TOP_AUDIT_COMPONENTS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query")
-  })
-  public String getTopAuditComponents(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    searchCriteria.addParam("field", request.getParameter("field"));
-    searchCriteria.addParam("unit", request.getParameter("unit"));
-    return auditMgr.getTopAuditFieldCount(searchCriteria);
-  }
-
-  @GET
-  @Path("/getLiveLogsCount")
-  @Produces({"application/json"})
-  @ApiOperation(GET_LIVE_LOGS_COUNT_OD)
-  public String getLiveLogsCount() {
-    return auditMgr.getLiveLogCounts();
-  }
-
-  @GET
-  @Path("/getRequestUserLineGraph")
-  @Produces({"application/json"})
-  @ApiOperation(GET_REQUEST_USER_LINE_GRAPH_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query")
-  })
-  public String getRequestUserLineGraph(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    searchCriteria.addParam("field", request.getParameter("field"));
-    searchCriteria.addParam("unit", request.getParameter("unit"));
-    return auditMgr.getRequestUserLineGraph(searchCriteria);
-  }
-
-  @GET
-  @Path("/getAnyGraphData")
-  @Produces({"application/json"})
-  @ApiOperation(GET_ANY_GRAPH_DATA_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = X_AXIS_D, name = "xAxis", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = Y_AXIS_D, name = "yAxis", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = STACK_BY_D, name = "stackBy", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query")
-  })
-  public String getAnyGraphData(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addParam("xAxis", request.getParameter("xAxis"));
-    searchCriteria.addParam("yAxis", request.getParameter("yAxis"));
-    searchCriteria.addParam("stackBy", request.getParameter("stackBy"));
-    searchCriteria.addParam("from", request.getParameter("from"));
-    searchCriteria.addParam("to", request.getParameter("to"));
-    searchCriteria.addParam("unit", request.getParameter("unit"));
-    return auditMgr.getAnyGraphData(searchCriteria);
-  }
-
-  @GET
-  @Path("/exportUserTableToTextFile")
-  @Produces({"application/json"})
-  @ApiOperation(EXPORT_USER_TALBE_TO_TEXT_FILE_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FORMAT_D, name = "format", dataType = "string", paramType = "query")
-  })
-  public Response exportUserTableToTextFile(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    searchCriteria.addParam("field", request.getParameter("field"));
-    searchCriteria.addParam("format", request.getParameter("format"));
-    return auditMgr.exportUserTableToTextFile(searchCriteria);
-  }
-
-  @GET
-  @Path("/getServiceLoad")
-  @Produces({"application/json"})
-  @ApiOperation(GET_SERVICE_LOAD_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-  })
-  public String getServiceLoad(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    return auditMgr.getServiceLoad(searchCriteria);
-  }
-
-}
- 
\ No newline at end of file
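
Read together with the new AuditLogsREST above, the deleted AuditREST amounts to a straight rename of the audit endpoints. A quick reference sketch, compiled only from the two sets of @Path annotations in this diff (getTopAuditComponents is the one operation with no counterpart in the new class here):

import java.util.LinkedHashMap;
import java.util.Map;

public class AuditEndpointRenames {
  public static void main(String[] args) {
    Map<String, String> oldToNew = new LinkedHashMap<>();
    oldToNew.put("audit/getAuditSchemaFieldsName",  "audit/logs/schema/fields");
    oldToNew.put("audit/getAuditLogs",              "audit/logs");
    oldToNew.put("audit/getAuditComponents",        "audit/logs/components");
    oldToNew.put("audit/getAuditLineGraphData",     "audit/logs/linegraph");
    oldToNew.put("audit/getTopAuditUsers",          "audit/logs/users");
    oldToNew.put("audit/getTopAuditResources",      "audit/logs/resources");
    oldToNew.put("audit/getLiveLogsCount",          "audit/logs/live/count");
    oldToNew.put("audit/getRequestUserLineGraph",   "audit/logs/request/user/linegraph");
    oldToNew.put("audit/getAnyGraphData",           "audit/logs/anygraph");
    oldToNew.put("audit/exportUserTableToTextFile", "audit/logs/users/export");
    oldToNew.put("audit/getServiceLoad",            "audit/logs/serviceload");
    oldToNew.forEach((o, n) -> System.out.println(o + " -> " + n));
  }
}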


[30/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/SearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/SearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/SearchCriteria.java
new file mode 100644
index 0000000..091194e
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/SearchCriteria.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+
+import org.apache.ambari.logsearch.common.PropertiesHelper;
+
+public class SearchCriteria {
+  private int startIndex = 0;
+  private int maxRows = Integer.MAX_VALUE;
+  private String sortBy = null;
+  private String sortType = null;
+  private int page = 0;
+
+  private String globalStartTime = null;
+  private String globalEndTime = null;
+
+  private HashMap<String, Object> paramList = new HashMap<String, Object>();
+
+  private Map<String, Object> urlParamMap = new HashMap<String, Object>();
+
+  public SearchCriteria() {
+    // Auto-generated constructor stub
+  }
+
+  public int getStartIndex() {
+    return startIndex;
+  }
+
+  public void setStartIndex(int startIndex) {
+    this.startIndex = startIndex;
+  }
+
+  public int getMaxRows() {
+    return maxRows;
+  }
+
+  public void setMaxRows(int maxRows) {
+    this.maxRows = maxRows;
+  }
+
+  public String getSortType() {
+    return sortType;
+  }
+
+
+  public void addParam(String name, Object value) {
+    String solrValue = PropertiesHelper.getProperty(name);
+    if (solrValue == null || solrValue.isEmpty()) {
+      paramList.put(name, value);
+    } else {
+      try {
+        String propertyFieldMappings[] = solrValue.split(",");
+        HashMap<String, String> propertyFieldValue = new HashMap<String, String>();
+        for (String temp : propertyFieldMappings) {
+          String arrayValue[] = temp.split(":");
+          propertyFieldValue.put(arrayValue[0].toLowerCase(Locale.ENGLISH), arrayValue[1].toLowerCase(Locale.ENGLISH));
+        }
+        String originalValue = propertyFieldValue.get(value.toString().toLowerCase(Locale.ENGLISH));
+        if (originalValue != null && !originalValue.isEmpty())
+          paramList.put(name, originalValue);
+
+      } catch (Exception e) {
+        //do nothing
+      }
+    }
+  }
+
+  public Object getParamValue(String name) {
+    return paramList.get(name);
+  }
+
+  public String getSortBy() {
+    return sortBy;
+  }
+
+  public void setSortBy(String sortBy) {
+    this.sortBy = sortBy;
+  }
+
+  public void setSortType(String sortType) {
+    this.sortType = sortType;
+  }
+
+  public int getPage() {
+    return page;
+  }
+
+  public void setPage(int page) {
+    this.page = page;
+  }
+
+  public String getGlobalStartTime() {
+    return globalStartTime;
+  }
+
+  public void setGlobalStartTime(String globalStartTime) {
+    this.globalStartTime = globalStartTime;
+  }
+
+  public String getGlobalEndTime() {
+    return globalEndTime;
+  }
+
+  public void setGlobalEndTime(String globalEndTime) {
+    this.globalEndTime = globalEndTime;
+  }
+
+  public Map<String, Object> getUrlParamMap() {
+    return urlParamMap;
+  }
+
+  public void setUrlParamMap(Map<String, Object> urlParamMap) {
+    this.urlParamMap = urlParamMap;
+  }
+
+}
\ No newline at end of file
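
The interesting part of the new SearchCriteria is addParam: when PropertiesHelper has a comma-separated "alias:value" mapping registered under the parameter name, the incoming value is translated (and silently dropped if the alias is unknown); otherwise the raw value is stored. A standalone sketch of that mapping behaviour; the property string below is made up for illustration, real mappings come from the Log Search properties:

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

public class AddParamMappingSketch {

  // Mirrors the translation step of SearchCriteria.addParam.
  static Object mapValue(String propertyValue, Object value) {
    if (propertyValue == null || propertyValue.isEmpty()) {
      return value;                                   // no mapping configured: keep the raw value
    }
    Map<String, String> mapping = new HashMap<>();
    for (String pair : propertyValue.split(",")) {
      String[] kv = pair.split(":");
      if (kv.length < 2) {
        continue;                                     // the original swallows malformed entries via try/catch
      }
      mapping.put(kv[0].toLowerCase(Locale.ENGLISH), kv[1].toLowerCase(Locale.ENGLISH));
    }
    return mapping.get(value.toString().toLowerCase(Locale.ENGLISH)); // null => parameter is dropped
  }

  public static void main(String[] args) {
    String property = "warn:warning,err:error";       // hypothetical mapping
    System.out.println(mapValue(property, "WARN"));   // -> warning
    System.out.println(mapValue(property, "debug"));  // -> null (unknown alias, dropped)
    System.out.println(mapValue(null, "WARN"));       // -> WARN (no mapping, kept as-is)
  }
}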

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceAnyGraphSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceAnyGraphSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceAnyGraphSearchCriteria.java
new file mode 100644
index 0000000..0ef5bdf
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceAnyGraphSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class ServiceAnyGraphSearchCriteria extends ServiceLogFileSearchCriteria {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceExtremeDatesCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceExtremeDatesCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceExtremeDatesCriteria.java
new file mode 100644
index 0000000..3fc6ff8
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceExtremeDatesCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class ServiceExtremeDatesCriteria extends CommonSearchCriteria {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceGraphSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceGraphSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceGraphSearchCriteria.java
new file mode 100644
index 0000000..31a57a4
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceGraphSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class ServiceGraphSearchCriteria extends ServiceLogFileSearchCriteria {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogExportSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogExportSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogExportSearchCriteria.java
new file mode 100644
index 0000000..8bab7f0
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogExportSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class ServiceLogExportSearchCriteria extends ServiceLogFileSearchCriteria {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogFileSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogFileSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogFileSearchCriteria.java
new file mode 100644
index 0000000..a9f5926
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogFileSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class ServiceLogFileSearchCriteria extends CommonSearchCriteria {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogSearchCriteria.java
new file mode 100644
index 0000000..d41c589
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class ServiceLogSearchCriteria extends ServiceLogFileSearchCriteria {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogTruncatedSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogTruncatedSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogTruncatedSearchCriteria.java
new file mode 100644
index 0000000..24dc9a8
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogTruncatedSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class ServiceLogTruncatedSearchCriteria extends ServiceLogFileSearchCriteria {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserConfigSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserConfigSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserConfigSearchCriteria.java
new file mode 100644
index 0000000..8798cd6
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserConfigSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class UserConfigSearchCriteria extends CommonSearchCriteria {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserExportSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserExportSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserExportSearchCriteria.java
new file mode 100644
index 0000000..755c673
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserExportSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class UserExportSearchCriteria extends FieldAuditLogSearchCriteria {
+}
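
All of the criteria classes above are intentionally empty: each one only gives the request-to-criteria conversion layer a distinct target type per endpoint. The shared @Marker annotation is not part of this diff; a minimal sketch of its assumed shape follows, not the actual definition in org.apache.ambari.logsearch.common:

    package org.apache.ambari.logsearch.common;

    import java.lang.annotation.ElementType;
    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;
    import java.lang.annotation.Target;

    // Assumed: a plain marker annotation with no members, used only to tag the
    // otherwise empty criteria subclasses added in this commit.
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.TYPE)
    public @interface Marker {
    }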

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsREST.java
deleted file mode 100644
index 3d99dc0..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsREST.java
+++ /dev/null
@@ -1,283 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.rest;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiImplicitParam;
-import io.swagger.annotations.ApiImplicitParams;
-import io.swagger.annotations.ApiOperation;
-import org.apache.ambari.logsearch.common.SearchCriteria;
-import org.apache.ambari.logsearch.manager.AuditMgr;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.annotation.Scope;
-import org.springframework.stereotype.Component;
-
-import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.*;
-import static org.apache.ambari.logsearch.doc.DocConstants.AuditOperationDescriptions.*;
-
-@Api(value = "audit/logs", description = "Audit log operations")
-@Path("audit/logs")
-@Component
-@Scope("request")
-public class AuditLogsREST {
-
-  @Autowired
-  AuditMgr auditMgr;
-
-  @GET
-  @Path("/schema/fields")
-  @Produces({"application/json"})
-  @ApiOperation(GET_AUDIT_SCHEMA_FIELD_LIST_OD)
-  public String getSolrFieldList(@Context HttpServletRequest request) {
-    return auditMgr.getAuditLogsSchemaFieldsName();
-  }
-
-  @GET
-  @Produces({"application/json"})
-  @ApiOperation(GET_AUDIT_LOGS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = IS_LAST_PAGE_D, name = "isLastPage", dataType = "boolean", paramType = "query")
-  })
-  public String getAuditLogs(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    searchCriteria.addParam("isLastPage", request.getParameter("isLastPage"));
-    return auditMgr.getLogs(searchCriteria);
-  }
-
-  @GET
-  @Path("/components")
-  @Produces({"application/json"})
-  @ApiOperation(GET_AUDIT_COMPONENTS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-  })
-  public String getAuditComponents(@Context HttpServletRequest request) {
-
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addParam("q", request.getParameter("q"));
-    return auditMgr.getAuditComponents(searchCriteria);
-  }
-
-  @GET
-  @Path("/linegraph")
-  @Produces({"application/json"})
-  @ApiOperation(GET_AUDIT_LINE_GRAPH_DATA_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query")
-  })
-  public String getAuditLineGraphData(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    searchCriteria.addParam("unit", request.getParameter("unit"));
-    return auditMgr.getAuditLineGraphData(searchCriteria);
-  }
-
-  @GET
-  @Path("/users")
-  @Produces({"application/json"})
-  @ApiOperation(GET_TOP_AUDIT_USERS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query")
-  })
-  public String getTopAuditUsers(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    searchCriteria.addParam("field", request.getParameter("field"));
-    return auditMgr.topTenUsers(searchCriteria);
-  }
-
-  @GET
-  @Path("/resources")
-  @Produces({"application/json"})
-  @ApiOperation(GET_TOP_AUDIT_RESOURCES_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query")
-  })
-  public String getTopAuditResources(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    searchCriteria.addParam("field", request.getParameter("field"));
-    //return auditMgr.getTopAuditFieldCount(searchCriteria);
-    return auditMgr.topTenResources(searchCriteria);
-
-  }
-
-  @GET
-  @Path("/live/count")
-  @Produces({"application/json"})
-  @ApiOperation(GET_LIVE_LOGS_COUNT_OD)
-  public String getLiveLogsCount() {
-    return auditMgr.getLiveLogCounts();
-  }
-
-  @GET
-  @Path("/request/user/linegraph")
-  @Produces({"application/json"})
-  @ApiOperation(GET_REQUEST_USER_LINE_GRAPH_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query")
-  })
-  public String getRequestUserLineGraph(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    searchCriteria.addParam("field", request.getParameter("field"));
-    searchCriteria.addParam("unit", request.getParameter("unit"));
-    return auditMgr.getRequestUserLineGraph(searchCriteria);
-  }
-
-  @GET
-  @Path("/anygraph")
-  @Produces({"application/json"})
-  @ApiOperation(GET_ANY_GRAPH_DATA_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = X_AXIS_D, name = "xAxis", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = Y_AXIS_D, name = "yAxis", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = STACK_BY_D, name = "stackBy", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query")
-  })
-  public String getAnyGraphData(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addParam("xAxis", request.getParameter("xAxis"));
-    searchCriteria.addParam("yAxis", request.getParameter("yAxis"));
-    searchCriteria.addParam("stackBy", request.getParameter("stackBy"));
-    searchCriteria.addParam("from", request.getParameter("from"));
-    searchCriteria.addParam("to", request.getParameter("to"));
-    searchCriteria.addParam("unit", request.getParameter("unit"));
-    return auditMgr.getAnyGraphData(searchCriteria);
-  }
-
-  @GET
-  @Path("/users/export")
-  @Produces({"application/json"})
-  @ApiOperation(EXPORT_USER_TALBE_TO_TEXT_FILE_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FORMAT_D, name = "format", dataType = "string", paramType = "query")
-  })
-  public Response exportUserTableToTextFile(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    searchCriteria.addParam("field", request.getParameter("field"));
-    searchCriteria.addParam("format", request.getParameter("format"));
-    return auditMgr.exportUserTableToTextFile(searchCriteria);
-  }
-
-  @GET
-  @Path("/serviceload")
-  @Produces({"application/json"})
-  @ApiOperation(GET_SERVICE_LOAD_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-  })
-  public String getServiceLoad(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredAuditLogsParams(request);
-    return auditMgr.getServiceLoad(searchCriteria);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsResource.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsResource.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsResource.java
new file mode 100644
index 0000000..82e21e8
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsResource.java
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.rest;
+
+import javax.inject.Inject;
+import javax.ws.rs.BeanParam;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Response;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.apache.ambari.logsearch.model.request.impl.AnyGraphRequest;
+import org.apache.ambari.logsearch.model.request.impl.AuditBarGraphRequest;
+import org.apache.ambari.logsearch.model.request.impl.BaseAuditLogRequest;
+import org.apache.ambari.logsearch.model.request.impl.FieldAuditBarGraphRequest;
+import org.apache.ambari.logsearch.model.request.impl.FieldAuditLogRequest;
+import org.apache.ambari.logsearch.model.request.impl.SimpleQueryRequest;
+import org.apache.ambari.logsearch.model.request.impl.UserExportRequest;
+import org.apache.ambari.logsearch.model.response.AuditLogResponse;
+import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse;
+import org.apache.ambari.logsearch.model.response.GroupListResponse;
+import org.apache.ambari.logsearch.model.response.NameValueDataListResponse;
+import org.apache.ambari.logsearch.query.model.AnyGraphSearchCriteria;
+import org.apache.ambari.logsearch.query.model.AuditLogSearchCriteria;
+import org.apache.ambari.logsearch.query.model.AuditBarGraphSearchCriteria;
+import org.apache.ambari.logsearch.query.model.CommonSearchCriteria;
+import org.apache.ambari.logsearch.query.model.FieldAuditLogSearchCriteria;
+import org.apache.ambari.logsearch.query.model.FieldAuditBarGraphSearchCriteria;
+import org.apache.ambari.logsearch.query.model.SearchCriteria;
+import org.apache.ambari.logsearch.model.request.impl.AuditLogRequest;
+import org.apache.ambari.logsearch.manager.AuditLogsManager;
+import org.apache.ambari.logsearch.query.model.UserExportSearchCriteria;
+import org.springframework.context.annotation.Scope;
+import org.springframework.core.convert.ConversionService;
+import org.springframework.stereotype.Component;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.AuditOperationDescriptions.*;
+
+@Api(value = "audit/logs", description = "Audit log operations")
+@Path("audit/logs")
+@Component
+@Scope("request")
+public class AuditLogsResource {
+
+  @Inject
+  private AuditLogsManager auditLogsManager;
+
+  @Inject
+  private ConversionService conversionService;
+
+  @GET
+  @Path("/schema/fields")
+  @Produces({"application/json"})
+  @ApiOperation(GET_AUDIT_SCHEMA_FIELD_LIST_OD)
+  public String getSolrFieldList() {
+    return auditLogsManager.getAuditLogsSchemaFieldsName();
+  }
+
+  @GET
+  @Produces({"application/json"})
+  @ApiOperation(GET_AUDIT_LOGS_OD)
+  public AuditLogResponse getAuditLogs(@BeanParam AuditLogRequest auditLogRequest) {
+    return auditLogsManager.getLogs(conversionService.convert(auditLogRequest, AuditLogSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/components")
+  @Produces({"application/json"})
+  @ApiOperation(GET_AUDIT_COMPONENTS_OD)
+  public GroupListResponse getAuditComponents(@BeanParam SimpleQueryRequest request) {
+    return auditLogsManager.getAuditComponents(conversionService.convert(request, SearchCriteria.class));
+  }
+
+  @GET
+  @Path("/bargraph")
+  @Produces({"application/json"})
+  @ApiOperation(GET_AUDIT_LINE_GRAPH_DATA_OD)
+  public BarGraphDataListResponse getAuditBarGraphData(@BeanParam AuditBarGraphRequest request) {
+    return auditLogsManager.getAuditBarGraphData(conversionService.convert(request, AuditBarGraphSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/users")
+  @Produces({"application/json"})
+  @ApiOperation(GET_TOP_AUDIT_USERS_OD)
+  public BarGraphDataListResponse getTopAuditUsers(@BeanParam FieldAuditBarGraphRequest request) {
+    return auditLogsManager.topTenUsers(conversionService.convert(request, FieldAuditBarGraphSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/resources")
+  @Produces({"application/json"})
+  @ApiOperation(GET_TOP_AUDIT_RESOURCES_OD)
+  public BarGraphDataListResponse getTopAuditResources(@BeanParam FieldAuditLogRequest request) {
+    return auditLogsManager.topTenResources(conversionService.convert(request, FieldAuditLogSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/live/count")
+  @Produces({"application/json"})
+  @ApiOperation(GET_LIVE_LOGS_COUNT_OD)
+  public NameValueDataListResponse getLiveLogsCount() {
+    return auditLogsManager.getLiveLogCounts();
+  }
+
+  @GET
+  @Path("/request/user/bargraph")
+  @Produces({"application/json"})
+  @ApiOperation(GET_REQUEST_USER_LINE_GRAPH_OD)
+  public BarGraphDataListResponse getRequestUserBarGraph(@BeanParam FieldAuditBarGraphRequest request) {
+    return auditLogsManager.getRequestUserLineGraph(conversionService.convert(request, FieldAuditBarGraphSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/anygraph")
+  @Produces({"application/json"})
+  @ApiOperation(GET_ANY_GRAPH_DATA_OD)
+  public BarGraphDataListResponse getAnyGraphData(@BeanParam AnyGraphRequest request) {
+    return auditLogsManager.getAnyGraphData(conversionService.convert(request, AnyGraphSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/users/export")
+  @Produces({"application/json"})
+  @ApiOperation(EXPORT_USER_TALBE_TO_TEXT_FILE_OD)
+  public Response exportUserTableToTextFile(@BeanParam UserExportRequest request) {
+    return auditLogsManager.exportUserTableToTextFile(conversionService.convert(request, UserExportSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/serviceload")
+  @Produces({"application/json"})
+  @ApiOperation(GET_SERVICE_LOAD_OD)
+  public BarGraphDataListResponse getServiceLoad(@BeanParam BaseAuditLogRequest request) {
+    return auditLogsManager.getServiceLoad(conversionService.convert(request, CommonSearchCriteria.class));
+  }
+
+}
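
AuditLogsResource replaces the hand-rolled SearchCriteria population of AuditLogsREST with @BeanParam request beans that a Spring ConversionService turns into typed criteria objects. The concrete converters are not part of this hunk; the sketch below shows one possible shape, assuming the application registers Converter beans with its ConversionService and that the criteria class has a no-arg constructor. The package and class names here are hypothetical:

    package org.apache.ambari.logsearch.query.converter;

    import org.apache.ambari.logsearch.model.request.impl.AuditLogRequest;
    import org.apache.ambari.logsearch.query.model.AuditLogSearchCriteria;
    import org.springframework.core.convert.converter.Converter;
    import org.springframework.stereotype.Component;

    // Hypothetical converter: one Converter per request/criteria pair keeps the
    // REST resource free of parameter-copying code.
    @Component
    public class AuditLogRequestConverter implements Converter<AuditLogRequest, AuditLogSearchCriteria> {

      @Override
      public AuditLogSearchCriteria convert(AuditLogRequest request) {
        AuditLogSearchCriteria criteria = new AuditLogSearchCriteria();
        // Field-by-field copying of the @BeanParam query parameters is elided here;
        // the real converters in this branch map request fields onto criteria parameters.
        return criteria;
      }
    }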

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileREST.java
deleted file mode 100644
index 6099e0f..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileREST.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.rest;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiImplicitParam;
-import io.swagger.annotations.ApiImplicitParams;
-import io.swagger.annotations.ApiOperation;
-import org.apache.ambari.logsearch.common.SearchCriteria;
-import org.apache.ambari.logsearch.manager.LogFileMgr;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.annotation.Scope;
-import org.springframework.stereotype.Component;
-
-import static org.apache.ambari.logsearch.doc.DocConstants.LogFileDescriptions.*;
-import static org.apache.ambari.logsearch.doc.DocConstants.LogFileOperationDescriptions.*;
-
-@Api(value = "logfile", description = "Logfile operations")
-@Path("logfile")
-@Component
-@Scope("request")
-public class LogFileREST {
-
-  @Autowired
-  LogFileMgr logFileMgr;
-
-  @GET
-  @Produces({"application/json"})
-  @ApiOperation(SEARCH_LOG_FILES_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LOG_TYPE_D, name = "logType", dataType = "string", paramType = "query")
-  })
-  public String searchLogFiles(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addParam("component", request.getParameter("component"));
-    searchCriteria.addParam("host", request.getParameter("host"));
-    searchCriteria.addParam("logType", request.getParameter("logType"));
-    return logFileMgr.searchLogFiles(searchCriteria);
-  }
-
-  @GET
-  @Path("/tail")
-  @Produces({"application/json"})
-  @ApiOperation(GET_LOG_FILE_TAIL_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LOG_TYPE_D, name = "logType", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TAIL_SIZE_D, name = "tailSize", dataType = "string", paramType = "query")
-  })
-  public String getLogFileTail(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria();
-    searchCriteria.addParam("host", request.getParameter("host"));
-    searchCriteria.addParam("component", request.getParameter("component"));
-    searchCriteria.addParam("name", request.getParameter("name"));
-    searchCriteria.addParam("tailSize", request.getParameter("tailSize"));
-    return logFileMgr.getLogFileTail(searchCriteria);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileResource.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileResource.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileResource.java
new file mode 100644
index 0000000..c23f457
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileResource.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.rest;
+
+import javax.inject.Inject;
+import javax.ws.rs.BeanParam;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.apache.ambari.logsearch.model.request.impl.LogFileRequest;
+import org.apache.ambari.logsearch.model.request.impl.LogFileTailRequest;
+import org.apache.ambari.logsearch.model.response.LogFileDataListResponse;
+import org.apache.ambari.logsearch.model.response.LogListResponse;
+import org.apache.ambari.logsearch.query.model.LogFileSearchCriteria;
+import org.apache.ambari.logsearch.query.model.LogFileTailSearchCriteria;
+import org.apache.ambari.logsearch.manager.LogFileManager;
+import org.springframework.context.annotation.Scope;
+import org.springframework.core.convert.ConversionService;
+import org.springframework.stereotype.Component;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.LogFileOperationDescriptions.*;
+
+@Api(value = "logfile", description = "Logfile operations")
+@Path("logfile")
+@Component
+@Scope("request")
+public class LogFileResource {
+
+  @Inject
+  private LogFileManager logFileManager;
+
+  @Inject
+  private ConversionService conversionService;
+
+  @GET
+  @Produces({"application/json"})
+  @ApiOperation(SEARCH_LOG_FILES_OD)
+  public LogFileDataListResponse searchLogFiles(@BeanParam LogFileRequest request) {
+    return logFileManager.searchLogFiles(conversionService.convert(request, LogFileSearchCriteria.class));
+  }
+
+  @GET
+  @Path("/tail")
+  @Produces({"application/json"})
+  @ApiOperation(GET_LOG_FILE_TAIL_OD)
+  public LogListResponse getLogFileTail(@BeanParam LogFileTailRequest request) {
+    return logFileManager.getLogFileTail(conversionService.convert(request, LogFileTailSearchCriteria.class));
+  }
+
+}
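
LogFileResource keeps the same two endpoints but binds their query parameters through @BeanParam beans instead of reading the HttpServletRequest directly. A hedged client-side sketch using the standard JAX-RS 2.0 client, assuming a local Log Search portal base URL and that LogFileTailRequest keeps the old endpoint's parameter names (host, component, tailSize); both assumptions are for illustration only:

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;

    public class LogFileTailExample {
      public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        // Base URL and query parameter names are assumed, not taken from this diff.
        String tail = client
            .target("http://localhost:61888/api/v1/logfile/tail")
            .queryParam("host", "c6401.ambari.apache.org")
            .queryParam("component", "zookeeper")
            .queryParam("tailSize", "50")
            .request("application/json")
            .get(String.class);
        System.out.println(tail);
        client.close();
      }
    }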

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicREST.java
deleted file mode 100644
index 5218f5d..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicREST.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.rest;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiOperation;
-import org.apache.ambari.logsearch.manager.PublicMgr;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.annotation.Scope;
-import org.springframework.stereotype.Component;
-
-import static org.apache.ambari.logsearch.doc.DocConstants.PublicOperationDescriptions.OBTAIN_GENERAL_CONFIG_OD;
-
-@Api(value = "public", description = "Public operations")
-@Path("public")
-@Component
-@Scope("request")
-public class PublicREST {
-
-  @Autowired
-  PublicMgr generalMgr;
-
-  @GET
-  @Path("/config")
-  @ApiOperation(OBTAIN_GENERAL_CONFIG_OD)
-  public String getGeneralConfig() {
-    return generalMgr.getGeneralConfig();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicResource.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicResource.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicResource.java
new file mode 100644
index 0000000..94bf059
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicResource.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.rest;
+
+import javax.inject.Inject;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.apache.ambari.logsearch.manager.PublicManager;
+import org.springframework.context.annotation.Scope;
+import org.springframework.stereotype.Component;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.PublicOperationDescriptions.OBTAIN_GENERAL_CONFIG_OD;
+
+@Api(value = "public", description = "Public operations")
+@Path("public")
+@Component
+@Scope("request")
+public class PublicResource {
+
+  @Inject
+  private PublicManager publicManager;
+
+  @GET
+  @Path("/config")
+  @ApiOperation(OBTAIN_GENERAL_CONFIG_OD)
+  public String getGeneralConfig() {
+    return publicManager.getGeneralConfig();
+  }
+}


[03/50] [abbrv] ambari git commit: AMBARI-18095. Use Zookeeper *.log file instead of *.out (Dharmesh Makwana via oleewere)

Posted by ol...@apache.org.
AMBARI-18095. Use Zookeeper *.log file instead of *.out (Dharmesh Makwana via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ff4becac
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ff4becac
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ff4becac

Branch: refs/heads/branch-dev-logsearch
Commit: ff4becac20ec311adf49b3a8c9cea579f992c167
Parents: 263a2c3
Author: oleewere <ol...@gmail.com>
Authored: Thu Aug 18 13:20:59 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:33:57 2016 +0200

----------------------------------------------------------------------
 .../0.5.0/package/templates/input.config-zookeeper.json.j2       | 4 ++--
 .../ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml            | 4 ++--
 .../ZOOKEEPER/3.4.5/package/scripts/params_windows.py            | 2 +-
 .../common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper.py | 2 +-
 4 files changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ff4becac/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zookeeper.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zookeeper.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zookeeper.json.j2
index 083bd8a..5525a64 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zookeeper.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zookeeper.json.j2
@@ -20,7 +20,7 @@
     {
       "type":"zookeeper",
       "rowtype":"service",
-      "path":"{{zk_log_dir}}/zookeeper*.out"
+      "path":"{{zk_log_dir}}/zookeeper*.log"
     }
 
   ],
@@ -53,4 +53,4 @@
 
   ]
 
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ff4becac/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml
index 70e86d7..04f872c 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-log4j.xml
@@ -52,7 +52,7 @@
 #
 
 # DEFAULT: console appender only
-log4j.rootLogger=INFO, CONSOLE
+log4j.rootLogger=INFO, CONSOLE, ROLLINGFILE
 
 # Example with rolling log file
 #log4j.rootLogger=DEBUG, CONSOLE, ROLLINGFILE
@@ -73,7 +73,7 @@ log4j.appender.CONSOLE.layout.ConversionPattern=%d{ISO8601} - %-5p [%t:%C{1}@%L]
 #    Log DEBUG level and above messages to a log file
 log4j.appender.ROLLINGFILE=org.apache.log4j.RollingFileAppender
 log4j.appender.ROLLINGFILE.Threshold=DEBUG
-log4j.appender.ROLLINGFILE.File=zookeeper.log
+log4j.appender.ROLLINGFILE.File={{zk_log_dir}}/zookeeper.log
 
 # Max log file size of 10MB
 log4j.appender.ROLLINGFILE.MaxFileSize=10MB

http://git-wip-us.apache.org/repos/asf/ambari/blob/ff4becac/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_windows.py b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_windows.py
index a83214d..de8cb51 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_windows.py
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/params_windows.py
@@ -38,7 +38,7 @@ except:
 hadoop_user = config["configurations"]["cluster-env"]["hadoop.user.name"]
 zk_user = hadoop_user
 
-# notused zk_log_dir = config['configurations']['zookeeper-env']['zk_log_dir']
+zk_log_dir = config['configurations']['zookeeper-env']['zk_log_dir']
 zk_data_dir = ensure_double_backslashes(config['configurations']['zoo.cfg']['dataDir'])
 tickTime = config['configurations']['zoo.cfg']['tickTime']
 initLimit = config['configurations']['zoo.cfg']['initLimit']

http://git-wip-us.apache.org/repos/asf/ambari/blob/ff4becac/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper.py b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper.py
index 18e423c..a04d393 100644
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper.py
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/scripts/zookeeper.py
@@ -95,7 +95,7 @@ def zookeeper(type = None, upgrade_type=None):
          mode=0644,
          group=params.user_group,
          owner=params.zk_user,
-         content=params.log4j_props
+         content=InlineTemplate(params.log4j_props)
     )
   elif (os.path.exists(os.path.join(params.config_dir, "log4j.properties"))):
     File(os.path.join(params.config_dir, "log4j.properties"),


[12/50] [abbrv] ambari git commit: AMBARI-18248. Parsing of /var/log/messages and /var/log/secure (Hayat Behlim via oleewere)

Posted by ol...@apache.org.
AMBARI-18248. Parsing of /var/log/messages and /var/log/secure (Hayat Behlim via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9c293088
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9c293088
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9c293088

Branch: refs/heads/branch-dev-logsearch
Commit: 9c2930886b8661c622fe7614442efd513074994a
Parents: 82e2d1b
Author: oleewere <ol...@gmail.com>
Authored: Thu Aug 25 11:49:48 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:33:59 2016 +0200

----------------------------------------------------------------------
 .../ambari/logfeeder/mapper/MapperDate.java     | 48 ++++++++++----
 .../apache/ambari/logfeeder/util/DateUtil.java  |  6 --
 .../ambari/logfeeder/mapper/MapperDateTest.java | 66 ++++++++++++++++++--
 .../src/main/resources/HadoopServiceConfig.json | 15 ++++-
 .../test-config/logfeeder/logfeeder.properties  |  6 +-
 .../shipper-conf/input.config-secure_log.json   | 31 +++++++++
 .../input.config-system_message.json            | 31 +++++++++
 .../test-config/logsearch/logsearch.properties  |  4 +-
 .../docker/test-logs/secure_log/secure-log.txt  | 11 ++++
 .../test-logs/system_message/message_logs.txt   | 17 +++++
 .../templates/input.config-accumulo.json.j2     |  4 +-
 .../templates/input.config-ambari.json.j2       | 14 ++---
 .../package/templates/input.config-ams.json.j2  |  8 +--
 .../templates/input.config-atlas.json.j2        |  2 +-
 .../templates/input.config-falcon.json.j2       |  2 +-
 .../templates/input.config-flume.json.j2        |  2 +-
 .../templates/input.config-hbase.json.j2        |  4 +-
 .../package/templates/input.config-hdfs.json.j2 |  4 +-
 .../package/templates/input.config-hive.json.j2 |  4 +-
 .../package/templates/input.config-hst.json.j2  |  4 +-
 .../templates/input.config-infra.json.j2        |  2 +-
 .../templates/input.config-kafka.json.j2        |  4 +-
 .../package/templates/input.config-knox.json.j2 |  2 +-
 .../package/templates/input.config-nifi.json.j2 |  2 +-
 .../templates/input.config-oozie.json.j2        |  2 +-
 .../templates/input.config-ranger.json.j2       |  6 +-
 .../templates/input.config-spark.json.j2        |  2 +-
 .../templates/input.config-spark2.json.j2       |  2 +-
 .../templates/input.config-storm.json.j2        |  2 +-
 .../package/templates/input.config-yarn.json.j2 |  2 +-
 .../templates/input.config-zeppelin.json.j2     |  2 +-
 .../templates/input.config-zookeeper.json.j2    |  2 +-
 32 files changed, 246 insertions(+), 67 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java
index 9aa0b23..6dbf8be 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java
@@ -20,10 +20,12 @@
 package org.apache.ambari.logfeeder.mapper;
 
 import java.text.SimpleDateFormat;
+import java.util.Calendar;
 import java.util.Date;
 import java.util.Map;
 
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.lang.time.DateUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
@@ -31,9 +33,9 @@ import org.apache.log4j.Logger;
 public class MapperDate extends Mapper {
   private static final Logger logger = Logger.getLogger(MapperDate.class);
 
-  private String dateFormat = null;
-  private SimpleDateFormat dateFormatter = null;
+  private SimpleDateFormat targetDateFormatter = null;
   private boolean isEpoch = false;
+  private SimpleDateFormat srcDateFormatter = null;
 
   @Override
   public boolean init(String inputDesc, String fieldName,
@@ -48,24 +50,28 @@ public class MapperDate extends Mapper {
     }
     @SuppressWarnings("unchecked")
     Map<String, Object> mapObjects = (Map<String, Object>) mapConfigs;
-    dateFormat = (String) mapObjects.get("date_pattern");
-    if (StringUtils.isEmpty(dateFormat)) {
+    String targetDateFormat = (String) mapObjects.get("target_date_pattern");
+    String srcDateFormat = (String) mapObjects.get("src_date_pattern");
+    if (StringUtils.isEmpty(targetDateFormat)) {
       logger.fatal("Date format for map is empty. " + this.toString());
     } else {
-      logger.info("Date mapper format is " + dateFormat);
+      logger.info("Date mapper format is " + targetDateFormat);
 
-      if (dateFormat.equalsIgnoreCase("epoch")) {
+      if (targetDateFormat.equalsIgnoreCase("epoch")) {
         isEpoch = true;
         return true;
       } else {
         try {
-          dateFormatter = new SimpleDateFormat(dateFormat);
+          targetDateFormatter = new SimpleDateFormat(targetDateFormat);
+          if (!StringUtils.isEmpty(srcDateFormat)) {
+            srcDateFormatter = new SimpleDateFormat(srcDateFormat);
+          }
           return true;
         } catch (Throwable ex) {
           logger.fatal("Error creating date format. format="
-            + dateFormat + ". " + this.toString());
+            + targetDateFormat + ". " + this.toString());
         }
-      }
+      } 
     }
     return false;
   }
@@ -77,8 +83,26 @@ public class MapperDate extends Mapper {
         if (isEpoch) {
           long ms = Long.parseLong(value.toString()) * 1000;
           value = new Date(ms);
-        } else if (dateFormatter != null) {
-          value = dateFormatter.parse(value.toString());
+        } else if (targetDateFormatter != null) {
+          if (srcDateFormatter != null) {
+            Date srcDate = srcDateFormatter.parse(value.toString());
+            // set the year in src_date when src_date does not have a year component
+            if (!srcDateFormatter.toPattern().contains("yy")) {
+              Calendar currentCalendar = Calendar.getInstance();
+              Calendar logDateCalendar = Calendar.getInstance();
+              logDateCalendar.setTimeInMillis(srcDate.getTime());
+              if (logDateCalendar.get(Calendar.MONTH) > currentCalendar.get(Calendar.MONTH)) {
+                // set the previous year as the log year when the log month is greater than the current month
+                srcDate = DateUtils.setYears(srcDate, currentCalendar.get(Calendar.YEAR) - 1);
+              } else {
+                // set the current year as the log year
+                srcDate = DateUtils.setYears(srcDate, currentCalendar.get(Calendar.YEAR));
+              }
+            }
+            value = targetDateFormatter.format(srcDate);
+          } else {
+            value = targetDateFormatter.parse(value.toString());
+          }
         } else {
           return value;
         }
@@ -87,7 +111,7 @@ public class MapperDate extends Mapper {
         LogFeederUtil.logErrorMessageByInterval(this.getClass()
             .getSimpleName() + ":apply",
           "Error applying date transformation. isEpoch="
-            + isEpoch + ", dateFormat=" + dateFormat
+            + isEpoch + ", targetateFormat=" + (targetDateFormatter!=null ?targetDateFormatter.toPattern():"")
             + ", value=" + value + ". " + this.toString(),
           t, logger, Level.ERROR);
       }
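
A minimal standalone sketch of the year-assignment rule introduced above: syslog-style
source patterns such as "MMM dd HH:mm:ss" carry no year, so the mapper assumes the
current year unless the parsed month is ahead of the current month, in which case the
entry is treated as last year's. The class name, the explicit Locale and the hard-coded
sample timestamp below are illustrative additions; the date patterns and the
DateUtils.setYears() call mirror the diff.

  import java.text.SimpleDateFormat;
  import java.util.Calendar;
  import java.util.Date;
  import java.util.Locale;

  import org.apache.commons.lang.time.DateUtils;

  public class YearInferenceSketch {
    public static void main(String[] args) throws Exception {
      // src_date_pattern / target_date_pattern as used by the new map_date configs
      SimpleDateFormat src = new SimpleDateFormat("MMM dd HH:mm:ss", Locale.ENGLISH);
      SimpleDateFormat target = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS", Locale.ENGLISH);

      Date srcDate = src.parse("Aug 21 03:09:02");   // parses into the epoch year (1970)

      Calendar now = Calendar.getInstance();
      Calendar log = Calendar.getInstance();
      log.setTime(srcDate);

      // A month "in the future" relative to today means the line was written last year.
      int year = log.get(Calendar.MONTH) > now.get(Calendar.MONTH)
          ? now.get(Calendar.YEAR) - 1
          : now.get(Calendar.YEAR);

      System.out.println(target.format(DateUtils.setYears(srcDate, year)));
    }
  }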

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java
index 1c0ce67..2ca9353 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/DateUtil.java
@@ -38,10 +38,4 @@ public class DateUtil {
     }
     return "";
   }
-
-  public static void main(String[] args) {
-    Date currentDate = new Date();
-    String fileDateFormat = "yyyy-MM-dd-HH-mm-ss";
-    System.out.println(dateToString(currentDate, fileDateFormat));
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
index 0652182..301dea9 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
@@ -41,7 +41,7 @@ public class MapperDateTest {
     LOG.info("testMapperDate_epoch()");
 
     Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("date_pattern", "epoch");
+    mapConfigs.put("target_date_pattern", "epoch");
 
     MapperDate mapperDate = new MapperDate();
     assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapConfigs));
@@ -61,7 +61,7 @@ public class MapperDateTest {
     LOG.info("testMapperDate_pattern()");
 
     Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("date_pattern", LogFeederUtil.DATE_FORMAT);
+    mapConfigs.put("target_date_pattern", LogFeederUtil.DATE_FORMAT);
 
     MapperDate mapperDate = new MapperDate();
     assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapConfigs));
@@ -101,7 +101,7 @@ public class MapperDateTest {
     LOG.info("testMapperDate_notParsableDatePattern()");
 
     Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("date_pattern", "not_parsable_content");
+    mapConfigs.put("target_date_pattern", "not_parsable_content");
 
     MapperDate mapperDate = new MapperDate();
     assertFalse("Was able to initialize!", mapperDate.init(null, "someField", null, mapConfigs));
@@ -112,7 +112,7 @@ public class MapperDateTest {
     LOG.info("testMapperDate_invalidEpochValue()");
 
     Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("date_pattern", "epoch");
+    mapConfigs.put("target_date_pattern", "epoch");
 
     MapperDate mapperDate = new MapperDate();
     assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapConfigs));
@@ -130,7 +130,7 @@ public class MapperDateTest {
     LOG.info("testMapperDate_invalidDateStringValue()");
 
     Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("date_pattern", LogFeederUtil.DATE_FORMAT);
+    mapConfigs.put("target_date_pattern", LogFeederUtil.DATE_FORMAT);
 
     MapperDate mapperDate = new MapperDate();
     assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapConfigs));
@@ -142,4 +142,60 @@ public class MapperDateTest {
     assertEquals("Invalid value wasn't returned as it is", invalidValue, mappedValue);
     assertTrue("jsonObj is not empty", jsonObj.isEmpty());
   }
+  
+  @Test
+  public void testMapperDate_patternWithoutYear_previousYearLog() throws Exception {
+    LOG.info("testMapperDate_patternWithoutYear_previousYearLog()");
+    String fieldName = "logtime";
+    Calendar currentCalendar = Calendar.getInstance();
+    Map<String, Object> mapConfigs = new HashMap<>();
+    mapConfigs.put("target_date_pattern", LogFeederUtil.DATE_FORMAT);
+    String srcDatePattern ="MMM dd HH:mm:ss";
+    mapConfigs.put("src_date_pattern", srcDatePattern);
+    MapperDate mapperDate = new MapperDate();
+    assertTrue("Could not initialize!", mapperDate.init(null, fieldName, null, mapConfigs));
+    Map<String, Object> jsonObj = new HashMap<>();
+    Calendar nextMonthCalendar = Calendar.getInstance();
+    
+    nextMonthCalendar.set(Calendar.MONTH, currentCalendar.get(Calendar.MONTH) + 1);
+    String inputDateStr = new SimpleDateFormat("MMM").format(nextMonthCalendar.getTime()) + " 01 12:01:45";
+    Object mappedValue = mapperDate.apply(jsonObj, inputDateStr);
+    Date mappedDateValue = new SimpleDateFormat(LogFeederUtil.DATE_FORMAT).parse(mappedValue.toString());
+    String mappedDateValueStr = new SimpleDateFormat(srcDatePattern).format(mappedDateValue);
+    assertEquals(Date.class, mappedDateValue.getClass());
+    
+    int expectedLogYear = currentCalendar.get(Calendar.YEAR)-1;
+    Calendar mappedValueCalendar = Calendar.getInstance();
+    mappedValueCalendar.setTime(mappedDateValue);
+    assertEquals("Mapped year wasn't matched properly", expectedLogYear, mappedValueCalendar.get(Calendar.YEAR));
+    assertEquals("Mapped date wasn't matched properly", inputDateStr, mappedDateValueStr);
+    assertEquals("Value wasn't put into jsonObj", mappedValue, jsonObj.remove(fieldName));
+    assertTrue("jsonObj is not empty", jsonObj.isEmpty());
+  }
+  
+  @Test
+  public void testMapperDate_patternWithoutYear_currentYearLog() throws Exception {
+    LOG.info("testMapperDate_patternWithoutYear_currentYearLog()");
+    String fieldName = "logtime";
+    Calendar currentCalendar = Calendar.getInstance();
+    Map<String, Object> mapConfigs = new HashMap<>();
+    mapConfigs.put("target_date_pattern", LogFeederUtil.DATE_FORMAT);
+    String srcDatePattern ="MMM dd HH:mm:ss";
+    mapConfigs.put("src_date_pattern", srcDatePattern);
+    MapperDate mapperDate = new MapperDate();
+    assertTrue("Could not initialize!", mapperDate.init(null, fieldName, null, mapConfigs));
+    Map<String, Object> jsonObj = new HashMap<>();
+    String inputDateStr = new SimpleDateFormat("MMM").format(currentCalendar.getTime()) + " 01 12:01:45";
+    Object mappedValue = mapperDate.apply(jsonObj, inputDateStr);
+    Date mappedDateValue = new SimpleDateFormat(LogFeederUtil.DATE_FORMAT).parse(mappedValue.toString());
+    String mappedDateValueStr = new SimpleDateFormat(srcDatePattern).format(mappedDateValue);
+    assertEquals(Date.class, mappedDateValue.getClass());
+    int expectedLogYear = currentCalendar.get(Calendar.YEAR);
+    Calendar mappedValueCalendar = Calendar.getInstance();
+    mappedValueCalendar.setTime(mappedDateValue);
+    assertEquals("Mapped year wasn't matched properly", expectedLogYear, mappedValueCalendar.get(Calendar.YEAR));
+    assertEquals("Mapped date wasn't matched properly", inputDateStr, mappedDateValueStr);
+    assertEquals("Value wasn't put into jsonObj", mappedValue, jsonObj.remove(fieldName));
+    assertTrue("jsonObj is not empty", jsonObj.isEmpty());
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-logsearch/ambari-logsearch-portal/src/main/resources/HadoopServiceConfig.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/HadoopServiceConfig.json b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/HadoopServiceConfig.json
index 819af25..dd4bf6b 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/HadoopServiceConfig.json
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/HadoopServiceConfig.json
@@ -470,6 +470,19 @@
       ],
       "dependencies": [
       ]
+    },
+   "System": {
+      "label": "System",
+      "components": [
+        {
+          "name": "system_message"
+        },
+        {
+          "name": "secure_log"
+        }
+      ],
+      "dependencies": [
+      ]
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties b/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
index 8112462..b588a2d 100644
--- a/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
+++ b/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
@@ -19,8 +19,10 @@ logfeeder.config.files=shipper-conf/global.config.json,\
   shipper-conf/output.config.json,\
   shipper-conf/input.config-zookeeper.json,\
   shipper-conf/input.config-logsearch.json,\
-  shipper-conf/input.config-hst.json
+  shipper-conf/input.config-hst.json,\
+  shipper-conf/input.config-system_message.json,\
+  shipper-conf/input.config-secure_log.json
 logfeeder.log.filter.enable=true
 logfeeder.solr.config.interval=5
 logfeeder.solr.core.config.name=history
-logfeeder.solr.zk_connect_string=localhost:9983
\ No newline at end of file
+logfeeder.solr.zk_connect_string=localhost:9983

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-secure_log.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-secure_log.json b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-secure_log.json
new file mode 100644
index 0000000..93f6b1e
--- /dev/null
+++ b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-secure_log.json
@@ -0,0 +1,31 @@
+{
+  "input": [
+    {
+      "type": "secure_log",
+      "rowtype": "service",
+      "path": "/root/test-logs/secure_log/secure-log.txt"
+    }
+  ],
+  "filter": [
+    {
+      "filter": "grok",
+      "conditions": {
+        "fields": {
+          "type": [
+            "secure_log"
+          ]
+        }
+      },
+      "multiline_pattern": "^(%{SYSLOGTIMESTAMP:logtime})",
+      "message_pattern": "(?m)^%{SYSLOGTIMESTAMP:logtime}%{SPACE}%{SYSLOGHOST:host}%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values": {
+        "logtime": {
+          "map_date": {
+            "target_date_pattern": "yyyy-MM-dd HH:mm:ss,SSS",
+            "src_date_pattern" :"MMM dd HH:mm:ss"
+          }
+        }
+      }
+    }
+  ]
+}
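
The grok macros referenced above (SYSLOGTIMESTAMP, SYSLOGHOST, GREEDYDATA) are resolved
from Log Feeder's grok pattern library; the regular expression below is only a rough,
hand-expanded approximation of that message_pattern, and the class name is made up, but
it shows how one of the sample lines from the test data is split into the logtime, host
and log_message fields before map_date rewrites logtime.

  import java.util.regex.Matcher;
  import java.util.regex.Pattern;

  public class SecureLogSplitSketch {
    // Approximation of "^%{SYSLOGTIMESTAMP:logtime}%{SPACE}%{SYSLOGHOST:host}%{SPACE}%{GREEDYDATA:log_message}"
    private static final Pattern LINE = Pattern.compile(
        "^(?<logtime>\\w{3}\\s+\\d{1,2} \\d{2}:\\d{2}:\\d{2})\\s+(?<host>\\S+)\\s+(?<msg>.*)$");

    public static void main(String[] args) {
      String line = "Aug  9 11:53:53 logsearch.apache.org su: pam_unix(su-l:session): "
          + "session opened for user ambari-qa by (uid=0)";
      Matcher m = LINE.matcher(line);
      if (m.matches()) {
        System.out.println("logtime     = " + m.group("logtime")); // Aug  9 11:53:53  -> input to map_date
        System.out.println("host        = " + m.group("host"));    // logsearch.apache.org
        System.out.println("log_message = " + m.group("msg"));     // su: pam_unix(...): session opened ...
      }
    }
  }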

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-system_message.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-system_message.json b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-system_message.json
new file mode 100644
index 0000000..aef586d
--- /dev/null
+++ b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-system_message.json
@@ -0,0 +1,31 @@
+{
+  "input": [
+    {
+      "type": "system_message",
+      "rowtype": "service",
+      "path": "/root/test-logs/system_message/message_logs.txt"
+    }
+  ],
+  "filter": [
+    {
+      "filter": "grok",
+      "conditions": {
+        "fields": {
+          "type": [
+            "system_message"
+          ]
+        }
+      },
+      "multiline_pattern": "^(%{SYSLOGTIMESTAMP:logtime})",
+      "message_pattern": "(?m)^%{SYSLOGTIMESTAMP:logtime}%{SPACE}%{SYSLOGHOST:host}%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values": {
+        "logtime": {
+          "map_date": {
+            "target_date_pattern": "yyyy-MM-dd HH:mm:ss,SSS",
+            "src_date_pattern" :"MMM dd HH:mm:ss"
+          }
+        }
+      }
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-logsearch/docker/test-config/logsearch/logsearch.properties
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-config/logsearch/logsearch.properties b/ambari-logsearch/docker/test-config/logsearch/logsearch.properties
index 38d4aad..2c83581 100644
--- a/ambari-logsearch/docker/test-config/logsearch/logsearch.properties
+++ b/ambari-logsearch/docker/test-config/logsearch/logsearch.properties
@@ -44,7 +44,7 @@ logsearch.solr.jmx.port=18886
 
 # Logfeeder Settings
 
-logsearch.logfeeder.include.default.level=FATAL,ERROR,WARN,INFO,DEBUG,TRACE
+logsearch.logfeeder.include.default.level=FATAL,ERROR,WARN,INFO,DEBUG,TRACE,UNKNOWN
 
 # logsearch-admin.json
 logsearch.auth.file.enable=true
@@ -54,4 +54,4 @@ logsearch.auth.ldap.enable=false
 logsearch.auth.simple.enable=false
 logsearch.auth.external_auth.enable=false
 
-logsearch.protocol=http
\ No newline at end of file
+logsearch.protocol=http

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-logsearch/docker/test-logs/secure_log/secure-log.txt
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-logs/secure_log/secure-log.txt b/ambari-logsearch/docker/test-logs/secure_log/secure-log.txt
new file mode 100644
index 0000000..de7fee6
--- /dev/null
+++ b/ambari-logsearch/docker/test-logs/secure_log/secure-log.txt
@@ -0,0 +1,11 @@
+Aug  9 11:53:53 logsearch.apache.org su: pam_unix(su-l:session): session opened for user ambari-qa by (uid=0)
+Aug  9 11:53:53 logsearch.apache.org su: pam_unix(su-l:session): session closed for user ambari-qa
+Aug  9 11:53:53 logsearch.apache.org su: pam_unix(su-l:session): session opened for user ambari-qa by (uid=0)
+Aug  9 11:53:53 logsearch.apache.org su: pam_unix(su-l:session): session closed for user ambari-qa
+Aug  9 11:54:19 logsearch.apache.org su: pam_unix(su-l:session): session opened for user ambari-qa by (uid=0)
+Aug  9 11:54:19 logsearch.apache.org su: pam_unix(su-l:session): session closed for user ambari-qa
+Aug  9 11:54:19 logsearch.apache.org su: pam_unix(su-l:session): session opened for user ambari-qa by (uid=0)
+Aug  9 11:54:19 logsearch.apache.org su: pam_unix(su-l:session): session closed for user ambari-qa
+Aug  9 11:54:22 logsearch.apache.org su: pam_unix(su-l:session): session opened for user yarn by (uid=0)
+Aug  9 11:54:22 logsearch.apache.org su: pam_unix(su-l:session): session closed for user yarn
+Aug  9 11:54:22 logsearch.apache.org su: pam_unix(su-l:session): session opened for user yarn by (uid=0)

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-logsearch/docker/test-logs/system_message/message_logs.txt
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-logs/system_message/message_logs.txt b/ambari-logsearch/docker/test-logs/system_message/message_logs.txt
new file mode 100644
index 0000000..a898622
--- /dev/null
+++ b/ambari-logsearch/docker/test-logs/system_message/message_logs.txt
@@ -0,0 +1,17 @@
+Aug 21 03:09:02 logsearch.apache.org rsyslogd: [origin software="rsyslogd" swVersion="5.8.10" x-pid="955" x-info="http://www.rsyslog.com"] rsyslogd was HUPed
+Aug 21 10:53:02 logsearch.apache.org dhclient[891]: DHCPREQUEST on eth0 to 172.22.112.67 port 67 (xid=0x2cb1d8ad)
+Aug 21 10:53:02 logsearch.apache.org dhclient[891]: DHCPACK from 172.22.112.67 (xid=0x2cb1d8ad)
+Aug 21 10:53:03 logsearch.apache.org dhclient[891]: bound to 172.22.89.76 -- renewal in 37432 seconds.
+Aug 21 21:16:55 logsearch.apache.org dhclient[891]: DHCPREQUEST on eth0 to 172.22.112.67 port 67 (xid=0x2cb1d8ad)
+Aug 21 21:16:55 logsearch.apache.org dhclient[891]: DHCPACK from 172.22.112.67 (xid=0x2cb1d8ad)
+Aug 21 21:16:56 logsearch.apache.org dhclient[891]: bound to 172.22.89.76 -- renewal in 35453 seconds.
+Aug 22 06:39:48 logsearch.apache.org rsyslogd-2177: imuxsock lost 369 messages from pid 5254 due to rate-limiting
+Aug 22 07:07:49 logsearch.apache.org dhclient[891]: DHCPREQUEST on eth0 to 172.22.112.67 port 67 (xid=0x2cb1d8ad)
+Aug 22 07:07:49 logsearch.apache.org dhclient[891]: DHCPACK from 172.22.112.67 (xid=0x2cb1d8ad)
+Aug 22 07:07:50 logsearch.apache.org dhclient[891]: bound to 172.22.89.76 -- renewal in 36355 seconds.
+Aug 22 17:13:45 logsearch.apache.org dhclient[891]: DHCPREQUEST on eth0 to 172.22.112.67 port 67 (xid=0x2cb1d8ad)
+Aug 22 17:13:45 logsearch.apache.org dhclient[891]: DHCPACK from 172.22.112.67 (xid=0x2cb1d8ad)
+Aug 22 17:13:46 logsearch.apache.org dhclient[891]: bound to 172.22.89.76 -- renewal in 37482 seconds.
+Aug 23 03:38:28 logsearch.apache.org dhclient[891]: DHCPREQUEST on eth0 to 172.22.112.67 port 67 (xid=0x2cb1d8ad)
+Aug 23 03:38:28 logsearch.apache.org dhclient[891]: DHCPACK from 172.22.112.67 (xid=0x2cb1d8ad)
+Aug 23 03:38:29 logsearch.apache.org dhclient[891]: bound to 172.22.89.76 -- renewal in 31187 seconds.

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-accumulo.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-accumulo.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-accumulo.json.j2
index eb56b1c..1fd93cf 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-accumulo.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-accumulo.json.j2
@@ -62,7 +62,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }
@@ -91,7 +91,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
           
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
index 44dc61a..34f4b30 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
@@ -78,7 +78,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         },
@@ -110,7 +110,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"dd MMM yyyy HH:mm:ss"
+            "target_date_pattern":"dd MMM yyyy HH:mm:ss"
           }
 
         }
@@ -135,7 +135,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }
@@ -160,7 +160,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }
@@ -185,7 +185,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss.SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss.SSS"
           }
 
         },
@@ -217,7 +217,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }
@@ -242,7 +242,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd'T'HH:mm:ss.SSSXX"
+            "target_date_pattern":"yyyy-MM-dd'T'HH:mm:ss.SSSXX"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ams.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ams.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ams.json.j2
index 355e903..0bcaf56 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ams.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ams.json.j2
@@ -61,7 +61,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }
@@ -87,7 +87,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }
@@ -112,7 +112,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy/MM/dd HH:mm:ss"
+            "target_date_pattern":"yyyy/MM/dd HH:mm:ss"
           }
 
         },
@@ -169,7 +169,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-atlas.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-atlas.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-atlas.json.j2
index ae799b3..53912c8 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-atlas.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-atlas.json.j2
@@ -41,7 +41,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-falcon.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-falcon.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-falcon.json.j2
index c0d27a8..3fea3d3 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-falcon.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-falcon.json.j2
@@ -41,7 +41,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-flume.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-flume.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-flume.json.j2
index d1ea632..b31054b 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-flume.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-flume.json.j2
@@ -42,7 +42,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"dd MMM yyyy HH:mm:ss,SSS"
+            "target_date_pattern":"dd MMM yyyy HH:mm:ss,SSS"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hbase.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hbase.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hbase.json.j2
index 78204c7..0c04052 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hbase.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hbase.json.j2
@@ -53,7 +53,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }
@@ -77,7 +77,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hdfs.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hdfs.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hdfs.json.j2
index 2222171..3dff6f5 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hdfs.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hdfs.json.j2
@@ -84,7 +84,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
         }
 
@@ -108,7 +108,7 @@
       "post_map_values":{
         "evtTime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hive.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hive.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hive.json.j2
index f0fbf0a..f96b16f 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hive.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hive.json.j2
@@ -52,7 +52,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }
@@ -77,7 +77,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"dd MMM yyyy HH:mm:ss,SSS"
+            "target_date_pattern":"dd MMM yyyy HH:mm:ss,SSS"
           }
 
         },

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hst.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hst.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hst.json.j2
index 82b89a9..33a41a6 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hst.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hst.json.j2
@@ -55,7 +55,7 @@
          "post_map_values":{
            "logtime":{
               "map_date":{
-                "date_pattern":"dd MMM yyyy HH:mm:ss"
+                "target_date_pattern":"dd MMM yyyy HH:mm:ss"
               }
             },
             "level":{
@@ -83,7 +83,7 @@
          "post_map_values":{
            "logtime":{
               "map_date":{
-                 "date_pattern":"dd MMM yyyy HH:mm:ss,SSS"
+                 "target_date_pattern":"dd MMM yyyy HH:mm:ss,SSS"
                }
 
            },

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-infra.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-infra.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-infra.json.j2
index d2b9ce5..20fa49d 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-infra.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-infra.json.j2
@@ -42,7 +42,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-kafka.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-kafka.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-kafka.json.j2
index 73e501d..d05d4ad 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-kafka.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-kafka.json.j2
@@ -64,7 +64,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }
@@ -91,7 +91,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-knox.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-knox.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-knox.json.j2
index 50c2c6d..df98e25 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-knox.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-knox.json.j2
@@ -54,7 +54,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-nifi.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-nifi.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-nifi.json.j2
index 64c69a7..99f1740 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-nifi.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-nifi.json.j2
@@ -60,7 +60,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-oozie.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-oozie.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-oozie.json.j2
index fc125ec..ddb8198 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-oozie.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-oozie.json.j2
@@ -42,7 +42,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ranger.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ranger.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ranger.json.j2
index 8ec0153..7b12869 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ranger.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ranger.json.j2
@@ -58,7 +58,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }
@@ -83,7 +83,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }
@@ -108,7 +108,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"dd MMM yyyy HH:mm:ss"
+            "target_date_pattern":"dd MMM yyyy HH:mm:ss"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark.json.j2
index 4aacc6f..4371276 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark.json.j2
@@ -53,7 +53,7 @@
           "post_map_values":{
             "logtime":{
               "map_date":{
-                "date_pattern":"yy/MM/dd HH:mm:ss"
+                "target_date_pattern":"yy/MM/dd HH:mm:ss"
               }
 
             },

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark2.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark2.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark2.json.j2
index 20bd946..01aea36 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark2.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark2.json.j2
@@ -47,7 +47,7 @@
           "post_map_values":{
             "logtime":{
               "map_date":{
-                "date_pattern":"yy/MM/dd HH:mm:ss"
+                "target_date_pattern":"yy/MM/dd HH:mm:ss"
               }
 
             },

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-storm.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-storm.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-storm.json.j2
index e8e95c3..07a4c2e 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-storm.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-storm.json.j2
@@ -72,7 +72,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss.SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss.SSS"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-yarn.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-yarn.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-yarn.json.j2
index 3d9677e..f8e77ae 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-yarn.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-yarn.json.j2
@@ -72,7 +72,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zeppelin.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zeppelin.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zeppelin.json.j2
index 09cd8b3..c0948b5 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zeppelin.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zeppelin.json.j2
@@ -42,7 +42,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c293088/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zookeeper.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zookeeper.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zookeeper.json.j2
index 5525a64..fcc5dc9 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zookeeper.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zookeeper.json.j2
@@ -42,7 +42,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }


[49/50] [abbrv] ambari git commit: AMBARI-18246. Clean up Log Feeder (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java
index 5feb9c4..e13d9bd 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/Input.java
@@ -21,7 +21,6 @@ package org.apache.ambari.logfeeder.input;
 
 import java.io.File;
 import java.util.ArrayList;
-import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -29,88 +28,138 @@ import java.util.Map;
 import org.apache.ambari.logfeeder.common.ConfigBlock;
 import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.filter.Filter;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.output.Output;
-import org.apache.ambari.logfeeder.output.OutputMgr;
+import org.apache.ambari.logfeeder.output.OutputManager;
 import org.apache.log4j.Logger;
 
 public abstract class Input extends ConfigBlock implements Runnable {
-  static private Logger logger = Logger.getLogger(Input.class);
-
-  protected OutputMgr outputMgr;
-  protected InputMgr inputMgr;
+  private static final Logger LOG = Logger.getLogger(Input.class);
 
+  private static final boolean DEFAULT_TAIL = true;
+  private static final boolean DEFAULT_USE_EVENT_MD5 = false;
+  private static final boolean DEFAULT_GEN_EVENT_MD5 = true;
+  
+  protected InputManager inputManager;
+  protected OutputManager outputManager;
   private List<Output> outputList = new ArrayList<Output>();
 
-  private Filter firstFilter = null;
   private Thread thread;
-  private boolean isClosed = false;
-  protected String filePath = null;
-  private String type = null;
+  private String type;
+  protected String filePath;
+  private Filter firstFilter;
+  private boolean isClosed;
 
-  protected boolean tail = true;
-  private boolean useEventMD5 = false;
-  private boolean genEventMD5 = true;
+  protected boolean tail;
+  private boolean useEventMD5;
+  private boolean genEventMD5;
 
-  protected MetricCount readBytesMetric = new MetricCount();
+  protected MetricData readBytesMetric = new MetricData(getReadBytesMetricName(), false);
+  protected String getReadBytesMetricName() {
+    return null;
+  }
+  
+  @Override
+  public void loadConfig(Map<String, Object> map) {
+    super.loadConfig(map);
+    String typeValue = getStringValue("type");
+    if (typeValue != null) {
+      // Explicitly add type and value to field list
+      contextFields.put("type", typeValue);
+      @SuppressWarnings("unchecked")
+      Map<String, Object> addFields = (Map<String, Object>) map.get("add_fields");
+      if (addFields == null) {
+        addFields = new HashMap<String, Object>();
+        map.put("add_fields", addFields);
+      }
+      addFields.put("type", typeValue);
+    }
+  }
 
-  /**
-   * This method will be called from the thread spawned for the output. This
-   * method should only exit after all data are read from the source or the
-   * process is exiting
-   */
-  abstract void start() throws Exception;
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  public void setInputManager(InputManager inputManager) {
+    this.inputManager = inputManager;
+  }
+
+  public void setOutputManager(OutputManager outputManager) {
+    this.outputManager = outputManager;
+  }
+
+  public void addFilter(Filter filter) {
+    if (firstFilter == null) {
+      firstFilter = filter;
+    } else {
+      Filter f = firstFilter;
+      while (f.getNextFilter() != null) {
+        f = f.getNextFilter();
+      }
+      f.setNextFilter(filter);
+    }
+  }
+
+  public void addOutput(Output output) {
+    outputList.add(output);
+  }
 
   @Override
   public void init() throws Exception {
     super.init();
-    tail = getBooleanValue("tail", tail);
-    useEventMD5 = getBooleanValue("use_event_md5_as_id", useEventMD5);
-    genEventMD5 = getBooleanValue("gen_event_md5", genEventMD5);
+    tail = getBooleanValue("tail", DEFAULT_TAIL);
+    useEventMD5 = getBooleanValue("use_event_md5_as_id", DEFAULT_USE_EVENT_MD5);
+    genEventMD5 = getBooleanValue("gen_event_md5", DEFAULT_GEN_EVENT_MD5);
 
     if (firstFilter != null) {
       firstFilter.init();
     }
   }
 
-  @Override
-  public String getNameForThread() {
-    if (filePath != null) {
-      try {
-        return (type + "=" + (new File(filePath)).getName());
-      } catch (Throwable ex) {
-        logger.warn("Couldn't get basename for filePath=" + filePath,
-          ex);
-      }
+  boolean monitor() {
+    if (isReady()) {
+      LOG.info("Starting thread. " + getShortDescription());
+      thread = new Thread(this, getNameForThread());
+      thread.start();
+      return true;
+    } else {
+      return false;
     }
-    return super.getNameForThread() + ":" + type;
   }
 
+  public abstract boolean isReady();
+
   @Override
   public void run() {
     try {
-      logger.info("Started to monitor. " + getShortDescription());
+      LOG.info("Started to monitor. " + getShortDescription());
       start();
     } catch (Exception e) {
-      logger.error("Error writing to output.", e);
+      LOG.error("Error writing to output.", e);
     }
-    logger.info("Exiting thread. " + getShortDescription());
+    LOG.info("Exiting thread. " + getShortDescription());
   }
 
+  /**
+   * This method will be called from the thread spawned for the output. This
+   * method should only exit after all data are read from the source or the
+   * process is exiting
+   */
+  abstract void start() throws Exception;
+
   protected void outputLine(String line, InputMarker marker) {
-    statMetric.count++;
-    readBytesMetric.count += (line.length());
+    statMetric.value++;
+    readBytesMetric.value += (line.length());
 
     if (firstFilter != null) {
       try {
         firstFilter.apply(line, marker);
       } catch (LogfeederException e) {
-        logger.error(e.getLocalizedMessage(),e);
+        LOG.error(e.getLocalizedMessage(), e);
       }
     } else {
-      // TODO: For now, let's make filter mandatory, so that no one
-      // accidently forgets to write filter
-      // outputMgr.write(line, this);
+      // TODO: For now, let's make filter mandatory, so that no one accidentally forgets to write a filter
+      // outputManager.write(line, this);
     }
   }
 
@@ -120,60 +169,10 @@ public abstract class Input extends ConfigBlock implements Runnable {
     }
   }
 
-  public boolean monitor() {
-    if (isReady()) {
-      logger.info("Starting thread. " + getShortDescription());
-      thread = new Thread(this, getNameForThread());
-      thread.start();
-      return true;
-    } else {
-      return false;
-    }
-  }
-
-  public void checkIn(InputMarker inputMarker) {
-    // Default implementation is to ignore.
-  }
-
-  /**
-   * This is generally used by final checkin
-   */
-  public void checkIn() {
-  }
-
-  public boolean isReady() {
-    return true;
-  }
-
-  public boolean isTail() {
-    return tail;
-  }
-
-  public void setTail(boolean tail) {
-    this.tail = tail;
-  }
-
-  public boolean isUseEventMD5() {
-    return useEventMD5;
-  }
-
-  public void setUseEventMD5(boolean useEventMD5) {
-    this.useEventMD5 = useEventMD5;
-  }
-
-  public boolean isGenEventMD5() {
-    return genEventMD5;
-  }
-
-  public void setGenEventMD5(boolean genEventMD5) {
-    this.genEventMD5 = genEventMD5;
-  }
-
   @Override
   public void setDrain(boolean drain) {
-    logger.info("Request to drain. " + getShortDescription());
+    LOG.info("Request to drain. " + getShortDescription());
     super.setDrain(drain);
-    ;
     try {
       thread.interrupt();
     } catch (Throwable t) {
@@ -181,38 +180,36 @@ public abstract class Input extends ConfigBlock implements Runnable {
     }
   }
 
-  public Filter getFirstFilter() {
-    return firstFilter;
-  }
-
-  public void setFirstFilter(Filter filter) {
-    firstFilter = filter;
+  public void addMetricsContainers(List<MetricData> metricsList) {
+    super.addMetricsContainers(metricsList);
+    if (firstFilter != null) {
+      firstFilter.addMetricsContainers(metricsList);
+    }
+    metricsList.add(readBytesMetric);
   }
 
-  public void setInputMgr(InputMgr inputMgr) {
-    this.inputMgr = inputMgr;
-  }
+  @Override
+  public void logStat() {
+    super.logStat();
+    logStatForMetric(readBytesMetric, "Stat: Bytes Read");
 
-  public void setOutputMgr(OutputMgr outputMgr) {
-    this.outputMgr = outputMgr;
+    if (firstFilter != null) {
+      firstFilter.logStat();
+    }
   }
 
-  public String getFilePath() {
-    return filePath;
-  }
+  public abstract void checkIn(InputMarker inputMarker);
 
-  public void setFilePath(String filePath) {
-    this.filePath = filePath;
-  }
+  public abstract void lastCheckIn();
 
   public void close() {
-    logger.info("Close called. " + getShortDescription());
+    LOG.info("Close called. " + getShortDescription());
 
     try {
       if (firstFilter != null) {
         firstFilter.close();
       } else {
-        outputMgr.close();
+        outputManager.close();
       }
     } catch (Throwable t) {
       // Ignore
@@ -220,86 +217,60 @@ public abstract class Input extends ConfigBlock implements Runnable {
     isClosed = true;
   }
 
-  public void setClosed(boolean isClosed) {
-    this.isClosed = isClosed;
-  }
-
-  public boolean isClosed() {
-    return isClosed;
-  }
-
-  @Override
-  public void loadConfig(Map<String, Object> map) {
-    super.loadConfig(map);
-    String typeValue = getStringValue("type");
-    if (typeValue != null) {
-      // Explicitly add type and value to field list
-      contextFields.put("type", typeValue);
-      @SuppressWarnings("unchecked")
-      Map<String, Object> addFields = (Map<String, Object>) map
-        .get("add_fields");
-      if (addFields == null) {
-        addFields = new HashMap<String, Object>();
-        map.put("add_fields", addFields);
-      }
-      addFields.put("type", typeValue);
-    }
+  public boolean isTail() {
+    return tail;
   }
 
-  @Override
-  public String getShortDescription() {
-    return null;
+  public boolean isUseEventMD5() {
+    return useEventMD5;
   }
 
-  @Override
-  public void logStat() {
-    super.logStat();
-    logStatForMetric(readBytesMetric, "Stat: Bytes Read");
-
-    if (firstFilter != null) {
-      firstFilter.logStat();
-    }
+  public boolean isGenEventMD5() {
+    return genEventMD5;
   }
 
-  @Override
-  public String toString() {
-    return getShortDescription();
+  public Filter getFirstFilter() {
+    return firstFilter;
   }
 
-  public void rollOver() {
-    // Only some inputs support it. E.g. InputFile
+  public String getFilePath() {
+    return filePath;
   }
 
-  public String getType() {
-    return type;
+  public void setFilePath(String filePath) {
+    this.filePath = filePath;
   }
 
-  public void setType(String type) {
-    this.type = type;
+  public void setClosed(boolean isClosed) {
+    this.isClosed = isClosed;
   }
 
-  public Date getEventTime() {
-    return null;
+  public boolean isClosed() {
+    return isClosed;
   }
 
   public List<Output> getOutputList() {
     return outputList;
   }
-
-  public void addOutput(Output output) {
-    outputList.add(output);
-  }
-
-  public void addMetricsContainers(List<MetricCount> metricsList) {
-    super.addMetricsContainers(metricsList);
-    if (firstFilter != null) {
-      firstFilter.addMetricsContainers(metricsList);
-    }
-    metricsList.add(readBytesMetric);
-  }
   
   public Thread getThread(){
     return thread;
   }
 
+  @Override
+  public String getNameForThread() {
+    if (filePath != null) {
+      try {
+        return (type + "=" + (new File(filePath)).getName());
+      } catch (Throwable ex) {
+        LOG.warn("Couldn't get basename for filePath=" + filePath, ex);
+      }
+    }
+    return super.getNameForThread() + ":" + type;
+  }
+
+  @Override
+  public String toString() {
+    return getShortDescription();
+  }
 }
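
For reference, a minimal standalone sketch of the lifecycle this refactored base class enforces: monitor() spawns the worker thread only once isReady() reports true, run() delegates to the abstract start(), and start() is expected to block until the source is fully read or the feeder is draining. The class and method names below are illustrative only and are not part of this patch.

    abstract class InputLifecycleSketch implements Runnable {
      private Thread thread;

      abstract boolean isReady();              // e.g. the monitored file exists
      abstract void start() throws Exception;  // blocks while the source is being read
      abstract String getShortDescription();

      boolean monitor() {
        if (!isReady()) {
          return false;                        // caller keeps it on a "not ready" list and retries later
        }
        thread = new Thread(this, "input=" + getShortDescription());
        thread.start();
        return true;
      }

      @Override
      public void run() {
        try {
          start();
        } catch (Exception e) {
          System.err.println("Error reading input " + getShortDescription() + ": " + e);
        }
      }
    }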

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java
index c9f5ded..3737839 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputFile.java
@@ -19,528 +19,99 @@
 package org.apache.ambari.logfeeder.input;
 
 import java.io.BufferedReader;
-import java.io.EOFException;
 import java.io.File;
 import java.io.FileFilter;
 import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.attribute.BasicFileAttributes;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
 
 import org.apache.ambari.logfeeder.input.reader.LogsearchReaderFactory;
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.FileUtil;
 import org.apache.commons.io.filefilter.WildcardFileFilter;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
+import org.apache.commons.lang3.ArrayUtils;
 import org.apache.solr.common.util.Base64;
 
-public class InputFile extends Input {
-  private static final Logger logger = Logger.getLogger(InputFile.class);
-
-  private String logPath = null;
-  private boolean isStartFromBegining = true;
-
-  private boolean isReady = false;
-  private File[] logPathFiles = null;
-  private Object fileKey = null;
-  private String base64FileKey = null;
-
-  private boolean isRolledOver = false;
-  private boolean addWildCard = false;
-
-  private long lastCheckPointTimeMS = 0;
-  private int checkPointIntervalMS = 5 * 1000; // 5 seconds
-  private RandomAccessFile checkPointWriter = null;
-  private Map<String, Object> jsonCheckPoint = null;
-
-  private File checkPointFile = null;
-
-  private InputMarker lastCheckPointInputMarker = null;
-
-  private String checkPointExtension = ".cp";
-
-  @Override
-  public void init() throws Exception {
-    logger.info("init() called");
-    statMetric.metricsName = "input.files.read_lines";
-    readBytesMetric.metricsName = "input.files.read_bytes";
-    checkPointExtension = LogFeederUtil.getStringProperty(
-      "logfeeder.checkpoint.extension", checkPointExtension);
-
-    // Let's close the file and set it to true after we start monitoring it
-    setClosed(true);
-    logPath = getStringValue("path");
-    tail = getBooleanValue("tail", tail);
-    addWildCard = getBooleanValue("add_wild_card", addWildCard);
-    checkPointIntervalMS = getIntValue("checkpoint.interval.ms",
-      checkPointIntervalMS);
-
-    if (logPath == null || logPath.isEmpty()) {
-      logger.error("path is empty for file input. "
-        + getShortDescription());
-      return;
-    }
-
-    String startPosition = getStringValue("start_position");
-    if (StringUtils.isEmpty(startPosition)
-      || startPosition.equalsIgnoreCase("beginning")
-      || startPosition.equalsIgnoreCase("begining")) {
-      isStartFromBegining = true;
-    }
-
-    if (!tail) {
-      // start position end doesn't apply if we are not tailing
-      isStartFromBegining = true;
-    }
-
-    setFilePath(logPath);
-    boolean isFileReady = isReady();
-
-    logger.info("File to monitor " + logPath + ", tail=" + tail
-      + ", addWildCard=" + addWildCard + ", isReady=" + isFileReady);
-
-    super.init();
-  }
+public class InputFile extends AbstractInputFile {
 
   @Override
   public boolean isReady() {
     if (!isReady) {
       // Let's try to check whether the file is available
-      logPathFiles = getActualFiles(logPath);
-      if (logPathFiles != null && logPathFiles.length > 0
-        && logPathFiles[0].isFile()) {
-
-        if (isTail() && logPathFiles.length > 1) {
-          logger.warn("Found multiple files (" + logPathFiles.length
-            + ") for the file filter " + filePath
-            + ". Will use only the first one. Using "
-            + logPathFiles[0].getAbsolutePath());
+      logFiles = getActualFiles(logPath);
+      if (!ArrayUtils.isEmpty(logFiles) && logFiles[0].isFile()) {
+        if (tail && logFiles.length > 1) {
+          LOG.warn("Found multiple files (" + logFiles.length + ") for the file filter " + filePath +
+              ". Will use only the first one. Using " + logFiles[0].getAbsolutePath());
         }
-        logger.info("File filter " + filePath + " expanded to "
-          + logPathFiles[0].getAbsolutePath());
+        LOG.info("File filter " + filePath + " expanded to " + logFiles[0].getAbsolutePath());
         isReady = true;
       } else {
-        logger.debug(logPath + " file doesn't exist. Ignoring for now");
+        LOG.debug(logPath + " file doesn't exist. Ignoring for now");
       }
     }
     return isReady;
   }
 
   private File[] getActualFiles(String searchPath) {
-    if (addWildCard) {
-      if (!searchPath.endsWith("*")) {
-        searchPath = searchPath + "*";
-      }
-    }
-    File checkFile = new File(searchPath);
-    if (checkFile.isFile()) {
-      return new File[]{checkFile};
+    File searchFile = new File(searchPath);
+    if (searchFile.isFile()) {
+      return new File[]{searchFile};
+    } else {
+      FileFilter fileFilter = new WildcardFileFilter(searchFile.getName());
+      return searchFile.getParentFile().listFiles(fileFilter);
     }
-    // Let's do wild card search
-    // First check current folder
-    File checkFiles[] = findFileForWildCard(searchPath, new File("."));
-    if (checkFiles == null || checkFiles.length == 0) {
-      // Let's check from the parent folder
-      File parentDir = (new File(searchPath)).getParentFile();
-      if (parentDir != null) {
-        String wildCard = (new File(searchPath)).getName();
-        checkFiles = findFileForWildCard(wildCard, parentDir);
-      }
-    }
-    return checkFiles;
-  }
-
-  private File[] findFileForWildCard(String searchPath, File dir) {
-    logger.debug("findFileForWildCard(). filePath=" + searchPath + ", dir="
-      + dir + ", dir.fullpath=" + dir.getAbsolutePath());
-    FileFilter fileFilter = new WildcardFileFilter(searchPath);
-    return dir.listFiles(fileFilter);
-  }
-
-  @Override
-  synchronized public void checkIn(InputMarker inputMarker) {
-    super.checkIn(inputMarker);
-    if (checkPointWriter != null) {
-      try {
-        int lineNumber = LogFeederUtil.objectToInt(
-          jsonCheckPoint.get("line_number"), 0, "line_number");
-        if (lineNumber > inputMarker.lineNumber) {
-          // Already wrote higher line number for this input
-          return;
-        }
-        // If interval is greater than last checkPoint time, then write
-        long currMS = System.currentTimeMillis();
-        if (!isClosed()
-          && (currMS - lastCheckPointTimeMS) < checkPointIntervalMS) {
-          // Let's save this one so we can update the check point file
-          // on flush
-          lastCheckPointInputMarker = inputMarker;
-          return;
-        }
-        lastCheckPointTimeMS = currMS;
-
-        jsonCheckPoint.put("line_number", ""
-          + new Integer(inputMarker.lineNumber));
-        jsonCheckPoint.put("last_write_time_ms", "" + new Long(currMS));
-        jsonCheckPoint.put("last_write_time_date", new Date());
-
-        String jsonStr = LogFeederUtil.getGson().toJson(jsonCheckPoint);
-
-        // Let's rewind
-        checkPointWriter.seek(0);
-        checkPointWriter.writeInt(jsonStr.length());
-        checkPointWriter.write(jsonStr.getBytes());
-
-        if (isClosed()) {
-          final String LOG_MESSAGE_KEY = this.getClass()
-            .getSimpleName() + "_FINAL_CHECKIN";
-          LogFeederUtil.logErrorMessageByInterval(
-            LOG_MESSAGE_KEY,
-            "Wrote final checkPoint, input="
-              + getShortDescription()
-              + ", checkPointFile="
-              + checkPointFile.getAbsolutePath()
-              + ", checkPoint=" + jsonStr, null, logger,
-            Level.INFO);
-        }
-      } catch (Throwable t) {
-        final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-          + "_CHECKIN_EXCEPTION";
-        LogFeederUtil
-          .logErrorMessageByInterval(LOG_MESSAGE_KEY,
-            "Caught exception checkIn. , input="
-              + getShortDescription(), t, logger,
-            Level.ERROR);
-      }
-    }
-
-  }
-
-  @Override
-  public void checkIn() {
-    super.checkIn();
-    if (lastCheckPointInputMarker != null) {
-      checkIn(lastCheckPointInputMarker);
-    }
-  }
-
-  @Override
-  public void rollOver() {
-    logger.info("Marking this input file for rollover. "
-      + getShortDescription());
-    isRolledOver = true;
   }
 
   @Override
   void start() throws Exception {
-
-    if (logPathFiles == null || logPathFiles.length == 0) {
-      return;
-    }
     boolean isProcessFile = getBooleanValue("process_file", true);
     if (isProcessFile) {
-      if (isTail()) {
-        processFile(logPathFiles[0]);
+      if (tail) {
+        processFile(logFiles[0]);
       } else {
-        for (File file : logPathFiles) {
+        for (File file : logFiles) {
           try {
             processFile(file);
             if (isClosed() || isDrain()) {
-              logger.info("isClosed or isDrain. Now breaking loop.");
+              LOG.info("isClosed or isDrain. Now breaking loop.");
               break;
             }
           } catch (Throwable t) {
-            logger.error("Error processing file=" + file.getAbsolutePath(), t);
+            LOG.error("Error processing file=" + file.getAbsolutePath(), t);
           }
         }
       }
       close();
-    }else{
-      copyFiles(logPathFiles);
+    } else {
+      copyFiles(logFiles);
     }
-    
   }
 
   @Override
-  public void close() {
-    super.close();
-    logger.info("close() calling checkPoint checkIn(). "
-      + getShortDescription());
-    checkIn();
-  }
-
-  private void processFile(File logPathFile) throws FileNotFoundException,
-    IOException {
-    logger.info("Monitoring logPath=" + logPath + ", logPathFile="
-      + logPathFile);
-    BufferedReader br = null;
-    checkPointFile = null;
-    checkPointWriter = null;
-    jsonCheckPoint = null;
-    int resumeFromLineNumber = 0;
-
-    int lineCount = 0;
-    try {
-      setFilePath(logPathFile.getAbsolutePath());
-      br = new BufferedReader(LogsearchReaderFactory.INSTANCE.getReader(logPathFile));
-
-      // Whether to send to output from the beginning.
-      boolean resume = isStartFromBegining;
-
-      // Seems FileWatch is not reliable, so let's only use file key comparison
-      fileKey = getFileKey(logPathFile);
-      base64FileKey = Base64.byteArrayToBase64(fileKey.toString()
-        .getBytes());
-      logger.info("fileKey=" + fileKey + ", base64=" + base64FileKey
-        + ". " + getShortDescription());
-
-      if (isTail()) {
-        try {
-          logger.info("Checking existing checkpoint file. "
-            + getShortDescription());
-
-          String fileBase64 = Base64.byteArrayToBase64(fileKey
-            .toString().getBytes());
-          String checkPointFileName = fileBase64
-            + checkPointExtension;
-          File checkPointFolder = inputMgr.getCheckPointFolderFile();
-          checkPointFile = new File(checkPointFolder,
-            checkPointFileName);
-          checkPointWriter = new RandomAccessFile(checkPointFile,
-            "rw");
-
-          try {
-            int contentSize = checkPointWriter.readInt();
-            byte b[] = new byte[contentSize];
-            int readSize = checkPointWriter.read(b, 0, contentSize);
-            if (readSize != contentSize) {
-              logger.error("Couldn't read expected number of bytes from checkpoint file. expected="
-                + contentSize
-                + ", read="
-                + readSize
-                + ", checkPointFile="
-                + checkPointFile
-                + ", input=" + getShortDescription());
-            } else {
-              String jsonCheckPointStr = new String(b, 0, readSize);
-              jsonCheckPoint = LogFeederUtil
-                .toJSONObject(jsonCheckPointStr);
-
-              resumeFromLineNumber = LogFeederUtil.objectToInt(
-                jsonCheckPoint.get("line_number"), 0,
-                "line_number");
-
-              if (resumeFromLineNumber > 0) {
-                // Let's read from last line read
-                resume = false;
-              }
-              logger.info("CheckPoint. checkPointFile="
-                + checkPointFile + ", json="
-                + jsonCheckPointStr
-                + ", resumeFromLineNumber="
-                + resumeFromLineNumber + ", resume="
-                + resume);
-            }
-          } catch (EOFException eofEx) {
-            logger.info("EOFException. Will reset checkpoint file "
-              + checkPointFile.getAbsolutePath() + " for "
-              + getShortDescription());
-          }
-          if (jsonCheckPoint == null) {
-            // This seems to be first time, so creating the initial
-            // checkPoint object
-            jsonCheckPoint = new HashMap<String, Object>();
-            jsonCheckPoint.put("file_path", filePath);
-            jsonCheckPoint.put("file_key", fileBase64);
-          }
-
-        } catch (Throwable t) {
-          logger.error(
-            "Error while configuring checkpoint file. Will reset file. checkPointFile="
-              + checkPointFile, t);
-        }
-      }
-
-      setClosed(false);
-      int sleepStep = 2;
-      int sleepIteration = 0;
-      while (true) {
-        try {
-          if (isDrain()) {
-            break;
-          }
-
-          String line = br.readLine();
-          if (line == null) {
-            if (!resume) {
-              resume = true;
-            }
-            sleepIteration++;
-            try {
-              // Since FileWatch service is not reliable, we will check
-              // file inode every n seconds after no write
-              if (sleepIteration > 4) {
-                Object newFileKey = getFileKey(logPathFile);
-                if (newFileKey != null) {
-                  if (fileKey == null
-                    || !newFileKey.equals(fileKey)) {
-                    logger.info("File key is different. Calling rollover. oldKey="
-                      + fileKey
-                      + ", newKey="
-                      + newFileKey
-                      + ". "
-                      + getShortDescription());
-                    // File has rotated.
-                    rollOver();
-                  }
-                }
-              }
-              // Flush on the second iteration
-              if (!tail && sleepIteration >= 2) {
-                logger.info("End of file. Done with filePath="
-                  + logPathFile.getAbsolutePath()
-                  + ", lineCount=" + lineCount);
-                flush();
-                break;
-              } else if (sleepIteration == 2) {
-                flush();
-              } else if (sleepIteration >= 2) {
-                if (isRolledOver) {
-                  isRolledOver = false;
-                  // Close existing file
-                  try {
-                    logger.info("File is rolled over. Closing current open file."
-                      + getShortDescription()
-                      + ", lineCount=" + lineCount);
-                    br.close();
-                  } catch (Exception ex) {
-                    logger.error("Error closing file"
-                      + getShortDescription());
-                    break;
-                  }
-                  try {
-                    logger.info("Opening new rolled over file."
-                      + getShortDescription());
-                    br = new BufferedReader(LogsearchReaderFactory.
-                      INSTANCE.getReader(logPathFile));
-                    lineCount = 0;
-                    fileKey = getFileKey(logPathFile);
-                    base64FileKey = Base64
-                      .byteArrayToBase64(fileKey
-                        .toString().getBytes());
-                    logger.info("fileKey=" + fileKey
-                      + ", base64=" + base64FileKey
-                      + ", " + getShortDescription());
-                  } catch (Exception ex) {
-                    logger.error("Error opening rolled over file. "
-                      + getShortDescription());
-                    // Let's add this to monitoring and exit this thread
-                    logger.info("Added input to not ready list."
-                      + getShortDescription());
-                    isReady = false;
-                    inputMgr.addToNotReady(this);
-                    break;
-                  }
-                  logger.info("File is successfully rolled over. "
-                    + getShortDescription());
-                  continue;
-                }
-              }
-              Thread.sleep(sleepStep * 1000);
-              sleepStep = (sleepStep * 2);
-              sleepStep = sleepStep > 10 ? 10 : sleepStep;
-            } catch (InterruptedException e) {
-              logger.info("Thread interrupted."
-                + getShortDescription());
-            }
-          } else {
-            lineCount++;
-            sleepStep = 1;
-            sleepIteration = 0;
-
-            if (!resume && lineCount > resumeFromLineNumber) {
-              logger.info("Resuming to read from last line. lineCount="
-                + lineCount
-                + ", input="
-                + getShortDescription());
-              resume = true;
-            }
-            if (resume) {
-              InputMarker marker = new InputMarker();
-              marker.base64FileKey = base64FileKey;
-              marker.input = this;
-              marker.lineNumber = lineCount;
-              outputLine(line, marker);
-            }
-          }
-        } catch (Throwable t) {
-          final String LOG_MESSAGE_KEY = this.getClass()
-            .getSimpleName() + "_READ_LOOP_EXCEPTION";
-          LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
-            "Caught exception in read loop. lineNumber="
-              + lineCount + ", input="
-              + getShortDescription(), t, logger,
-            Level.ERROR);
-
-        }
-      }
-    } finally {
-      if (br != null) {
-        logger.info("Closing reader." + getShortDescription()
-          + ", lineCount=" + lineCount);
-        try {
-          br.close();
-        } catch (Throwable t) {
-          // ignore
-        }
-      }
-    }
-  }
-
-  static public Object getFileKey(File file) {
-    try {
-      Path fileFullPath = Paths.get(file.getAbsolutePath());
-      if (fileFullPath != null) {
-        BasicFileAttributes basicAttr = Files.readAttributes(
-          fileFullPath, BasicFileAttributes.class);
-        return basicAttr.fileKey();
-      }
-    } catch (Throwable ex) {
-      logger.error("Error getting file attributes for file=" + file, ex);
-    }
-    return file.toString();
+  protected BufferedReader openLogFile(File logFile) throws FileNotFoundException {
+    BufferedReader br = new BufferedReader(LogsearchReaderFactory.INSTANCE.getReader(logFile));
+    fileKey = getFileKey(logFile);
+    base64FileKey = Base64.byteArrayToBase64(fileKey.toString().getBytes());
+    LOG.info("fileKey=" + fileKey + ", base64=" + base64FileKey + ". " + getShortDescription());
+    return br;
   }
 
   @Override
-  public String getShortDescription() {
-    return "input:source="
-      + getStringValue("source")
-      + ", path="
-      + (logPathFiles != null && logPathFiles.length > 0 ? logPathFiles[0]
-      .getAbsolutePath() : getStringValue("path"));
+  protected Object getFileKey(File logFile) {
+    return FileUtil.getFileKey(logFile);
   }
-  
-  public void copyFiles(File[] files) {
+
+  private void copyFiles(File[] files) {
     boolean isCopyFile = getBooleanValue("copy_file", false);
     if (isCopyFile && files != null) {
       for (File file : files) {
         try {
-          InputMarker marker = new InputMarker();
-          marker.input = this;
-          outputMgr.copyFile(file, marker);
+          InputMarker marker = new InputMarker(this, null, 0);
+          outputManager.copyFile(file, marker);
           if (isClosed() || isDrain()) {
-            logger.info("isClosed or isDrain. Now breaking loop.");
+            LOG.info("isClosed or isDrain. Now breaking loop.");
             break;
           }
         } catch (Throwable t) {
-          logger.error("Error processing file=" + file.getAbsolutePath(), t);
+          LOG.error("Error processing file=" + file.getAbsolutePath(), t);
         }
       }
     }
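
The simplified getActualFiles() above leans on commons-io: if the configured path is not an existing file, its basename is treated as a wildcard and matched against the parent directory. A standalone sketch of that resolution follows; the class name and example path are made up for illustration and are not part of this patch.

    import java.io.File;
    import java.io.FileFilter;

    import org.apache.commons.io.filefilter.WildcardFileFilter;

    public class WildcardResolveSketch {

      // Mirrors the simplified lookup: an exact file wins, otherwise the basename
      // is used as a wildcard filter over the parent directory.
      static File[] resolve(String searchPath) {
        File searchFile = new File(searchPath);
        if (searchFile.isFile()) {
          return new File[]{searchFile};
        }
        FileFilter fileFilter = new WildcardFileFilter(searchFile.getName());
        File[] matches = searchFile.getParentFile().listFiles(fileFilter);
        return matches == null ? new File[0] : matches;
      }

      public static void main(String[] args) {
        for (File file : resolve("/var/log/ambari-logsearch-logfeeder/logfeeder*.log")) {
          System.out.println(file.getAbsolutePath());
        }
      }
    }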

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java
new file mode 100644
index 0000000..8e70850
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java
@@ -0,0 +1,379 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.input;
+
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.apache.ambari.logfeeder.util.FileUtil;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.io.filefilter.WildcardFileFilter;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
+import org.apache.solr.common.util.Base64;
+
+public class InputManager {
+  private static final Logger LOG = Logger.getLogger(InputManager.class);
+
+  private static final String CHECKPOINT_SUBFOLDER_NAME = "logfeeder_checkpoints";
+  public static final String DEFAULT_CHECKPOINT_EXTENSION = ".cp";
+  
+  private List<Input> inputList = new ArrayList<Input>();
+  private Set<Input> notReadyList = new HashSet<Input>();
+
+  private boolean isDrain = false;
+  private boolean isAnyInputTail = false;
+
+  private File checkPointFolderFile = null;
+
+  private MetricData filesCountMetric = new MetricData("input.files.count", true);
+
+  private String checkPointExtension;
+  
+  private Thread inputIsReadyMonitor = null;
+
+  public List<Input> getInputList() {
+    return inputList;
+  }
+
+  public void add(Input input) {
+    inputList.add(input);
+  }
+
+  public void removeInput(Input input) {
+    LOG.info("Trying to remove from inputList. " + input.getShortDescription());
+    Iterator<Input> iter = inputList.iterator();
+    while (iter.hasNext()) {
+      Input iterInput = iter.next();
+      if (iterInput.equals(input)) {
+        LOG.info("Removing Input from inputList. " + input.getShortDescription());
+        iter.remove();
+      }
+    }
+  }
+
+  private int getActiveFilesCount() {
+    int count = 0;
+    for (Input input : inputList) {
+      if (input.isReady()) {
+        count++;
+      }
+    }
+    return count;
+  }
+
+  public void init() {
+    checkPointExtension = LogFeederUtil.getStringProperty("logfeeder.checkpoint.extension", DEFAULT_CHECKPOINT_EXTENSION);
+    for (Input input : inputList) {
+      try {
+        input.init();
+        if (input.isTail()) {
+          isAnyInputTail = true;
+        }
+      } catch (Exception e) {
+        LOG.error("Error initializing input. " + input.getShortDescription(), e);
+      }
+    }
+
+    if (isAnyInputTail) {
+      LOG.info("Determining valid checkpoint folder");
+      boolean isCheckPointFolderValid = false;
+      // We need to keep track of the files we are reading.
+      String checkPointFolder = LogFeederUtil.getStringProperty("logfeeder.checkpoint.folder");
+      if (!StringUtils.isEmpty(checkPointFolder)) {
+        checkPointFolderFile = new File(checkPointFolder);
+        isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
+      }
+      if (!isCheckPointFolderValid) {
+        // Let's try home folder
+        String userHome = LogFeederUtil.getStringProperty("user.home");
+        if (userHome != null) {
+          checkPointFolderFile = new File(userHome, CHECKPOINT_SUBFOLDER_NAME);
+          LOG.info("Checking if home folder can be used for checkpoints. Folder=" + checkPointFolderFile);
+          isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
+        }
+      }
+      if (!isCheckPointFolderValid) {
+        // Let's use tmp folder
+        String tmpFolder = LogFeederUtil.getStringProperty("java.io.tmpdir");
+        if (tmpFolder == null) {
+          tmpFolder = "/tmp";
+        }
+        checkPointFolderFile = new File(tmpFolder, CHECKPOINT_SUBFOLDER_NAME);
+        LOG.info("Checking if tmp folder can be used for checkpoints. Folder=" + checkPointFolderFile);
+        isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
+        if (isCheckPointFolderValid) {
+          LOG.warn("Using tmp folder " + checkPointFolderFile + " to store check points. This is not recommended. " +
+              "Please set the logfeeder.checkpoint.folder property");
+        }
+      }
+
+      if (isCheckPointFolderValid) {
+        LOG.info("Using folder " + checkPointFolderFile + " for storing checkpoints");
+      }
+    }
+
+  }
+
+  private boolean verifyCheckPointFolder(File folderPathFile) {
+    if (!folderPathFile.exists()) {
+      try {
+        if (!folderPathFile.mkdir()) {
+          LOG.warn("Error creating folder for check point. folder=" + folderPathFile);
+        }
+      } catch (Throwable t) {
+        LOG.warn("Error creating folder for check point. folder=" + folderPathFile, t);
+      }
+    }
+
+    if (folderPathFile.exists() && folderPathFile.isDirectory()) {
+      // Let's check whether we can create a file
+      File testFile = new File(folderPathFile, UUID.randomUUID().toString());
+      try {
+        testFile.createNewFile();
+        return testFile.delete();
+      } catch (IOException e) {
+        LOG.warn("Couldn't create test file in " + folderPathFile.getAbsolutePath() + " for checkPoint", e);
+      }
+    }
+    return false;
+  }
+
+  public File getCheckPointFolderFile() {
+    return checkPointFolderFile;
+  }
+
+  public void monitor() {
+    for (Input input : inputList) {
+      if (input.isReady()) {
+        input.monitor();
+      } else {
+        if (input.isTail()) {
+          LOG.info("Adding input to not ready list. Note, it is possible this component is not run on this host. " +
+              "So it might not be an issue. " + input.getShortDescription());
+          notReadyList.add(input);
+        } else {
+          LOG.info("Input is not ready, so going to ignore it " + input.getShortDescription());
+        }
+      }
+    }
+    // Start the monitoring thread if any file is in tail mode
+    if (isAnyInputTail) {
+       inputIsReadyMonitor = new Thread("InputIsReadyMonitor") {
+        @Override
+        public void run() {
+          LOG.info("Going to monitor for these missing files: " + notReadyList.toString());
+          while (true) {
+            if (isDrain) {
+              LOG.info("Exiting missing file monitor.");
+              break;
+            }
+            try {
+              Iterator<Input> iter = notReadyList.iterator();
+              while (iter.hasNext()) {
+                Input input = iter.next();
+                try {
+                  if (input.isReady()) {
+                    input.monitor();
+                    iter.remove();
+                  }
+                } catch (Throwable t) {
+                  LOG.error("Error while enabling monitoring for input. " + input.getShortDescription());
+                }
+              }
+              Thread.sleep(30 * 1000);
+            } catch (Throwable t) {
+              // Ignore
+            }
+          }
+        }
+      };
+      inputIsReadyMonitor.start();
+    }
+  }
+
+  void addToNotReady(Input notReadyInput) {
+    notReadyList.add(notReadyInput);
+  }
+
+  public void addMetricsContainers(List<MetricData> metricsList) {
+    for (Input input : inputList) {
+      input.addMetricsContainers(metricsList);
+    }
+    filesCountMetric.value = getActiveFilesCount();
+    metricsList.add(filesCountMetric);
+  }
+
+  public void logStats() {
+    for (Input input : inputList) {
+      input.logStat();
+    }
+
+    filesCountMetric.value = getActiveFilesCount();
+    LogFeederUtil.logStatForMetric(filesCountMetric, "Stat: Files Monitored Count", "");
+  }
+
+
+  public void cleanCheckPointFiles() {
+
+    if (checkPointFolderFile == null) {
+      LOG.info("Will not clean checkPoint files. checkPointFolderFile=" + checkPointFolderFile);
+      return;
+    }
+    LOG.info("Cleaning checkPoint files. checkPointFolderFile=" + checkPointFolderFile.getAbsolutePath());
+    try {
+      // Loop over the check point files and if filePath is not present, then move to closed
+      String searchPath = "*" + checkPointExtension;
+      FileFilter fileFilter = new WildcardFileFilter(searchPath);
+      File[] checkPointFiles = checkPointFolderFile.listFiles(fileFilter);
+      int totalCheckFilesDeleted = 0;
+      for (File checkPointFile : checkPointFiles) {
+        try (RandomAccessFile checkPointReader = new RandomAccessFile(checkPointFile, "r")) {
+          int contentSize = checkPointReader.readInt();
+          byte b[] = new byte[contentSize];
+          int readSize = checkPointReader.read(b, 0, contentSize);
+          if (readSize != contentSize) {
+            LOG.error("Couldn't read expected number of bytes from checkpoint file. expected=" + contentSize + ", read="
+              + readSize + ", checkPointFile=" + checkPointFile);
+          } else {
+            String jsonCheckPointStr = new String(b, 0, readSize);
+            Map<String, Object> jsonCheckPoint = LogFeederUtil.toJSONObject(jsonCheckPointStr);
+
+            String logFilePath = (String) jsonCheckPoint.get("file_path");
+            String logFileKey = (String) jsonCheckPoint.get("file_key");
+            if (logFilePath != null && logFileKey != null) {
+              boolean deleteCheckPointFile = false;
+              File logFile = new File(logFilePath);
+              if (logFile.exists()) {
+                Object fileKeyObj = FileUtil.getFileKey(logFile);
+                String fileBase64 = Base64.byteArrayToBase64(fileKeyObj.toString().getBytes());
+                if (!logFileKey.equals(fileBase64)) {
+                  deleteCheckPointFile = true;
+                  LOG.info("CheckPoint clean: File key has changed. old=" + logFileKey + ", new=" + fileBase64 + ", filePath=" +
+                      logFilePath + ", checkPointFile=" + checkPointFile.getAbsolutePath());
+                }
+              } else {
+                LOG.info("CheckPoint clean: Log file doesn't exist. filePath=" + logFilePath + ", checkPointFile=" +
+                    checkPointFile.getAbsolutePath());
+                deleteCheckPointFile = true;
+              }
+              if (deleteCheckPointFile) {
+                LOG.info("Deleting CheckPoint file=" + checkPointFile.getAbsolutePath() + ", logFile=" + logFilePath);
+                checkPointFile.delete();
+                totalCheckFilesDeleted++;
+              }
+            }
+          }
+        } catch (EOFException eof) {
+          LOG.warn("Caught EOFException. Ignoring reading existing checkPoint file. " + checkPointFile);
+        } catch (Throwable t) {
+          LOG.error("Error while checking checkPoint file. " + checkPointFile, t);
+        }
+      }
+      LOG.info("Deleted " + totalCheckFilesDeleted + " checkPoint file(s). checkPointFolderFile=" +
+          checkPointFolderFile.getAbsolutePath());
+
+    } catch (Throwable t) {
+      LOG.error("Error while cleaning checkPointFiles", t);
+    }
+  }
+
+  public void waitOnAllInputs() {
+    //wait on inputs
+    for (Input input : inputList) {
+      if (input != null) {
+        Thread inputThread = input.getThread();
+        if (inputThread != null) {
+          try {
+            inputThread.join();
+          } catch (InterruptedException e) {
+            // ignore
+          }
+        }
+      }
+    }
+    // wait on monitor
+    if (inputIsReadyMonitor != null) {
+      try {
+        this.close();
+        inputIsReadyMonitor.join();
+      } catch (InterruptedException e) {
+        // ignore
+      }
+    }
+  }
+
+  public void checkInAll() {
+    for (Input input : inputList) {
+      input.lastCheckIn();
+    }
+  }
+
+  public void close() {
+    for (Input input : inputList) {
+      try {
+        input.setDrain(true);
+      } catch (Throwable t) {
+        LOG.error("Error while draining. input=" + input.getShortDescription(), t);
+      }
+    }
+    isDrain = true;
+
+    // Need to get this value from property
+    int iterations = 30;
+    int waitTimeMS = 1000;
+    for (int i = 0; i < iterations; i++) {
+      boolean allClosed = true;
+      for (Input input : inputList) {
+        if (!input.isClosed()) {
+          try {
+            allClosed = false;
+            LOG.warn("Waiting for input to close. " + input.getShortDescription() + ", " + (iterations - i) + " more seconds");
+            Thread.sleep(waitTimeMS);
+          } catch (Throwable t) {
+            // Ignore
+          }
+        }
+      }
+      if (allClosed) {
+        LOG.info("All inputs are closed. Iterations=" + i);
+        return;
+      }
+    }
+    
+    LOG.warn("Some inputs were not closed after " + iterations + " iterations");
+    for (Input input : inputList) {
+      if (!input.isClosed()) {
+        LOG.warn("Input not closed. Will ignore it. " + input.getShortDescription());
+      }
+    }
+  }
+}
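
The checkpoint files parsed by cleanCheckPointFiles() use a simple record layout: a 4-byte length written with writeInt(), followed by that many bytes of JSON. Below is a standalone sketch of reading and writing that layout, assuming ASCII JSON content; the class name, file name and payload are made up for illustration.

    import java.io.File;
    import java.io.IOException;
    import java.io.RandomAccessFile;

    public class CheckpointRecordSketch {

      static void write(File cpFile, String json) throws IOException {
        byte[] bytes = json.getBytes();
        try (RandomAccessFile raf = new RandomAccessFile(cpFile, "rw")) {
          raf.seek(0);                 // the feeder rewrites the record in place
          raf.writeInt(bytes.length);
          raf.write(bytes);
        }
      }

      static String read(File cpFile) throws IOException {
        try (RandomAccessFile raf = new RandomAccessFile(cpFile, "r")) {
          int contentSize = raf.readInt();
          byte[] b = new byte[contentSize];
          int readSize = raf.read(b, 0, contentSize);
          if (readSize != contentSize) {
            throw new IOException("Truncated checkpoint: expected=" + contentSize + ", read=" + readSize);
          }
          return new String(b, 0, readSize);
        }
      }

      public static void main(String[] args) throws IOException {
        File cp = File.createTempFile("logfeeder-sketch", ".cp");
        write(cp, "{\"file_path\":\"/var/log/example.log\",\"file_key\":\"a2V5\",\"line_number\":\"42\"}");
        System.out.println(read(cp));
        cp.delete();
      }
    }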

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMarker.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMarker.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMarker.java
index 48a7f1d..6767687 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMarker.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMarker.java
@@ -23,13 +23,18 @@ package org.apache.ambari.logfeeder.input;
  * This class holds the file inode and the line number of the log line currently being read
  */
 public class InputMarker {
-  public int lineNumber = 0;
-  public Input input;
-  public String base64FileKey = null;
-
+  public final Input input;
+  public final String base64FileKey;
+  public final int lineNumber;
+  
+  public InputMarker(Input input, String base64FileKey, int lineNumber) {
+    this.input = input;
+    this.base64FileKey = base64FileKey;
+    this.lineNumber = lineNumber;
+  }
+  
   @Override
   public String toString() {
-    return "InputMarker [lineNumber=" + lineNumber + ", input="
-      + input.getShortDescription() + "]";
+    return "InputMarker [lineNumber=" + lineNumber + ", input=" + input.getShortDescription() + "]";
   }
 }
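
Because the marker is now immutable, a single instance can be handed from the reader thread to the output and checkpoint writers without synchronization. A tiny standalone illustration of that pattern (the names below are hypothetical, not from this patch):

    public final class MarkerSketch {
      public final String base64FileKey;
      public final int lineNumber;

      public MarkerSketch(String base64FileKey, int lineNumber) {
        this.base64FileKey = base64FileKey;   // set once, never mutated
        this.lineNumber = lineNumber;
      }

      public static void main(String[] args) {
        MarkerSketch marker = new MarkerSketch("ZmlsZS1rZXk=", 42);  // one marker per emitted line
        System.out.println("lineNumber=" + marker.lineNumber + ", fileKey=" + marker.base64FileKey);
      }
    }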

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMgr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMgr.java
deleted file mode 100644
index b18c9b0..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputMgr.java
+++ /dev/null
@@ -1,451 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder.input;
-
-import java.io.EOFException;
-import java.io.File;
-import java.io.FileFilter;
-import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.UUID;
-
-import org.apache.ambari.logfeeder.metrics.MetricCount;
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.commons.io.filefilter.WildcardFileFilter;
-import org.apache.log4j.Logger;
-import org.apache.solr.common.util.Base64;
-
-public class InputMgr {
-  private static final Logger logger = Logger.getLogger(InputMgr.class);
-
-  private List<Input> inputList = new ArrayList<Input>();
-  private Set<Input> notReadyList = new HashSet<Input>();
-
-  private boolean isDrain = false;
-  private boolean isAnyInputTail = false;
-
-  private String checkPointSubFolderName = "logfeeder_checkpoints";
-  private File checkPointFolderFile = null;
-
-  private MetricCount filesCountMetric = new MetricCount();
-
-  private String checkPointExtension = ".cp";
-  
-  private Thread inputIsReadyMonitor = null;
-
-  public List<Input> getInputList() {
-    return inputList;
-  }
-
-  public void add(Input input) {
-    inputList.add(input);
-  }
-
-  public void removeInput(Input input) {
-    logger.info("Trying to remove from inputList. "
-      + input.getShortDescription());
-    Iterator<Input> iter = inputList.iterator();
-    while (iter.hasNext()) {
-      Input iterInput = iter.next();
-      if (iterInput.equals(input)) {
-        logger.info("Removing Input from inputList. "
-          + input.getShortDescription());
-        iter.remove();
-      }
-    }
-  }
-
-  public int getActiveFilesCount() {
-    int count = 0;
-    for (Input input : inputList) {
-      if (input.isReady()) {
-        count++;
-      }
-    }
-    return count;
-  }
-
-  public void init() {
-    filesCountMetric.metricsName = "input.files.count";
-    filesCountMetric.isPointInTime = true;
-
-    checkPointExtension = LogFeederUtil.getStringProperty(
-      "logfeeder.checkpoint.extension", checkPointExtension);
-    for (Input input : inputList) {
-      try {
-        input.init();
-        if (input.isTail()) {
-          isAnyInputTail = true;
-        }
-      } catch (Exception e) {
-        logger.error(
-          "Error initializing input. "
-            + input.getShortDescription(), e);
-      }
-    }
-
-    if (isAnyInputTail) {
-      logger.info("Determining valid checkpoint folder");
-      boolean isCheckPointFolderValid = false;
-      // We need to keep track of the files we are reading.
-      String checkPointFolder = LogFeederUtil
-        .getStringProperty("logfeeder.checkpoint.folder");
-      if (checkPointFolder != null && !checkPointFolder.isEmpty()) {
-        checkPointFolderFile = new File(checkPointFolder);
-        isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
-      }
-      if (!isCheckPointFolderValid) {
-        // Let's try home folder
-        String userHome = LogFeederUtil.getStringProperty("user.home");
-        if (userHome != null) {
-          checkPointFolderFile = new File(userHome,
-            checkPointSubFolderName);
-          logger.info("Checking if home folder can be used for checkpoints. Folder="
-            + checkPointFolderFile);
-          isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
-        }
-      }
-      if (!isCheckPointFolderValid) {
-        // Let's use tmp folder
-        String tmpFolder = LogFeederUtil
-          .getStringProperty("java.io.tmpdir");
-        if (tmpFolder == null) {
-          tmpFolder = "/tmp";
-        }
-        checkPointFolderFile = new File(tmpFolder,
-          checkPointSubFolderName);
-        logger.info("Checking if tmps folder can be used for checkpoints. Folder="
-          + checkPointFolderFile);
-        isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
-        if (isCheckPointFolderValid) {
-          logger.warn("Using tmp folder "
-            + checkPointFolderFile
-            + " to store check points. This is not recommended."
-            + "Please set logfeeder.checkpoint.folder property");
-        }
-      }
-
-      if (isCheckPointFolderValid) {
-        logger.info("Using folder " + checkPointFolderFile
-          + " for storing checkpoints");
-      }
-    }
-
-  }
-
-  public File getCheckPointFolderFile() {
-    return checkPointFolderFile;
-  }
-
-  private boolean verifyCheckPointFolder(File folderPathFile) {
-    if (!folderPathFile.exists()) {
-      // Create the folder
-      try {
-        if (!folderPathFile.mkdir()) {
-          logger.warn("Error creating folder for check point. folder="
-            + folderPathFile);
-        }
-      } catch (Throwable t) {
-        logger.warn("Error creating folder for check point. folder="
-          + folderPathFile, t);
-      }
-    }
-
-    if (folderPathFile.exists() && folderPathFile.isDirectory()) {
-      // Let's check whether we can create a file
-      File testFile = new File(folderPathFile, UUID.randomUUID()
-        .toString());
-      try {
-        testFile.createNewFile();
-        return testFile.delete();
-      } catch (IOException e) {
-        logger.warn(
-          "Couldn't create test file in "
-            + folderPathFile.getAbsolutePath()
-            + " for checkPoint", e);
-      }
-    }
-    return false;
-  }
-
-  public void monitor() {
-    for (Input input : inputList) {
-      if (input.isReady()) {
-        input.monitor();
-      } else {
-        if (input.isTail()) {
-          logger.info("Adding input to not ready list. Note, it is possible this component is not run on this host. So it might not be an issue. "
-            + input.getShortDescription());
-          notReadyList.add(input);
-        } else {
-          logger.info("Input is not ready, so going to ignore it "
-            + input.getShortDescription());
-        }
-      }
-    }
-    // Start the monitoring thread if any file is in tail mode
-    if (isAnyInputTail) {
-       inputIsReadyMonitor = new Thread("InputIsReadyMonitor") {
-        @Override
-        public void run() {
-          logger.info("Going to monitor for these missing files: "
-            + notReadyList.toString());
-          while (true) {
-            if (isDrain) {
-              logger.info("Exiting missing file monitor.");
-              break;
-            }
-            try {
-              Iterator<Input> iter = notReadyList.iterator();
-              while (iter.hasNext()) {
-                Input input = iter.next();
-                try {
-                  if (input.isReady()) {
-                    input.monitor();
-                    iter.remove();
-                  }
-                } catch (Throwable t) {
-                  logger.error("Error while enabling monitoring for input. "
-                    + input.getShortDescription());
-                }
-              }
-              Thread.sleep(30 * 1000);
-            } catch (Throwable t) {
-              // Ignore
-            }
-          }
-        }
-      };
-      inputIsReadyMonitor.start();
-    }
-  }
-
-  public void addToNotReady(Input notReadyInput) {
-    notReadyList.add(notReadyInput);
-  }
-
-  public void addMetricsContainers(List<MetricCount> metricsList) {
-    for (Input input : inputList) {
-      input.addMetricsContainers(metricsList);
-    }
-    filesCountMetric.count = getActiveFilesCount();
-    metricsList.add(filesCountMetric);
-  }
-
-  public void logStats() {
-    for (Input input : inputList) {
-      input.logStat();
-    }
-
-    filesCountMetric.count = getActiveFilesCount();
-    LogFeederUtil.logStatForMetric(filesCountMetric,
-      "Stat: Files Monitored Count", null);
-  }
-
-  public void close() {
-    for (Input input : inputList) {
-      try {
-        input.setDrain(true);
-      } catch (Throwable t) {
-        logger.error(
-          "Error while draining. input="
-            + input.getShortDescription(), t);
-      }
-    }
-    isDrain = true;
-
-    // Need to get this value from property
-    int iterations = 30;
-    int waitTimeMS = 1000;
-    int i = 0;
-    boolean allClosed = true;
-    for (i = 0; i < iterations; i++) {
-      allClosed = true;
-      for (Input input : inputList) {
-        if (!input.isClosed()) {
-          try {
-            allClosed = false;
-            logger.warn("Waiting for input to close. "
-              + input.getShortDescription() + ", "
-              + (iterations - i) + " more seconds");
-            Thread.sleep(waitTimeMS);
-          } catch (Throwable t) {
-            // Ignore
-          }
-        }
-      }
-      if (allClosed) {
-        break;
-      }
-    }
-    if (!allClosed) {
-      logger.warn("Some inputs were not closed. Iterations=" + i);
-      for (Input input : inputList) {
-        if (!input.isClosed()) {
-          logger.warn("Input not closed. Will ignore it."
-            + input.getShortDescription());
-        }
-      }
-    } else {
-      logger.info("All inputs are closed. Iterations=" + i);
-    }
-
-  }
-
-  public void checkInAll() {
-    for (Input input : inputList) {
-      input.checkIn();
-    }
-  }
-
-  public void cleanCheckPointFiles() {
-
-    if (checkPointFolderFile == null) {
-      logger.info("Will not clean checkPoint files. checkPointFolderFile="
-        + checkPointFolderFile);
-      return;
-    }
-    logger.info("Cleaning checkPoint files. checkPointFolderFile="
-      + checkPointFolderFile.getAbsolutePath());
-    try {
-      // Loop over the check point files and if filePath is not present, then move to closed
-      String searchPath = "*" + checkPointExtension;
-      FileFilter fileFilter = new WildcardFileFilter(searchPath);
-      File[] checkPointFiles = checkPointFolderFile.listFiles(fileFilter);
-      int totalCheckFilesDeleted = 0;
-      for (File checkPointFile : checkPointFiles) {
-        RandomAccessFile checkPointReader = null;
-        try {
-          checkPointReader = new RandomAccessFile(checkPointFile, "r");
-
-          int contentSize = checkPointReader.readInt();
-          byte b[] = new byte[contentSize];
-          int readSize = checkPointReader.read(b, 0, contentSize);
-          if (readSize != contentSize) {
-            logger.error("Couldn't read expected number of bytes from checkpoint file. expected="
-              + contentSize
-              + ", read="
-              + readSize
-              + ", checkPointFile=" + checkPointFile);
-          } else {
-            // Create JSON string
-            String jsonCheckPointStr = new String(b, 0, readSize);
-            Map<String, Object> jsonCheckPoint = LogFeederUtil
-              .toJSONObject(jsonCheckPointStr);
-
-            String logFilePath = (String) jsonCheckPoint
-              .get("file_path");
-            String logFileKey = (String) jsonCheckPoint
-              .get("file_key");
-            if (logFilePath != null && logFileKey != null) {
-              boolean deleteCheckPointFile = false;
-              File logFile = new File(logFilePath);
-              if (logFile.exists()) {
-                Object fileKeyObj = InputFile
-                  .getFileKey(logFile);
-                String fileBase64 = Base64
-                  .byteArrayToBase64(fileKeyObj
-                    .toString().getBytes());
-                if (!logFileKey.equals(fileBase64)) {
-                  deleteCheckPointFile = true;
-                  logger.info("CheckPoint clean: File key has changed. old="
-                    + logFileKey
-                    + ", new="
-                    + fileBase64
-                    + ", filePath="
-                    + logFilePath
-                    + ", checkPointFile="
-                    + checkPointFile.getAbsolutePath());
-                }
-              } else {
-                logger.info("CheckPoint clean: Log file doesn't exist. filePath="
-                  + logFilePath
-                  + ", checkPointFile="
-                  + checkPointFile.getAbsolutePath());
-                deleteCheckPointFile = true;
-              }
-              if (deleteCheckPointFile) {
-                logger.info("Deleting CheckPoint file="
-                  + checkPointFile.getAbsolutePath()
-                  + ", logFile=" + logFilePath);
-                checkPointFile.delete();
-                totalCheckFilesDeleted++;
-              }
-            }
-          }
-        } catch (EOFException eof) {
-          logger.warn("Caught EOFException. Ignoring reading existing checkPoint file. "
-            + checkPointFile);
-        } catch (Throwable t) {
-          logger.error("Error while checking checkPoint file. "
-            + checkPointFile, t);
-        } finally {
-          if (checkPointReader != null) {
-            try {
-              checkPointReader.close();
-            } catch (Throwable t) {
-              logger.error("Error closing checkPoint file. "
-                + checkPointFile, t);
-            }
-          }
-        }
-      }
-      logger.info("Deleted " + totalCheckFilesDeleted
-        + " checkPoint file(s). checkPointFolderFile="
-        + checkPointFolderFile.getAbsolutePath());
-
-    } catch (Throwable t) {
-      logger.error("Error while cleaning checkPointFiles", t);
-    }
-  }
-
-  public void waitOnAllInputs() {
-    //wait on inputs
-    if (inputList != null) {
-      for (Input input : inputList) {
-        if (input != null) {
-          Thread inputThread = input.getThread();
-          if (inputThread != null) {
-            try {
-              inputThread.join();
-            } catch (InterruptedException e) {
-              // ignore
-            }
-          }
-        }
-      }
-    }
-    // wait on monitor
-    if (inputIsReadyMonitor != null) {
-      try {
-        this.close();
-        inputIsReadyMonitor.join();
-      } catch (InterruptedException e) {
-        // ignore
-      }
-    }
-  }
-}
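
For context on the checkpoint files cleaned up above: Log Feeder stores each checkpoint as a length-prefixed JSON blob (an int header followed by the JSON bytes) in a <base64(file_key)>.cp file, and the cleanup code parses that blob to decide whether the underlying log file is gone or rotated. A minimal standalone sketch of the same framing, with an illustrative path and payload that are not taken from the patch:

  import java.io.File;
  import java.io.RandomAccessFile;

  public class CheckpointFramingSketch {
    public static void main(String[] args) throws Exception {
      // Real checkpoints live in the Log Feeder checkpoint folder and are named
      // <base64(file_key)> + ".cp"; the path below is only for the sketch.
      File cpFile = new File("/tmp/example.cp");

      // Write: rewind to offset 0, then a 4-byte length header followed by the JSON payload.
      String json = "{\"file_path\":\"/var/log/example.log\",\"file_key\":\"a2V5\",\"line_number\":\"42\"}";
      try (RandomAccessFile writer = new RandomAccessFile(cpFile, "rw")) {
        writer.seek(0);
        writer.writeInt(json.length());
        writer.write(json.getBytes());
      }

      // Read: length header first, then exactly that many bytes back into a JSON string.
      try (RandomAccessFile reader = new RandomAccessFile(cpFile, "r")) {
        int contentSize = reader.readInt();
        byte[] b = new byte[contentSize];
        int readSize = reader.read(b, 0, contentSize);
        if (readSize != contentSize) {
          throw new IllegalStateException("Truncated checkpoint: expected=" + contentSize + ", read=" + readSize);
        }
        System.out.println(new String(b, 0, readSize));
      }
    }
  }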

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java
index c9d28bd..f560379 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputS3File.java
@@ -19,201 +19,57 @@
 package org.apache.ambari.logfeeder.input;
 
 import java.io.BufferedReader;
-import java.io.EOFException;
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
 
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.ambari.logfeeder.util.S3Util;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
+import org.apache.commons.lang.ArrayUtils;
 import org.apache.solr.common.util.Base64;
 
-public class InputS3File extends Input {
-  private static final Logger logger = Logger.getLogger(InputS3File.class);
-
-  private String logPath = null;
-  private boolean isStartFromBegining = true;
-
-  private boolean isReady = false;
-  private String[] s3LogPathFiles = null;
-  private Object fileKey = null;
-  private String base64FileKey = null;
-
-  private boolean isRolledOver = false;
-  private boolean addWildCard = false;
-
-  private long lastCheckPointTimeMS = 0;
-  private int checkPointIntervalMS = 5 * 1000; // 5 seconds
-  private RandomAccessFile checkPointWriter = null;
-  private Map<String, Object> jsonCheckPoint = null;
-
-  private File checkPointFile = null;
-
-  private InputMarker lastCheckPointInputMarker = null;
-
-  private String checkPointExtension = ".cp";
-
-
-  @Override
-  public void init() throws Exception {
-    logger.info("init() called");
-    statMetric.metricsName = "input.files.read_lines";
-    readBytesMetric.metricsName = "input.files.read_bytes";
-    checkPointExtension = LogFeederUtil.getStringProperty(
-        "logfeeder.checkpoint.extension", checkPointExtension);
-
-    // Let's close the file and set it to true after we start monitoring it
-    setClosed(true);
-    logPath = getStringValue("path");
-    tail = getBooleanValue("tail", tail);
-    addWildCard = getBooleanValue("add_wild_card", addWildCard);
-    checkPointIntervalMS = getIntValue("checkpoint.interval.ms",
-        checkPointIntervalMS);
-    if (logPath == null || logPath.isEmpty()) {
-      logger.error("path is empty for file input. " + getShortDescription());
-      return;
-    }
-
-    String startPosition = getStringValue("start_position");
-    if (StringUtils.isEmpty(startPosition)
-        || startPosition.equalsIgnoreCase("beginning")
-        || startPosition.equalsIgnoreCase("begining")) {
-      isStartFromBegining = true;
-    }
-
-    if (!tail) {
-      // start position end doesn't apply if we are not tailing
-      isStartFromBegining = true;
-    }
-
-    setFilePath(logPath);
-    boolean isFileReady = isReady();
-
-    logger.info("File to monitor " + logPath + ", tail=" + tail
-        + ", addWildCard=" + addWildCard + ", isReady=" + isFileReady);
-
-    super.init();
-  }
+public class InputS3File extends AbstractInputFile {
 
   @Override
   public boolean isReady() {
     if (!isReady) {
       // Let's try to check whether the file is available
-      s3LogPathFiles = getActualFiles(logPath);
-      if (s3LogPathFiles != null && s3LogPathFiles.length > 0) {
-        if (isTail() && s3LogPathFiles.length > 1) {
-          logger.warn("Found multiple files (" + s3LogPathFiles.length
-              + ") for the file filter " + filePath
-              + ". Will use only the first one. Using " + s3LogPathFiles[0]);
+      logFiles = getActualFiles(logPath);
+      if (!ArrayUtils.isEmpty(logFiles)) {
+        if (tail && logFiles.length > 1) {
+          LOG.warn("Found multiple files (" + logFiles.length + ") for the file filter " + filePath +
+              ". Will use only the first one. Using " + logFiles[0].getAbsolutePath());
         }
-        logger.info("File filter " + filePath + " expanded to "
-            + s3LogPathFiles[0]);
+        LOG.info("File filter " + filePath + " expanded to " + logFiles[0].getAbsolutePath());
         isReady = true;
       } else {
-        logger.debug(logPath + " file doesn't exist. Ignoring for now");
+        LOG.debug(logPath + " file doesn't exist. Ignoring for now");
       }
     }
     return isReady;
   }
 
-  private String[] getActualFiles(String searchPath) {
+  private File[] getActualFiles(String searchPath) {
     // TODO search file on s3
-    return new String[] { searchPath };
-  }
-
-  @Override
-  synchronized public void checkIn(InputMarker inputMarker) {
-    super.checkIn(inputMarker);
-    if (checkPointWriter != null) {
-      try {
-        int lineNumber = LogFeederUtil.objectToInt(
-            jsonCheckPoint.get("line_number"), 0, "line_number");
-        if (lineNumber > inputMarker.lineNumber) {
-          // Already wrote higher line number for this input
-          return;
-        }
-        // If interval is greater than last checkPoint time, then write
-        long currMS = System.currentTimeMillis();
-        if (!isClosed()
-            && (currMS - lastCheckPointTimeMS) < checkPointIntervalMS) {
-          // Let's save this one so we can update the check point file
-          // on flush
-          lastCheckPointInputMarker = inputMarker;
-          return;
-        }
-        lastCheckPointTimeMS = currMS;
-
-        jsonCheckPoint.put("line_number", ""
-            + new Integer(inputMarker.lineNumber));
-        jsonCheckPoint.put("last_write_time_ms", "" + new Long(currMS));
-        jsonCheckPoint.put("last_write_time_date", new Date());
-
-        String jsonStr = LogFeederUtil.getGson().toJson(jsonCheckPoint);
-
-        // Let's rewind
-        checkPointWriter.seek(0);
-        checkPointWriter.writeInt(jsonStr.length());
-        checkPointWriter.write(jsonStr.getBytes());
-
-        if (isClosed()) {
-          final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-              + "_FINAL_CHECKIN";
-          LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
-              "Wrote final checkPoint, input=" + getShortDescription()
-                  + ", checkPointFile=" + checkPointFile.getAbsolutePath()
-                  + ", checkPoint=" + jsonStr, null, logger, Level.INFO);
-        }
-      } catch (Throwable t) {
-        final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-            + "_CHECKIN_EXCEPTION";
-        LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
-            "Caught exception checkIn. , input=" + getShortDescription(), t,
-            logger, Level.ERROR);
-      }
-    }
-
-  }
-
-  @Override
-  public void checkIn() {
-    super.checkIn();
-    if (lastCheckPointInputMarker != null) {
-      checkIn(lastCheckPointInputMarker);
-    }
-  }
-
-  @Override
-  public void rollOver() {
-    logger.info("Marking this input file for rollover. "
-        + getShortDescription());
-    isRolledOver = true;
+    return new File[] { new File(searchPath) };
   }
 
   @Override
   void start() throws Exception {
-    if (s3LogPathFiles == null || s3LogPathFiles.length == 0) {
+    if (ArrayUtils.isEmpty(logFiles)) {
       return;
     }
 
-    if (isTail()) {
-      processFile(s3LogPathFiles[0]);
+    if (tail) {
+      processFile(logFiles[0]);
     } else {
-      for (String s3FilePath : s3LogPathFiles) {
+      for (File s3FilePath : logFiles) {
         try {
           processFile(s3FilePath);
           if (isClosed() || isDrain()) {
-            logger.info("isClosed or isDrain. Now breaking loop.");
+            LOG.info("isClosed or isDrain. Now breaking loop.");
             break;
           }
         } catch (Throwable t) {
-          logger.error("Error processing file=" + s3FilePath, t);
+          LOG.error("Error processing file=" + s3FilePath, t);
         }
       }
     }
@@ -221,244 +77,18 @@ public class InputS3File extends Input {
   }
 
   @Override
-  public void close() {
-    super.close();
-    logger.info("close() calling checkPoint checkIn(). "
-        + getShortDescription());
-    checkIn();
-  }
-
-  private void processFile(String logPathFile) throws FileNotFoundException,
-      IOException {
-    logger.info("Monitoring logPath=" + logPath + ", logPathFile="
-        + logPathFile);
-    BufferedReader br = null;
-    checkPointFile = null;
-    checkPointWriter = null;
-    jsonCheckPoint = null;
-    int resumeFromLineNumber = 0;
-
-    int lineCount = 0;
-    try {
-      setFilePath(logPathFile);
-      String s3AccessKey = getStringValue("s3_access_key");
-      String s3SecretKey = getStringValue("s3_secret_key");
-      br = S3Util.INSTANCE.getReader(logPathFile,s3AccessKey,s3SecretKey);
-      if(br==null){
-        //log err
-        return;
-      }
-      
-      // Whether to send to output from the beginning.
-      boolean resume = isStartFromBegining;
-
-      // Seems FileWatch is not reliable, so let's only use file key comparison
-      fileKey = getFileKey(logPathFile);
-      base64FileKey = Base64.byteArrayToBase64(fileKey.toString().getBytes());
-      logger.info("fileKey=" + fileKey + ", base64=" + base64FileKey + ". "
-          + getShortDescription());
-
-      if (isTail()) {
-        try {
-          // Let's see if there is a checkpoint for this file
-          logger.info("Checking existing checkpoint file. "
-              + getShortDescription());
-
-          String fileBase64 = Base64.byteArrayToBase64(fileKey.toString()
-              .getBytes());
-          String checkPointFileName = fileBase64 + checkPointExtension;
-          File checkPointFolder = inputMgr.getCheckPointFolderFile();
-          checkPointFile = new File(checkPointFolder, checkPointFileName);
-          checkPointWriter = new RandomAccessFile(checkPointFile, "rw");
-
-          try {
-            int contentSize = checkPointWriter.readInt();
-            byte b[] = new byte[contentSize];
-            int readSize = checkPointWriter.read(b, 0, contentSize);
-            if (readSize != contentSize) {
-              logger
-                  .error("Couldn't read expected number of bytes from checkpoint file. expected="
-                      + contentSize
-                      + ", read="
-                      + readSize
-                      + ", checkPointFile="
-                      + checkPointFile
-                      + ", input="
-                      + getShortDescription());
-            } else {
-              String jsonCheckPointStr = new String(b, 0, readSize);
-              jsonCheckPoint = LogFeederUtil.toJSONObject(jsonCheckPointStr);
-
-              resumeFromLineNumber = LogFeederUtil.objectToInt(
-                  jsonCheckPoint.get("line_number"), 0, "line_number");
-
-              if (resumeFromLineNumber > 0) {
-                // Let's read from last line read
-                resume = false;
-              }
-              logger.info("CheckPoint. checkPointFile=" + checkPointFile
-                  + ", json=" + jsonCheckPointStr + ", resumeFromLineNumber="
-                  + resumeFromLineNumber + ", resume=" + resume);
-            }
-          } catch (EOFException eofEx) {
-            logger.info("EOFException. Will reset checkpoint file "
-                + checkPointFile.getAbsolutePath() + " for "
-                + getShortDescription());
-          }
-          if (jsonCheckPoint == null) {
-            // This seems to be first time, so creating the initial
-            // checkPoint object
-            jsonCheckPoint = new HashMap<String, Object>();
-            jsonCheckPoint.put("file_path", filePath);
-            jsonCheckPoint.put("file_key", fileBase64);
-          }
-
-        } catch (Throwable t) {
-          logger.error(
-              "Error while configuring checkpoint file. Will reset file. checkPointFile="
-                  + checkPointFile, t);
-        }
-      }
-
-      setClosed(false);
-      int sleepStep = 2;
-      int sleepIteration = 0;
-      while (true) {
-        try {
-          if (isDrain()) {
-            break;
-          }
-
-          String line = br.readLine();
-          if (line == null) {
-            if (!resume) {
-              resume = true;
-            }
-            sleepIteration++;
-            try {
-              // Since FileWatch service is not reliable, we will check
-              // file inode every n seconds after no write
-              if (sleepIteration > 4) {
-                Object newFileKey = getFileKey(logPathFile);
-                if (newFileKey != null) {
-                  if (fileKey == null || !newFileKey.equals(fileKey)) {
-                    logger
-                        .info("File key is different. Calling rollover. oldKey="
-                            + fileKey
-                            + ", newKey="
-                            + newFileKey
-                            + ". "
-                            + getShortDescription());
-                    // File has rotated.
-                    rollOver();
-                  }
-                }
-              }
-              // Flush on the second iteration
-              if (!tail && sleepIteration >= 2) {
-                logger.info("End of file. Done with filePath=" + logPathFile
-                    + ", lineCount=" + lineCount);
-                flush();
-                break;
-              } else if (sleepIteration == 2) {
-                flush();
-              } else if (sleepIteration >= 2) {
-                if (isRolledOver) {
-                  isRolledOver = false;
-                  // Close existing file
-                  try {
-                    logger
-                        .info("File is rolled over. Closing current open file."
-                            + getShortDescription() + ", lineCount="
-                            + lineCount);
-                    br.close();
-                  } catch (Exception ex) {
-                    logger.error("Error closing file" + getShortDescription());
-                    break;
-                  }
-                  try {
-                    // Open new file
-                    logger.info("Opening new rolled over file."
-                        + getShortDescription());
-                    br = S3Util.INSTANCE.getReader(logPathFile,s3AccessKey,s3SecretKey);
-                    lineCount = 0;
-                    fileKey = getFileKey(logPathFile);
-                    base64FileKey = Base64.byteArrayToBase64(fileKey.toString()
-                        .getBytes());
-                    logger.info("fileKey=" + fileKey + ", base64="
-                        + base64FileKey + ", " + getShortDescription());
-                  } catch (Exception ex) {
-                    logger.error("Error opening rolled over file. "
-                        + getShortDescription());
-                    // Let's add this to monitoring and exit this thread
-                    logger.info("Added input to not ready list."
-                        + getShortDescription());
-                    isReady = false;
-                    inputMgr.addToNotReady(this);
-                    break;
-                  }
-                  logger.info("File is successfully rolled over. "
-                      + getShortDescription());
-                  continue;
-                }
-              }
-              Thread.sleep(sleepStep * 1000);
-              sleepStep = (sleepStep * 2);
-              sleepStep = sleepStep > 10 ? 10 : sleepStep;
-            } catch (InterruptedException e) {
-              logger.info("Thread interrupted." + getShortDescription());
-            }
-          } else {
-            lineCount++;
-            sleepStep = 1;
-            sleepIteration = 0;
-
-            if (!resume && lineCount > resumeFromLineNumber) {
-              logger.info("Resuming to read from last line. lineCount="
-                  + lineCount + ", input=" + getShortDescription());
-              resume = true;
-            }
-            if (resume) {
-              InputMarker marker = new InputMarker();
-              marker.base64FileKey = base64FileKey;
-              marker.input = this;
-              marker.lineNumber = lineCount;
-              outputLine(line, marker);
-            }
-          }
-        } catch (Throwable t) {
-          final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-              + "_READ_LOOP_EXCEPTION";
-          LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
-              "Caught exception in read loop. lineNumber=" + lineCount
-                  + ", input=" + getShortDescription(), t, logger, Level.ERROR);
-
-        }
-      }
-    } finally {
-      if (br != null) {
-        logger.info("Closing reader." + getShortDescription() + ", lineCount="
-            + lineCount);
-        try {
-          br.close();
-        } catch (Throwable t) {
-          // ignore
-        }
-      }
-    }
-  }
-
-  static public Object getFileKey(String s3FilePath) {
-    return s3FilePath.toString();
+  protected BufferedReader openLogFile(File logPathFile) throws IOException {
+    String s3AccessKey = getStringValue("s3_access_key");
+    String s3SecretKey = getStringValue("s3_secret_key");
+    BufferedReader br = S3Util.getReader(logPathFile.getPath(), s3AccessKey, s3SecretKey);
+    fileKey = getFileKey(logPathFile);
+    base64FileKey = Base64.byteArrayToBase64(fileKey.toString().getBytes());
+    LOG.info("fileKey=" + fileKey + ", base64=" + base64FileKey + ". " + getShortDescription());
+    return br;
   }
 
   @Override
-  public String getShortDescription() {
-    return "input:source="
-        + getStringValue("source")
-        + ", path="
-        + (s3LogPathFiles != null && s3LogPathFiles.length > 0 ? s3LogPathFiles[0]
-            : getStringValue("path"));
+  protected Object getFileKey(File logFile) {
+    return logFile.getPath();
   }
-
 }
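
The net effect of the rewrite above is that InputS3File keeps only its file-specific behaviour and inherits the tail/checkpoint machinery from AbstractInputFile, introduced earlier in this patch series. A hypothetical sibling subclass, shown only to illustrate the two hooks and assuming the abstract class requires nothing beyond them; the class name and plain-file behaviour are invented for the sketch:

  import java.io.BufferedReader;
  import java.io.File;
  import java.io.FileReader;
  import java.io.IOException;

  public class InputPlainFileSketch extends AbstractInputFile {

    @Override
    protected BufferedReader openLogFile(File logPathFile) throws IOException {
      // InputS3File additionally refreshes fileKey/base64FileKey here before returning the reader.
      return new BufferedReader(new FileReader(logPathFile));
    }

    @Override
    protected Object getFileKey(File logFile) {
      // Any value that stays stable across restarts works; InputS3File simply uses the path.
      return logFile.getPath();
    }
  }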

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
index 5ba56a5..743be69 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
@@ -18,7 +18,7 @@
  */
 package org.apache.ambari.logfeeder.input;
 
-import java.net.Inet4Address;
+import java.net.InetAddress;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
@@ -66,7 +66,7 @@ public class InputSimulate extends Input {
     
     Filter filter = new FilterJSON();
     filter.setInput(this);
-    setFirstFilter(filter);
+    addFilter(filter);
   }
   
   private List<String> getSimulatedLogTypes() {
@@ -88,23 +88,18 @@ public class InputSimulate extends Input {
     
     return LOG_TEXT_PATTERN.replaceAll("<LOG_MESSAGE_PATTERN>", logMessagePattern);
   }
-  
-  @Override
-  public String getNameForThread() {
-    return "Simulated input";
-  }
 
   @Override
-  public String getShortDescription() {
-    return "Simulated input";
+  public boolean isReady() {
+    return true;
   }
-  
+
   @Override
   void start() throws Exception {
     if (types.isEmpty())
       return;
     
-    getFirstFilter().setOutputMgr(outputMgr);
+    getFirstFilter().setOutputManager(outputManager);
     while (true) {
       String type = imitateRandomLogFile();
       
@@ -129,10 +124,7 @@ public class InputSimulate extends Input {
   }
 
   private InputMarker getInputMarker(String type) throws Exception {
-    InputMarker marker = new InputMarker();
-    marker.input = this;
-    marker.lineNumber = getLineNumber(type);
-    marker.base64FileKey = getBase64FileKey();
+    InputMarker marker = new InputMarker(this, getBase64FileKey(), getLineNumber(type));
     return marker;
   }
 
@@ -147,7 +139,7 @@ public class InputSimulate extends Input {
   }
 
   private String getBase64FileKey() throws Exception {
-    String fileKey = Inet4Address.getLocalHost().getHostAddress() + "|" + filePath;
+    String fileKey = InetAddress.getLocalHost().getHostAddress() + "|" + filePath;
     return Base64.byteArrayToBase64(fileKey.getBytes());
   }
 
@@ -155,4 +147,20 @@ public class InputSimulate extends Input {
     Date d = new Date();
     return String.format(logText, d.getTime(), level, marker.lineNumber);
   }
+
+  @Override
+  public void checkIn(InputMarker inputMarker) {}
+
+  @Override
+  public void lastCheckIn() {}
+  
+  @Override
+  public String getNameForThread() {
+    return "Simulated input";
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "Simulated input";
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/GZIPReader.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/GZIPReader.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/GZIPReader.java
index a2a9db2..9ccc4f2 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/GZIPReader.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/GZIPReader.java
@@ -18,7 +18,6 @@
  */
 package org.apache.ambari.logfeeder.input.reader;
 
-import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -30,15 +29,11 @@ import org.apache.log4j.Logger;
 
 class GZIPReader extends InputStreamReader {
 
-  private static Logger logger = Logger.getLogger(GZIPReader.class);
+  private static final Logger LOG = Logger.getLogger(GZIPReader.class);
 
   GZIPReader(String fileName) throws FileNotFoundException {
     super(getStream(fileName));
-    logger.info("Created GZIPReader for file : " + fileName);
-  }
-
-  GZIPReader(File file) throws FileNotFoundException {
-    super(getStream(file.getName()));
+    LOG.info("Created GZIPReader for file : " + fileName);
   }
 
   private static InputStream getStream(String fileName) {
@@ -48,7 +43,7 @@ class GZIPReader extends InputStreamReader {
       fileStream = new FileInputStream(fileName);
       gzipStream = new GZIPInputStream(fileStream);
     } catch (Exception e) {
-      logger.error(e, e.getCause());
+      LOG.error(e, e.getCause());
     }
     return gzipStream;
   }
@@ -58,21 +53,13 @@ class GZIPReader extends InputStreamReader {
    */
   static boolean isValidFile(String fileName) {
     // TODO make it generic and put in factory itself
-    InputStream is = null;
-    try {
-      is = new FileInputStream(fileName);
+    
+    try (InputStream is = new FileInputStream(fileName)) {
       byte[] signature = new byte[2];
       int nread = is.read(signature); // read the gzip signature
       return nread == 2 && signature[0] == (byte) 0x1f && signature[1] == (byte) 0x8b;
     } catch (IOException e) {
       return false;
-    } finally {
-      if (is != null) {
-        try {
-          is.close();
-        } catch (IOException e) {
-        }
-      }
     }
   }
 }
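
The isValidFile() cleanup above keeps the same detection rule: every gzip stream starts with the two magic bytes 0x1f 0x8b. A small self-contained sketch of how that check can drive the reader choice the TODO alludes to; the factory class below is illustrative, not the project's actual factory:

  import java.io.BufferedReader;
  import java.io.FileInputStream;
  import java.io.FileReader;
  import java.io.IOException;
  import java.io.InputStream;
  import java.io.InputStreamReader;
  import java.util.zip.GZIPInputStream;

  public class ReaderChoiceSketch {

    public static BufferedReader open(String fileName) throws IOException {
      if (looksLikeGzip(fileName)) {
        return new BufferedReader(new InputStreamReader(new GZIPInputStream(new FileInputStream(fileName))));
      }
      return new BufferedReader(new FileReader(fileName));
    }

    private static boolean looksLikeGzip(String fileName) {
      // Same check as GZIPReader.isValidFile above; unreadable files count as non-gzip.
      try (InputStream is = new FileInputStream(fileName)) {
        byte[] signature = new byte[2];
        return is.read(signature) == 2 && signature[0] == (byte) 0x1f && signature[1] == (byte) 0x8b;
      } catch (IOException e) {
        return false;
      }
    }

    public static void main(String[] args) throws IOException {
      try (BufferedReader reader = open("/var/log/messages")) {   // illustrative path
        System.out.println(reader.readLine());
      }
    }
  }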


[43/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code - Part 2 (oleewere)

Posted by ol...@apache.org.
AMBARI-18310. Refactor logsearch portal side code - Part 2 (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d15e0b2d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d15e0b2d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d15e0b2d

Branch: refs/heads/branch-dev-logsearch
Commit: d15e0b2d1c2a85c46c1ce862965bf30112e65b8b
Parents: bc7e0aa
Author: oleewere <ol...@gmail.com>
Authored: Wed Sep 7 21:56:39 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:34:01 2016 +0200

----------------------------------------------------------------------
 ambari-logsearch/ambari-logsearch-it/pom.xml    |   5 +
 .../logsearch/steps/LogSearchApiSteps.java      |  95 +++++++++
 .../ambari/logsearch/steps/SolrSteps.java       |  13 +-
 .../logsearch/story/LogSearchApiQueryStory.java |  22 +++
 .../ambari/logsearch/story/LogSearchStory.java  |   4 +-
 .../story/log_search_api_query_story.story      |  17 ++
 .../story/logfeeder_parsing_story.story         |  16 +-
 .../service-log-level-counts-values.json        |  33 ++++
 .../test-output/service-log-schema.json         |  18 ++
 .../ambari/logfeeder/output/OutputSolr.java     |   9 +-
 .../ambari-logsearch-portal/pom.xml             |   6 +
 .../org/apache/ambari/logsearch/LogSearch.java  |  38 +++-
 .../logsearch/common/ExternalServerClient.java  |  16 +-
 .../logsearch/conf/ApplicationConfig.java       |  41 +---
 .../ambari/logsearch/conf/AuthConfig.java       |  12 +-
 .../ambari/logsearch/conf/SecurityConfig.java   | 112 +++++++++++
 .../ambari/logsearch/dao/SolrDaoBase.java       |   1 -
 .../apache/ambari/logsearch/dao/UserDao.java    |  13 +-
 .../logsearch/manager/AuditLogsManager.java     |  44 +++--
 .../ambari/logsearch/manager/PublicManager.java |   6 +-
 .../logsearch/manager/ServiceLogsManager.java   | 142 ++++---------
 .../logsearch/model/response/AuditLogData.java  |   4 +-
 .../ambari/logsearch/query/QueryGeneration.java |  48 +++--
 .../query/SearchCriteriaConstants.java          |  78 ++++++++
 .../AbstractCommonAuditLogRequestConverter.java |  16 +-
 .../AbstractCommonSearchRequestConverter.java   |  10 +-
 ...bstractCommonServiceLogRequestConverter.java |  39 ++--
 .../query/converter/AbstractConverterAware.java |  47 +++++
 .../converter/AnyGraphRequestConverter.java     |  12 +-
 .../AuditBarGraphRequestConverter.java          |   2 +-
 .../converter/AuditLogRequestConverter.java     |   2 +-
 .../BaseServiceLogRequestConverter.java         |   7 +-
 .../FieldAuditLogRequestConverter.java          |   2 +-
 .../FieldBarGraphRequestConverter.java          |   4 +-
 .../converter/LogFileRequestConverter.java      |   6 +-
 .../converter/LogFileTailRequestConverter.java  |   8 +-
 .../ServiceAnyGraphRequestConverter.java        |  12 +-
 .../ServiceExtremeDatesRequestConverter.java    |   3 +-
 .../converter/ServiceGraphRequestConverter.java |   6 +-
 .../ServiceLogExportRequestConverter.java       |   9 +-
 .../ServiceLogFileRequestConverter.java         |   4 +-
 .../converter/ServiceLogRequestConverter.java   |  14 +-
 .../ServiceLogTruncatedRequestConverter.java    |  10 +-
 .../converter/SimpleQueryRequestConverter.java  |  11 +-
 .../converter/UserConfigRequestConverter.java   |  10 +-
 .../converter/UserExportRequestConverter.java   |   3 +-
 .../query/model/AnyGraphSearchCriteria.java     |  56 +++++-
 .../model/AuditBarGraphSearchCriteria.java      |  12 +-
 .../query/model/AuditLogSearchCriteria.java     |  12 +-
 .../query/model/CommonSearchCriteria.java       |  93 +++++----
 .../model/CommonServiceLogSearchCriteria.java   |  96 +++++++++
 .../model/FieldAuditBarGraphSearchCriteria.java |  11 +-
 .../model/FieldAuditLogSearchCriteria.java      |  11 +-
 .../query/model/LogFileSearchCriteria.java      |  29 ++-
 .../query/model/LogFileTailSearchCriteria.java  |  13 +-
 .../logsearch/query/model/SearchCriteria.java   |  60 +++---
 .../model/ServiceAnyGraphSearchCriteria.java    |  37 +++-
 .../model/ServiceExtremeDatesCriteria.java      |  11 +-
 .../query/model/ServiceGraphSearchCriteria.java |  11 +-
 .../model/ServiceLogExportSearchCriteria.java   |  19 +-
 .../model/ServiceLogFileSearchCriteria.java     |  21 +-
 .../query/model/ServiceLogSearchCriteria.java   |  44 ++++-
 .../ServiceLogTruncatedSearchCriteria.java      |  27 ++-
 .../query/model/UserConfigSearchCriteria.java   |  27 ++-
 .../query/model/UserExportSearchCriteria.java   |  10 +-
 .../logsearch/rest/AuditLogsResource.java       |   2 +-
 .../logsearch/rest/ServiceLogsResource.java     |   7 +-
 .../logsearch/solr/metrics/SolrAmsClient.java   |  67 -------
 .../logsearch/solr/metrics/SolrJmxAdapter.java  | 134 -------------
 .../solr/metrics/SolrMetricsLoader.java         | 198 -------------------
 .../logsearch/solr/model/SolrAuditLogData.java  |   6 +-
 .../LogsearchLogoutSuccessHandler.java          |   2 +-
 .../LogsearchAuthenticationEntryPoint.java      |   2 +-
 .../LogsearchKRBAuthenticationFilter.java       |   1 +
 ...LogsearchSecurityContextFormationFilter.java |   1 +
 ...rchUsernamePasswordAuthenticationFilter.java |   2 +
 .../web/listeners/SpringEventListener.java      |  32 ---
 ...LogsearchAbstractAuthenticationProvider.java |  10 -
 .../LogsearchAuthenticationProvider.java        |  13 +-
 ...rchExternalServerAuthenticationProvider.java |  12 +-
 .../LogsearchFileAuthenticationProvider.java    |  11 +-
 .../LogsearchLdapAuthenticationProvider.java    |  22 ++-
 .../LogsearchLdapBindAuthenticator.java         |  64 +-----
 .../LogsearchSimpleAuthenticationProvider.java  |  13 +-
 .../src/main/resources/log4j.xml                |   6 +-
 .../META-INF/security-applicationContext.xml    |  83 --------
 .../src/main/webapp/WEB-INF/web.xml             |  62 ------
 .../test-config/logfeeder/logfeeder.properties  |   3 +-
 .../shipper-conf/input.config-hdfs.json         | 172 ++++++++++++++++
 .../docker/test-logs/hdfs-audit/hdfs-audit.log  |   4 +
 90 files changed, 1410 insertions(+), 1149 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-it/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-it/pom.xml b/ambari-logsearch/ambari-logsearch-it/pom.xml
index d918c8a..373068e 100644
--- a/ambari-logsearch/ambari-logsearch-it/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-it/pom.xml
@@ -82,6 +82,11 @@
       <artifactId>slf4j-log4j12</artifactId>
       <version>1.7.10</version>
     </dependency>
+    <dependency>
+      <groupId>com.flipkart.zjsonpatch</groupId>
+      <artifactId>zjsonpatch</artifactId>
+      <version>0.2.4</version>
+    </dependency>
   </dependencies>
 
   <build>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchApiSteps.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchApiSteps.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchApiSteps.java
new file mode 100644
index 0000000..bcfc4db
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchApiSteps.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.steps;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.flipkart.zjsonpatch.JsonDiff;
+import com.google.common.io.Resources;
+import org.apache.ambari.logsearch.domain.StoryDataRegistry;
+import org.glassfish.jersey.client.JerseyClient;
+import org.glassfish.jersey.client.JerseyClientBuilder;
+import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;
+import org.jbehave.core.annotations.Named;
+import org.jbehave.core.annotations.Then;
+import org.jbehave.core.annotations.When;
+import org.junit.Assert;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.client.Invocation;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.MediaType;
+import java.io.File;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class LogSearchApiSteps {
+
+  private static Logger LOG = LoggerFactory.getLogger(LogSearchApiSteps.class);
+
+  private String response;
+
+  @When("LogSearch api query sent: <apiQuery>")
+  public void sendApiQuery(@Named("apiQuery") String apiQuery) {
+    JerseyClient jerseyClient = JerseyClientBuilder.createClient();
+    HttpAuthenticationFeature authFeature = HttpAuthenticationFeature.basicBuilder()
+      .credentials("admin", "admin")
+      .build();
+    jerseyClient.register(authFeature);
+
+    String logsearchUrl = String.format("http://%s:%d%s",
+      StoryDataRegistry.INSTANCE.getDockerHost(),
+      StoryDataRegistry.INSTANCE.getLogsearchPort(),
+      apiQuery);
+
+    LOG.info("Url: {}", logsearchUrl);
+
+    WebTarget target = jerseyClient.target(logsearchUrl);
+    Invocation.Builder invocationBuilder =  target.request(MediaType.APPLICATION_JSON_TYPE);
+    response = invocationBuilder.get().readEntity(String.class);
+  }
+
+
+  @Then("The api query result is <jsonResult>")
+  public void verifyRestApiCall(@Named("jsonResult") String jsonResult) throws IOException, URISyntaxException {
+    ObjectMapper mapper = new ObjectMapper();
+    Path jsonFilePath = new File(Resources.getResource("test-output/" + jsonResult).toURI()).toPath();
+    String jsonExpected = new String(Files.readAllBytes(jsonFilePath));
+
+    JsonNode expected = mapper.readTree(jsonExpected);
+    JsonNode result = mapper.readTree(response);
+    JsonNode patch = JsonDiff.asJson(expected, result);
+    List<Object> diffObjects = mapper.convertValue(patch, List.class);
+    assertDiffs(diffObjects, expected);
+
+  }
+
+  private void assertDiffs(List<Object> diffObjects, JsonNode expected) {
+    for (Object diffObj : diffObjects) {
+      String path = ((Map<String, String>) diffObj).get("path");
+      Assert.assertTrue(expected.at(path).isMissingNode());
+    }
+  }
+}
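
The verification step above tolerates extra fields in the live response and only fails when a value pinned down by the expected JSON differs. A self-contained sketch of that subset check using the same zjsonpatch call; the two inline payloads are made up for illustration:

  import java.util.List;
  import java.util.Map;

  import com.fasterxml.jackson.databind.JsonNode;
  import com.fasterxml.jackson.databind.ObjectMapper;
  import com.flipkart.zjsonpatch.JsonDiff;

  public class JsonSubsetCheckSketch {
    public static void main(String[] args) throws Exception {
      ObjectMapper mapper = new ObjectMapper();
      JsonNode expected = mapper.readTree("{\"listSize\":7,\"vNameValues\":[{\"name\":\"WARN\",\"value\":\"3\"}]}");
      JsonNode actual   = mapper.readTree("{\"listSize\":7,\"vNameValues\":[{\"name\":\"WARN\",\"value\":\"3\"}],\"extra\":1}");

      // RFC 6902 patch that would turn 'expected' into 'actual'.
      JsonNode patch = JsonDiff.asJson(expected, actual);
      List<Object> diffs = mapper.convertValue(patch, List.class);

      for (Object diff : diffs) {
        String path = ((Map<String, String>) diff).get("path");
        // Differences are acceptable only at paths the expected document leaves undefined,
        // i.e. additions on the actual side; a changed or removed expected value fails.
        if (!expected.at(path).isMissingNode()) {
          throw new AssertionError("Unexpected difference at " + path);
        }
      }
      System.out.println("actual matches every field pinned down by expected");
    }
  }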

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/SolrSteps.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/SolrSteps.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/SolrSteps.java
index e1e5b99..7c72ca7 100644
--- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/SolrSteps.java
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/SolrSteps.java
@@ -22,25 +22,24 @@ import org.apache.ambari.logsearch.domain.StoryDataRegistry;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocumentList;
-import org.jbehave.core.annotations.Given;
+import org.jbehave.core.annotations.Named;
 import org.jbehave.core.annotations.Then;
-import org.jbehave.core.annotations.When;
-import org.jbehave.core.model.Story;
 import org.junit.Assert;
 
 import java.io.IOException;
 
 public class SolrSteps {
 
-  @Then("the number of $component docs is: $docSize")
-  public void numberOfDocsForComponent(String component, int docSize) throws IOException, SolrServerException {
+  @Then("the number of <component> docs is: <docSize>")
+  public void numberOfDocsForComponent(@Named("component") String component, @Named("docSize") int docSize)
+    throws IOException, SolrServerException, InterruptedException {
     SolrClient solrClient = StoryDataRegistry.INSTANCE.getCloudSolrClient();
     SolrQuery solrQuery = new SolrQuery();
     solrQuery.setQuery(String.format("type:%s", component));
+    solrQuery.setStart(0);
+    solrQuery.setRows(20);
     QueryResponse queryResponse = solrClient.query(StoryDataRegistry.INSTANCE.getServiceLogsCollection(), solrQuery);
     SolrDocumentList list = queryResponse.getResults();
     Assert.assertEquals(docSize, list.size());
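
The added setStart/setRows calls matter because Solr returns at most 10 documents per page by default, while the parameterized story now asserts counts up to 17. A minimal sketch of the same query in isolation; the collection name and client are placeholders, since the step class takes both from StoryDataRegistry:

  import org.apache.solr.client.solrj.SolrClient;
  import org.apache.solr.client.solrj.SolrQuery;
  import org.apache.solr.client.solrj.response.QueryResponse;
  import org.apache.solr.common.SolrDocumentList;

  public class SolrTypeCountSketch {

    public static long countOnFirstPage(SolrClient solrClient, String collection, String component) throws Exception {
      SolrQuery solrQuery = new SolrQuery();
      solrQuery.setQuery(String.format("type:%s", component));
      solrQuery.setStart(0);
      solrQuery.setRows(20);   // raise the page size above Solr's default of 10
      QueryResponse queryResponse = solrClient.query(collection, solrQuery);
      SolrDocumentList list = queryResponse.getResults();
      // list.size() is capped by rows; list.getNumFound() would report the total hit count.
      return list.size();
    }
  }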

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchApiQueryStory.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchApiQueryStory.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchApiQueryStory.java
new file mode 100644
index 0000000..45455bf
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchApiQueryStory.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.story;
+
+public class LogSearchApiQueryStory extends LogSearchStory {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchStory.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchStory.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchStory.java
index f85eb08..ce6b9cb 100644
--- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchStory.java
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/story/LogSearchStory.java
@@ -18,6 +18,7 @@
  */
 package org.apache.ambari.logsearch.story;
 
+import org.apache.ambari.logsearch.steps.LogSearchApiSteps;
 import org.apache.ambari.logsearch.steps.SolrSteps;
 import org.apache.ambari.logsearch.steps.LogSearchDockerSteps;
 import org.jbehave.core.configuration.Configuration;
@@ -47,7 +48,8 @@ abstract public class LogSearchStory extends JUnitStory {
   public InjectableStepsFactory stepsFactory() {
     return new InstanceStepsFactory(configuration(),
       new LogSearchDockerSteps(),
-      new SolrSteps());
+      new SolrSteps(),
+      new LogSearchApiSteps());
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-it/src/test/resources/org/apache/ambari/logsearch/story/log_search_api_query_story.story
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/org/apache/ambari/logsearch/story/log_search_api_query_story.story b/ambari-logsearch/ambari-logsearch-it/src/test/resources/org/apache/ambari/logsearch/story/log_search_api_query_story.story
new file mode 100644
index 0000000..5abe8b4
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/resources/org/apache/ambari/logsearch/story/log_search_api_query_story.story
@@ -0,0 +1,17 @@
+Meta:
+
+Narrative:
+As a user
+I want to perform queries against Log Search api
+So that I can validate the json outputs
+
+Scenario: scenario description
+
+Given logsearch docker container
+When LogSearch api query sent: <apiQuery>
+Then The api query result is <jsonResult>
+
+Examples:
+|apiQuery|jsonResult|
+|/api/v1/service/logs/schema/fields|service-log-schema.json|
+|/api/v1/service/logs/levels/counts/namevalues?page=0&pageSize=25&startIndex=0&q=*%3A*|service-log-level-counts-values.json|
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-it/src/test/resources/org/apache/ambari/logsearch/story/logfeeder_parsing_story.story
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/org/apache/ambari/logsearch/story/logfeeder_parsing_story.story b/ambari-logsearch/ambari-logsearch-it/src/test/resources/org/apache/ambari/logsearch/story/logfeeder_parsing_story.story
index a40d04a..388e624 100644
--- a/ambari-logsearch/ambari-logsearch-it/src/test/resources/org/apache/ambari/logsearch/story/logfeeder_parsing_story.story
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/resources/org/apache/ambari/logsearch/story/logfeeder_parsing_story.story
@@ -5,14 +5,16 @@ As a user
 I want to start logsearch/logfeeder/solr components in a docker container with test logs
 So that I can parse and store the logs into Solr
 
-Scenario: Logsearch logs are stored into Solr.
+Scenario: Number of logs for components
 
 Given logsearch docker container
 When logfeeder started (parse logs & send data to solr)
-Then the number of logsearch_app docs is: 1
+Then the number of <component> docs is: <docSize>
 
-Scenario: Zookeeper logs are stored into Solr.
-
-Given logsearch docker container
-When logfeeder started (parse logs & send data to solr)
-Then the number of zookeeper docs is: 3
+Examples:
+|component|docSize|
+|logsearch_app|1|
+|zookeeper|3|
+|hst_agent|4|
+|secure_log|11|
+|system_message|17|

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-output/service-log-level-counts-values.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-output/service-log-level-counts-values.json b/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-output/service-log-level-counts-values.json
new file mode 100644
index 0000000..0ef05e8
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-output/service-log-level-counts-values.json
@@ -0,0 +1,33 @@
+{
+  "vNameValues": [
+    {
+      "name": "FATAL",
+      "value": "0"
+    },
+    {
+      "name": "ERROR",
+      "value": "0"
+    },
+    {
+      "name": "WARN",
+      "value": "3"
+    },
+    {
+      "name": "INFO",
+      "value": "4"
+    },
+    {
+      "name": "DEBUG",
+      "value": "1"
+    },
+    {
+      "name": "TRACE",
+      "value": "0"
+    },
+    {
+      "name": "UNKNOWN",
+      "value": "28"
+    }
+  ],
+  "listSize": 7
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-output/service-log-schema.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-output/service-log-schema.json b/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-output/service-log-schema.json
new file mode 100644
index 0000000..9efa58b
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/resources/test-output/service-log-schema.json
@@ -0,0 +1,18 @@
+{
+  "log_message": "Log Message",
+  "bundle_id": "Bundle Id",
+  "case_id": "Case Id",
+  "cluster": "Cluster",
+  "event_count": "Event Count",
+  "file": "File",
+  "host": "Host",
+  "id": "Id",
+  "ip": "IP",
+  "level": "Level",
+  "logfile_line_number": "Logfile Line Number",
+  "logger_name": "Logger Name",
+  "method": "method",
+  "path": "Path",
+  "text": "Text",
+  "type": "Type"
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
index 47f139d..53cb6c0 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
@@ -77,17 +77,17 @@ public class OutputSolr extends Output {
 
   private BlockingQueue<OutputData> outgoingBuffer = null;
   private List<SolrWorkerThread> workerThreadList = new ArrayList<>();
-  
+
   @Override
   protected String getStatMetricName() {
     return "output.solr.write_logs";
   }
-  
+
   @Override
   protected String getWriteBytesMetricName() {
     return "output.solr.write_bytes";
   }
-  
+
   @Override
   public void init() throws Exception {
     super.init();
@@ -263,6 +263,9 @@ public class OutputSolr extends Output {
   private void useActualDateIfNeeded(Map<String, Object> jsonObj) {
     if (skipLogtime) {
       jsonObj.put("logtime", DateUtil.getActualDateStr());
+      if (jsonObj.get("evtTime") != null) {
+        jsonObj.put("evtTime", DateUtil.getActualDateStr());
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/pom.xml b/ambari-logsearch/ambari-logsearch-portal/pom.xml
index ebba82c..a1d1bfc 100755
--- a/ambari-logsearch/ambari-logsearch-portal/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/pom.xml
@@ -36,6 +36,7 @@
     <jersey.version>2.23.2</jersey.version>
     <jetty-version>9.2.11.v20150529</jetty-version>
     <swagger.version>1.5.8</swagger.version>
+    <spring-data-solr.version>2.0.2.RELEASE</spring-data-solr.version>
   </properties>
   <profiles>
     <!-- Dev Profile Start -->
@@ -751,5 +752,10 @@
       <artifactId>swagger-ui</artifactId>
       <version>2.1.0</version>
     </dependency>
+    <dependency>
+      <groupId>org.springframework.data</groupId>
+      <artifactId>spring-data-solr</artifactId>
+      <version>${spring-data-solr.version}</version>
+    </dependency>
   </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
index 27b943a..75d8be3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
@@ -24,11 +24,11 @@ import java.net.ServerSocket;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.util.EnumSet;
 
-import org.apache.ambari.logsearch.common.ConfigHelper;
 import org.apache.ambari.logsearch.common.ManageStartEndTime;
 import org.apache.ambari.logsearch.common.PropertiesHelper;
-import org.apache.ambari.logsearch.solr.metrics.SolrMetricsLoader;
+import org.apache.ambari.logsearch.conf.ApplicationConfig;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.eclipse.jetty.server.Connector;
@@ -40,10 +40,18 @@ import org.eclipse.jetty.server.ServerConnector;
 import org.eclipse.jetty.server.SslConnectionFactory;
 import org.eclipse.jetty.server.handler.HandlerList;
 import org.eclipse.jetty.server.handler.ResourceHandler;
+import org.eclipse.jetty.servlet.FilterHolder;
 import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
 import org.eclipse.jetty.util.resource.Resource;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
 import org.eclipse.jetty.webapp.WebAppContext;
+import org.springframework.web.context.ContextLoaderListener;
+import org.springframework.web.context.request.RequestContextListener;
+import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
+import org.springframework.web.filter.DelegatingFilterProxy;
+
+import javax.servlet.DispatcherType;
 
 public class LogSearch {
   private static final Logger logger = Logger.getLogger(LogSearch.class);
@@ -65,6 +73,7 @@ public class LogSearch {
   
   private static final String WEB_RESOURCE_FOLDER = "webapps/app";
   private static final String ROOT_CONTEXT = "/";
+  private static final Integer SESSION_TIMEOUT = 30;
 
  
   public static void main(String[] argv) {
@@ -75,7 +84,6 @@ public class LogSearch {
     } catch (Throwable e) {
       logger.error("Error running logsearch server", e);
     }
-    SolrMetricsLoader.startSolrMetricsLoaderTasks();
   }
   
   public void run(String[] argv) throws Exception {
@@ -150,6 +158,21 @@ public class LogSearch {
     context.setBaseResource(Resource.newResource(webResourceBase));
     context.setContextPath(ROOT_CONTEXT);
     context.setParentLoaderPriority(true);
+
+    // Configure Spring
+    context.addEventListener(new ContextLoaderListener());
+    context.addEventListener(new RequestContextListener());
+    context.addFilter(new FilterHolder(new DelegatingFilterProxy("springSecurityFilterChain")), "/*", EnumSet.allOf(DispatcherType.class));
+    context.setInitParameter("contextClass", AnnotationConfigWebApplicationContext.class.getName());
+    context.setInitParameter("contextConfigLocation", ApplicationConfig.class.getName());
+
+    // Configure Jersey
+    ServletHolder jerseyServlet = context.addServlet(org.glassfish.jersey.servlet.ServletContainer.class, "/api/v1/*");
+    jerseyServlet.setInitOrder(1);
+    jerseyServlet.setInitParameter("jersey.config.server.provider.packages","org.apache.ambari.logsearch.rest,io.swagger.jaxrs.listing");
+
+    context.getSessionHandler().getSessionManager().setMaxInactiveInterval(SESSION_TIMEOUT);
+
     return context;
   }
 
@@ -167,18 +190,19 @@ public class LogSearch {
   private URI findWebResourceBase() {
     URL fileCompleteUrl = Thread.currentThread().getContextClassLoader()
         .getResource(WEB_RESOURCE_FOLDER);
+    String errorMessage = "Web Resource Folder " + WEB_RESOURCE_FOLDER+ " not found in classpath";
     if (fileCompleteUrl != null) {
       try {
         return fileCompleteUrl.toURI().normalize();
       } catch (URISyntaxException e) {
-        logger.error("Web Resource Folder " + WEB_RESOURCE_FOLDER+ " not found in classpath", e);
+        logger.error(errorMessage, e);
         System.exit(1);
       }
-    }else{
-      logger.error("Web Resource Folder " + WEB_RESOURCE_FOLDER+ " not found in classpath");
+    } else {
+      logger.error(errorMessage);
       System.exit(1);
     }
-    return null;
+    throw new IllegalStateException(errorMessage);
   }
 
   private void checkPort(int port) {
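
The block added above replaces the web.xml and security-applicationContext.xml wiring (both removed in this commit) with programmatic registration on the embedded Jetty context. A condensed, self-contained sketch of the same pattern outside the Log Search classes; the port, the context-config class name and the Jersey package are placeholders, and the springSecurityFilterChain bean is assumed to come from a Spring Security configuration:

  import java.util.EnumSet;

  import javax.servlet.DispatcherType;

  import org.eclipse.jetty.server.Server;
  import org.eclipse.jetty.servlet.FilterHolder;
  import org.eclipse.jetty.servlet.ServletContextHandler;
  import org.eclipse.jetty.servlet.ServletHolder;
  import org.glassfish.jersey.servlet.ServletContainer;
  import org.springframework.web.context.ContextLoaderListener;
  import org.springframework.web.context.request.RequestContextListener;
  import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
  import org.springframework.web.filter.DelegatingFilterProxy;

  public class EmbeddedJettySpringSketch {
    public static void main(String[] args) throws Exception {
      Server server = new Server(61888);

      ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
      context.setContextPath("/");

      // Spring root context driven by an annotated @Configuration class, plus the security filter chain.
      context.setInitParameter("contextClass", AnnotationConfigWebApplicationContext.class.getName());
      context.setInitParameter("contextConfigLocation", "com.example.conf.MyAppConfig");
      context.addEventListener(new ContextLoaderListener());
      context.addEventListener(new RequestContextListener());
      context.addFilter(new FilterHolder(new DelegatingFilterProxy("springSecurityFilterChain")),
          "/*", EnumSet.allOf(DispatcherType.class));

      // Jersey resources exposed under /api/v1/*.
      ServletHolder jersey = context.addServlet(ServletContainer.class, "/api/v1/*");
      jersey.setInitOrder(1);
      jersey.setInitParameter("jersey.config.server.provider.packages", "com.example.rest");

      server.setHandler(context);
      server.start();
      server.join();
    }
  }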

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
index edc78d1..2590dfe 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
@@ -21,7 +21,6 @@ package org.apache.ambari.logsearch.common;
 import java.util.List;
 import java.util.Map;
 
-import javax.annotation.PostConstruct;
 import javax.inject.Inject;
 import javax.ws.rs.client.Invocation;
 import javax.ws.rs.client.WebTarget;
@@ -29,7 +28,6 @@ import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.MultivaluedMap;
 
 import org.apache.ambari.logsearch.conf.AuthConfig;
-import org.apache.ambari.logsearch.web.security.LogsearchAbstractAuthenticationProvider;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.glassfish.jersey.client.JerseyClient;
@@ -55,23 +53,18 @@ public class ExternalServerClient {
   @Inject
   private AuthConfig authConfig;
 
-  private boolean enableLog = false;// default
-
   /**
    * Send GET request to an external server
    */
-  @SuppressWarnings({ "unchecked", "rawtypes" })
   public Object sendGETRequest(String url, Class klass, MultivaluedMap<String, String> queryParam,
                                String username, String password)
       throws Exception {
     url = authConfig.getExternalAuthHostUrl() + url;
     JerseyClient client = localJerseyClient.get();
-    HttpAuthenticationFeature authFeature = HttpAuthenticationFeature.basicBuilder().build();
-
+    HttpAuthenticationFeature authFeature = HttpAuthenticationFeature.basicBuilder()
+      .credentials(username, password)
+      .build();
     client.register(authFeature);
-    if (enableLog) {
-      client.register(LoggingFilter.class);
-    }
 
     WebTarget target = client.target(url);
     LOG.debug("URL: " + url);
@@ -80,9 +73,6 @@ public class ExternalServerClient {
       LOG.debug(
         String.format("Query parameter: name - %s  ; value - %s ;" + entry.getKey(), StringUtils.join(entry.getValue(),',')));
     }
-    target
-      .property(HttpAuthenticationFeature.HTTP_AUTHENTICATION_BASIC_USERNAME, username)
-      .property(HttpAuthenticationFeature.HTTP_AUTHENTICATION_BASIC_PASSWORD, password);
     Invocation.Builder invocationBuilder =  target.request(MediaType.APPLICATION_JSON_TYPE);
     try {
       return invocationBuilder.get().readEntity(klass);
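
The change above binds the credentials on HttpAuthenticationFeature itself, via basicBuilder().credentials(...), instead of setting per-target properties after registering an empty feature. A minimal standalone sketch of that client pattern; the URL and the admin/admin credentials are placeholders:

  import javax.ws.rs.client.Invocation;
  import javax.ws.rs.client.WebTarget;
  import javax.ws.rs.core.MediaType;

  import org.glassfish.jersey.client.JerseyClient;
  import org.glassfish.jersey.client.JerseyClientBuilder;
  import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;

  public class BasicAuthGetSketch {
    public static void main(String[] args) {
      JerseyClient client = JerseyClientBuilder.createClient();
      client.register(HttpAuthenticationFeature.basicBuilder()
          .credentials("admin", "admin")
          .build());

      WebTarget target = client.target("http://localhost:61888/api/v1/service/logs/schema/fields");
      Invocation.Builder request = target.request(MediaType.APPLICATION_JSON_TYPE);
      String body = request.get().readEntity(String.class);   // GET with HTTP basic auth
      System.out.println(body);
    }
  }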

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApplicationConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApplicationConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApplicationConfig.java
index 72ea942..b279a83 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApplicationConfig.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApplicationConfig.java
@@ -18,38 +18,16 @@
  */
 package org.apache.ambari.logsearch.conf;
 
-import org.apache.ambari.logsearch.query.converter.AnyGraphRequestConverter;
-import org.apache.ambari.logsearch.query.converter.AuditBarGraphRequestConverter;
-import org.apache.ambari.logsearch.query.converter.AuditLogRequestConverter;
-import org.apache.ambari.logsearch.query.converter.BaseAuditLogRequestConverter;
-import org.apache.ambari.logsearch.query.converter.BaseServiceLogRequestConverter;
-import org.apache.ambari.logsearch.query.converter.FieldAuditLogRequestConverter;
-import org.apache.ambari.logsearch.query.converter.FieldBarGraphRequestConverter;
-import org.apache.ambari.logsearch.query.converter.ServiceAnyGraphRequestConverter;
-import org.apache.ambari.logsearch.query.converter.ServiceExtremeDatesRequestConverter;
-import org.apache.ambari.logsearch.query.converter.ServiceGraphRequestConverter;
-import org.apache.ambari.logsearch.query.converter.ServiceLogExportRequestConverter;
-import org.apache.ambari.logsearch.query.converter.ServiceLogFileRequestConverter;
-import org.apache.ambari.logsearch.query.converter.ServiceLogRequestConverter;
-import org.apache.ambari.logsearch.query.converter.ServiceLogTruncatedRequestConverter;
-import org.apache.ambari.logsearch.query.converter.SimpleQueryRequestConverter;
-import org.apache.ambari.logsearch.query.converter.UserExportRequestConverter;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.ComponentScan;
 import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.ImportResource;
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.context.support.ConversionServiceFactoryBean;
 import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
-import org.springframework.core.convert.converter.Converter;
-
-import java.util.HashSet;
-import java.util.Set;
 
 @Configuration
 @ComponentScan("org.apache.ambari.logsearch")
 @PropertySource(value = {"classpath:default.properties", "classpath:logsearch.properties"})
-@ImportResource("META-INF/security-applicationContext.xml")
 public class ApplicationConfig {
 
   @Bean
@@ -60,24 +38,7 @@ public class ApplicationConfig {
   @Bean(name="conversionService")
   public ConversionServiceFactoryBean conversionServiceFactoryBean() {
     ConversionServiceFactoryBean conversionServiceFactoryBean = new ConversionServiceFactoryBean();
-    Set<Converter> converters = new HashSet<>();
-    converters.add(new AnyGraphRequestConverter());
-    converters.add(new AuditBarGraphRequestConverter());
-    converters.add(new AuditLogRequestConverter());
-    converters.add(new BaseAuditLogRequestConverter());
-    converters.add(new BaseServiceLogRequestConverter());
-    converters.add(new FieldAuditLogRequestConverter());
-    converters.add(new FieldBarGraphRequestConverter());
-    converters.add(new SimpleQueryRequestConverter());
-    converters.add(new UserExportRequestConverter());
-    converters.add(new ServiceAnyGraphRequestConverter());
-    converters.add(new ServiceExtremeDatesRequestConverter());
-    converters.add(new ServiceGraphRequestConverter());
-    converters.add(new ServiceLogExportRequestConverter());
-    converters.add(new ServiceLogFileRequestConverter());
-    converters.add(new ServiceLogRequestConverter());
-    converters.add(new ServiceLogTruncatedRequestConverter());
-    conversionServiceFactoryBean.setConverters(converters);
+    conversionServiceFactoryBean.afterPropertiesSet();
     return conversionServiceFactoryBean;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/AuthConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/AuthConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/AuthConfig.java
index 3398a83..46d07bd 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/AuthConfig.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/AuthConfig.java
@@ -30,9 +30,11 @@ public class AuthConfig {
   boolean authLdapEnabled;
   @Value("${logsearch.auth.simple.enable:false}")
   boolean authSimpleEnabled;
+  @Value("${logsearch.auth.external_auth.enable:false}")
+  boolean authExternalEnabled;
   @Value("${logsearch.auth.external_auth.host_url:'http://ip:port'}")
   private String externalAuthHostUrl;
-  @Value("${logsearch.auth.login_url:/api/v1/users/$USERNAME/privileges?fields=*}")
+  @Value("${logsearch.auth.external_auth.login_url:/api/v1/users/$USERNAME/privileges?fields=*}")
   private String externalAuthLoginUrl;
   @Value("${logsearch.login.credentials.file:user_pass.json}")
   private String credentialsFile;
@@ -84,4 +86,12 @@ public class AuthConfig {
   public void setExternalAuthLoginUrl(String externalAuthLoginUrl) {
     this.externalAuthLoginUrl = externalAuthLoginUrl;
   }
+
+  public boolean isAuthExternalEnabled() {
+    return authExternalEnabled;
+  }
+
+  public void setAuthExternalEnabled(boolean authExternalEnabled) {
+    this.authExternalEnabled = authExternalEnabled;
+  }
 }
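
The AuthConfig change above introduces a dedicated on/off switch for external (Ambari) authentication and moves the login-URL property under the external_auth prefix. A hypothetical logsearch.properties fragment wiring these keys together could look like the following; only the property names come from this diff, the values are placeholders.

    # placeholder values; property names as declared in AuthConfig
    logsearch.auth.external_auth.enable=true
    logsearch.auth.external_auth.host_url=http://ambari-server-host:8080
    logsearch.auth.external_auth.login_url=/api/v1/users/$USERNAME/privileges?fields=*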

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java
new file mode 100644
index 0000000..d3db110
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SecurityConfig.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.conf;
+
+import org.apache.ambari.logsearch.web.authenticate.LogsearchAuthFailureHandler;
+import org.apache.ambari.logsearch.web.authenticate.LogsearchAuthSuccessHandler;
+import org.apache.ambari.logsearch.web.authenticate.LogsearchLogoutSuccessHandler;
+import org.apache.ambari.logsearch.web.filters.LogsearchAuthenticationEntryPoint;
+import org.apache.ambari.logsearch.web.filters.LogsearchKRBAuthenticationFilter;
+import org.apache.ambari.logsearch.web.filters.LogsearchSecurityContextFormationFilter;
+import org.apache.ambari.logsearch.web.filters.LogsearchUsernamePasswordAuthenticationFilter;
+import org.apache.ambari.logsearch.web.security.LogsearchAuthenticationProvider;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.security.config.annotation.web.builders.HttpSecurity;
+import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
+import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
+import org.springframework.security.config.http.SessionCreationPolicy;
+import org.springframework.security.web.access.intercept.FilterSecurityInterceptor;
+import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
+
+@Configuration
+@EnableWebSecurity
+public class SecurityConfig extends WebSecurityConfigurerAdapter {
+
+  @Override
+  protected void configure(HttpSecurity http) throws Exception {
+    http
+      .csrf().disable()
+      .sessionManagement()
+         .sessionFixation()
+         .newSession()
+         .sessionCreationPolicy(SessionCreationPolicy.ALWAYS)
+      .and()
+      .authorizeRequests()
+        .antMatchers("/login.html").permitAll()
+        .antMatchers("/styles/**").permitAll()
+        .antMatchers("/fonts/**").permitAll()
+        .antMatchers("/fonts/**").permitAll()
+        .antMatchers("/scripts/**").permitAll()
+        .antMatchers("/libs/**").permitAll()
+        .antMatchers("/images/**").permitAll()
+        .antMatchers("/templates/**").permitAll()
+        .antMatchers("/favicon.ico").permitAll()
+        .antMatchers("/api/v1/public/**").permitAll()
+        .antMatchers("/api/v1/swagger.json").permitAll()
+        .antMatchers("/**").authenticated()
+      .and()
+      .authenticationProvider(logsearchAuthenticationProvider())
+        .formLogin()
+        .loginPage("/login.html")
+      .and()
+      .httpBasic()
+        .authenticationEntryPoint(logsearchAuthenticationEntryPoint())
+      .and()
+      .addFilterBefore(logsearchUsernamePasswordAuthenticationFilter(), UsernamePasswordAuthenticationFilter.class)
+      .addFilterBefore(new LogsearchKRBAuthenticationFilter(), UsernamePasswordAuthenticationFilter.class)
+      .addFilterAfter(securityContextFormationFilter(), FilterSecurityInterceptor.class)
+      .logout()
+        .logoutUrl("/logout.html")
+        .deleteCookies("JSESSIONID")
+        .logoutSuccessHandler(new LogsearchLogoutSuccessHandler());
+  }
+
+  @Bean
+  public LogsearchSecurityContextFormationFilter securityContextFormationFilter() {
+    return new LogsearchSecurityContextFormationFilter();
+  }
+
+  @Bean
+  public LogsearchKRBAuthenticationFilter logsearchKRBAuthenticationFilter() {
+    return new LogsearchKRBAuthenticationFilter();
+  }
+
+  @Bean
+  public LogsearchAuthenticationProvider logsearchAuthenticationProvider() {
+    return new LogsearchAuthenticationProvider();
+  }
+
+  @Bean
+  public LogsearchAuthenticationEntryPoint logsearchAuthenticationEntryPoint() {
+    LogsearchAuthenticationEntryPoint entryPoint = new LogsearchAuthenticationEntryPoint("/login.html");
+    entryPoint.setForceHttps(false);
+    return entryPoint;
+  }
+
+  @Bean
+  public LogsearchUsernamePasswordAuthenticationFilter logsearchUsernamePasswordAuthenticationFilter() throws Exception {
+    LogsearchUsernamePasswordAuthenticationFilter filter = new LogsearchUsernamePasswordAuthenticationFilter();
+    filter.setAuthenticationSuccessHandler(new LogsearchAuthSuccessHandler());
+    filter.setAuthenticationFailureHandler(new LogsearchAuthFailureHandler());
+    filter.setAuthenticationManager(authenticationManagerBean());
+    return filter;
+  }
+
+}
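
With the XML security context no longer imported by ApplicationConfig (see the @ImportResource removal earlier in this diff), the servlet container still needs springSecurityFilterChain registered somehow, either via web.xml or a servlet 3.0 initializer. This commit does not show which route is taken; one common approach is sketched below, and the class name is made up for illustration.

    import org.springframework.security.web.context.AbstractSecurityWebApplicationInitializer;

    // Registers the springSecurityFilterChain delegating filter proxy for all requests
    // without any web.xml entry; an empty subclass is enough when the root application
    // context is created elsewhere.
    public class SecurityWebInitializerSketch extends AbstractSecurityWebApplicationInitializer {
    }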

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
index b325171..ac7f56f 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
@@ -29,7 +29,6 @@ import java.util.List;
 import org.apache.ambari.logsearch.common.ConfigHelper;
 import org.apache.ambari.logsearch.common.LogSearchContext;
 import org.apache.ambari.logsearch.common.MessageEnums;
-import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.conf.SolrKerberosConfig;
 import org.apache.ambari.logsearch.conf.SolrUserConfig;
 import org.apache.ambari.logsearch.manager.ManagerBase.LogType;

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
index a04dee4..4ca9df6 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
@@ -52,9 +52,6 @@ public class UserDao {
   private static final String NAME = "name";
 
   @Inject
-  private LogsearchFileAuthenticationProvider fileAuthenticationProvider;
-
-  @Inject
   private AuthConfig authConfig;
 
   private ArrayList<HashMap<String, String>> userList = null;
@@ -62,7 +59,7 @@ public class UserDao {
   @SuppressWarnings("unchecked")
   @PostConstruct
   public void initialization() {
-    if (fileAuthenticationProvider.isEnable()) {
+    if (authConfig.isAuthFileEnabled()) {
       try {
         String userPassJsonFileName = authConfig.getCredentialsFile();
         logger.info("USER PASS JSON  file NAME:" + userPassJsonFileName);
@@ -151,12 +148,4 @@ public class UserDao {
     }
     return isUpdated;
   }
-  
-  public String encryptPassword(String username, String password) {
-    if (!StringUtils.isEmpty(username)) {
-      username = username.toLowerCase();
-    }
-    String saltEncodedpasswd = CommonUtil.encryptPassword(password, username);
-    return saltEncodedpasswd;
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java
index 7affc5a..c64cf71 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java
@@ -49,6 +49,12 @@ import org.apache.ambari.logsearch.model.response.LogData;
 import org.apache.ambari.logsearch.model.response.LogSearchResponse;
 import org.apache.ambari.logsearch.model.response.NameValueData;
 import org.apache.ambari.logsearch.model.response.NameValueDataListResponse;
+import org.apache.ambari.logsearch.query.model.AnyGraphSearchCriteria;
+import org.apache.ambari.logsearch.query.model.AuditBarGraphSearchCriteria;
+import org.apache.ambari.logsearch.query.model.CommonSearchCriteria;
+import org.apache.ambari.logsearch.query.model.FieldAuditBarGraphSearchCriteria;
+import org.apache.ambari.logsearch.query.model.FieldAuditLogSearchCriteria;
+import org.apache.ambari.logsearch.query.model.UserExportSearchCriteria;
 import org.apache.ambari.logsearch.solr.model.SolrAuditLogData;
 import org.apache.ambari.logsearch.solr.model.SolrComponentTypeLogData;
 import org.apache.ambari.logsearch.util.BizUtil;
@@ -82,7 +88,7 @@ public class AuditLogsManager extends ManagerBase<SolrAuditLogData, AuditLogResp
   private SolrAuditLogConfig solrAuditLogConfig;
 
   public AuditLogResponse getLogs(AuditLogSearchCriteria searchCriteria) {
-    Boolean isLastPage = (Boolean) searchCriteria.getParamValue("isLastPage");
+    Boolean isLastPage = searchCriteria.isLastPage();
     if (isLastPage) {
       SolrQuery lastPageQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
       LogSearchResponse logResponse = getLastPage(searchCriteria, LogSearchConstants.AUDIT_EVTTIME, auditSolrDao, lastPageQuery);
@@ -95,7 +101,7 @@ public class AuditLogsManager extends ManagerBase<SolrAuditLogData, AuditLogResp
     return getLogAsPaginationProvided(solrQuery, auditSolrDao);
   }
 
-  private List<LogData> getComponents(SearchCriteria searchCriteria) {
+  private List<LogData> getComponents(CommonSearchCriteria searchCriteria) {
     SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
     List<LogData> docList = new ArrayList<>();
     try {
@@ -133,7 +139,7 @@ public class AuditLogsManager extends ManagerBase<SolrAuditLogData, AuditLogResp
     }
   }
 
-  public GroupListResponse getAuditComponents(SearchCriteria searchCriteria) {
+  public GroupListResponse getAuditComponents(CommonSearchCriteria searchCriteria) {
     GroupListResponse componentResponse = new GroupListResponse();
     List<LogData> docList = getComponents(searchCriteria);
     componentResponse.setGroupList(docList);
@@ -141,13 +147,13 @@ public class AuditLogsManager extends ManagerBase<SolrAuditLogData, AuditLogResp
   }
 
   @SuppressWarnings("unchecked")
-  public BarGraphDataListResponse getAuditBarGraphData(SearchCriteria searchCriteria) {
+  public BarGraphDataListResponse getAuditBarGraphData(AuditBarGraphSearchCriteria searchCriteria) {
     BarGraphDataListResponse dataList = new BarGraphDataListResponse();
     SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
 
-    String from = getFrom((String) searchCriteria.getParamValue("startTime"));
-    String to = getTo((String) searchCriteria.getParamValue("endTime"));
-    String unit = getUnit((String) searchCriteria.getParamValue("unit"));
+    String from = getFrom(searchCriteria.getStartTime());
+    String to = getTo(searchCriteria.getEndTime());
+    String unit = getUnit(searchCriteria.getUnit());
 
     List<BarGraphData> histogramData = new ArrayList<BarGraphData>();
     String jsonHistogramQuery = queryGenerator.buildJSONFacetTermTimeRangeQuery(LogSearchConstants.AUDIT_COMPONENT,
@@ -225,7 +231,7 @@ public class AuditLogsManager extends ManagerBase<SolrAuditLogData, AuditLogResp
     }
   }
 
-  public BarGraphDataListResponse topTenUsers(SearchCriteria searchCriteria) {
+  public BarGraphDataListResponse topTenUsers(FieldAuditBarGraphSearchCriteria searchCriteria) {
 
     String jsonUserQuery =
       "{Users:{type:terms, field:reqUser, facet:{ Repo:{ type:terms, field:repo, facet:{eventCount:\"sum(event_count)\"}}}}}";
@@ -262,7 +268,7 @@ public class AuditLogsManager extends ManagerBase<SolrAuditLogData, AuditLogResp
     }
   }
 
-  public BarGraphDataListResponse topTenResources(SearchCriteria searchCriteria) {
+  public BarGraphDataListResponse topTenResources(FieldAuditLogSearchCriteria searchCriteria) {
 
     String jsonUserQuery =
       "{Users:{type:terms,field:resource,facet:{Repo:{type:terms,field:repo,facet:{eventCount:\"sum(event_count)\"}}}}}";
@@ -294,11 +300,11 @@ public class AuditLogsManager extends ManagerBase<SolrAuditLogData, AuditLogResp
   }
 
   @SuppressWarnings("unchecked")
-  public BarGraphDataListResponse getRequestUserLineGraph(SearchCriteria searchCriteria) {
+  public BarGraphDataListResponse getRequestUserLineGraph(FieldAuditBarGraphSearchCriteria searchCriteria) {
 
-    String from = getFrom((String) searchCriteria.getParamValue("startTime"));
-    String to = getTo((String) searchCriteria.getParamValue("endTime"));
-    String unit = getUnit((String) searchCriteria.getParamValue("unit"));
+    String from = getFrom(searchCriteria.getStartTime());
+    String to = getTo(searchCriteria.getEndTime());
+    String unit = getUnit(searchCriteria.getUnit());
 
     SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
 
@@ -355,7 +361,7 @@ public class AuditLogsManager extends ManagerBase<SolrAuditLogData, AuditLogResp
 
   }
 
-  public BarGraphDataListResponse getAnyGraphData(SearchCriteria searchCriteria) {
+  public BarGraphDataListResponse getAnyGraphData(AnyGraphSearchCriteria searchCriteria) {
     searchCriteria.addParam("fieldTime", LogSearchConstants.AUDIT_EVTTIME);
     SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
     BarGraphDataListResponse result = graphDataGenerator.getAnyGraphData(searchCriteria, auditSolrDao, solrQuery);
@@ -396,13 +402,13 @@ public class AuditLogsManager extends ManagerBase<SolrAuditLogData, AuditLogResp
   }
 
   @SuppressWarnings({"unchecked"})
-  public Response exportUserTableToTextFile(SearchCriteria searchCriteria) {
+  public Response exportUserTableToTextFile(UserExportSearchCriteria searchCriteria) {
     String jsonUserQuery =
       "{ Users: { type: terms, field: reqUser, facet:  {Repo: {  type: terms, field: repo, facet: {  eventCount: \"sum(event_count)\"}}}},x:{ type: terms,field: resource, facet: {y: {  type: terms, field: repo,facet: {  eventCount: \"sum(event_count)\"}}}}}";
 
     SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-    String startTime = (String) searchCriteria.getParamValue("startTime");
-    String endTime = (String) searchCriteria.getParamValue("endTime");
+    String startTime = searchCriteria.getStartTime();
+    String endTime = searchCriteria.getEndTime();
 
     startTime = startTime == null ? "" : startTime;
     endTime = endTime == null ? "" : "_" + endTime;
@@ -410,7 +416,7 @@ public class AuditLogsManager extends ManagerBase<SolrAuditLogData, AuditLogResp
     SolrUtil.setJSONFacet(solrQuery, jsonUserQuery);
     SolrUtil.setRowCount(solrQuery, 0);
 
-    String dataFormat = (String) searchCriteria.getParamValue("format");
+    String dataFormat = searchCriteria.getFormat();
     FileOutputStream fis = null;
     try {
       QueryResponse queryResponse = auditSolrDao.process(solrQuery);
@@ -543,7 +549,7 @@ public class AuditLogsManager extends ManagerBase<SolrAuditLogData, AuditLogResp
     return fieldWithBlank;
   }
 
-  public BarGraphDataListResponse getServiceLoad(SearchCriteria searchCriteria) {
+  public BarGraphDataListResponse getServiceLoad(CommonSearchCriteria searchCriteria) {
     BarGraphDataListResponse dataList = new BarGraphDataListResponse();
     Collection<BarGraphData> vaDatas = new ArrayList<BarGraphData>();
     dataList.setGraphData(vaDatas);

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicManager.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicManager.java
index 23f62aa..3ac2be6 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicManager.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicManager.java
@@ -22,9 +22,9 @@ package org.apache.ambari.logsearch.manager;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.ambari.logsearch.conf.AuthConfig;
 import org.apache.ambari.logsearch.model.response.NameValueData;
 import org.apache.ambari.logsearch.model.response.NameValueDataListResponse;
-import org.apache.ambari.logsearch.web.security.LogsearchSimpleAuthenticationProvider;
 import org.springframework.stereotype.Component;
 
 import javax.inject.Inject;
@@ -33,14 +33,14 @@ import javax.inject.Inject;
 public class PublicManager extends JsonManagerBase {
 
   @Inject
-  private LogsearchSimpleAuthenticationProvider simpleAuthenticationProvider;
+  private AuthConfig authConfig;
 
   public String getGeneralConfig() {
     NameValueDataListResponse nameValueList = new NameValueDataListResponse();
     List<NameValueData> nameValues = new ArrayList<>();
     NameValueData nameValue = new NameValueData();
     nameValue.setName("simpleAuth");
-    nameValue.setValue("" + simpleAuthenticationProvider.isEnable());
+    nameValue.setValue("" + authConfig.isAuthSimpleEnabled());
     nameValues.add(nameValue);
     nameValueList.setvNameValues(nameValues);
     return convertObjToString(nameValueList);

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
index c4d14a9..02309fc 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
@@ -43,7 +43,6 @@ import javax.ws.rs.core.Response;
 import org.apache.ambari.logsearch.common.ConfigHelper;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.MessageEnums;
-import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.conf.SolrServiceLogConfig;
 import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao;
 import org.apache.ambari.logsearch.graph.GraphDataGenerator;
@@ -64,6 +63,13 @@ import org.apache.ambari.logsearch.model.response.NodeListResponse;
 import org.apache.ambari.logsearch.model.response.ServiceLogData;
 import org.apache.ambari.logsearch.model.response.ServiceLogResponse;
 import org.apache.ambari.logsearch.query.QueryGenerationBase;
+import org.apache.ambari.logsearch.query.model.CommonServiceLogSearchCriteria;
+import org.apache.ambari.logsearch.query.model.ServiceAnyGraphSearchCriteria;
+import org.apache.ambari.logsearch.query.model.ServiceGraphSearchCriteria;
+import org.apache.ambari.logsearch.query.model.ServiceLogExportSearchCriteria;
+import org.apache.ambari.logsearch.query.model.ServiceLogFileSearchCriteria;
+import org.apache.ambari.logsearch.query.model.ServiceLogSearchCriteria;
+import org.apache.ambari.logsearch.query.model.ServiceLogTruncatedSearchCriteria;
 import org.apache.ambari.logsearch.solr.model.SolrComponentTypeLogData;
 import org.apache.ambari.logsearch.solr.model.SolrHostLogData;
 import org.apache.ambari.logsearch.solr.model.SolrServiceLogData;
@@ -83,7 +89,6 @@ import org.apache.solr.client.solrj.response.FacetField;
 import org.apache.solr.client.solrj.response.FacetField.Count;
 import org.apache.solr.client.solrj.response.PivotField;
 import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.client.solrj.response.RangeFacet;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrException;
@@ -114,10 +119,10 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
   @Inject
   private SolrServiceLogConfig solrServiceLogConfig;
 
-  public ServiceLogResponse searchLogs(SearchCriteria searchCriteria) {
-    String keyword = (String) searchCriteria.getParamValue("keyword");
-    String logId = (String) searchCriteria.getParamValue("sourceLogId");
-    Boolean isLastPage = (Boolean) searchCriteria.getParamValue("isLastPage");
+  public ServiceLogResponse searchLogs(ServiceLogSearchCriteria searchCriteria) {
+    String keyword = searchCriteria.getKeyword();
+    String logId = searchCriteria.getSourceLogId();
+    Boolean isLastPage = searchCriteria.isLastPage();
 
     if (!StringUtils.isBlank(keyword)) {
       try {
@@ -203,7 +208,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     return getFields(LogSearchConstants.SOLR_COMPONENT, SolrComponentTypeLogData.class);
   }
 
-  public GraphDataListResponse getAggregatedInfo(SearchCriteria searchCriteria) {
+  public GraphDataListResponse getAggregatedInfo(CommonServiceLogSearchCriteria searchCriteria) {
     SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
     String hierarchy = "host,type,level";
     GraphDataListResponse graphInfo = new GraphDataListResponse();
@@ -401,7 +406,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     return extensionTree;
   }
 
-  public NodeListResponse getTreeExtension(SearchCriteria searchCriteria) {
+  public NodeListResponse getTreeExtension(ServiceLogFileSearchCriteria searchCriteria) {
     SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
     solrQuery.setParam("event", "/getTreeExtension");
 
@@ -464,7 +469,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     return list;
   }
 
-  public NodeListResponse getHostListByComponent(SearchCriteria searchCriteria) {
+  public NodeListResponse getHostListByComponent(ServiceLogFileSearchCriteria searchCriteria) {
     SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
     solrQuery.setParam("event", "/service/hosts/components");
 
@@ -524,7 +529,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     }
   }
 
-  public NameValueDataListResponse getLogsLevelCount(SearchCriteria sc) {
+  public NameValueDataListResponse getLogsLevelCount(ServiceLogFileSearchCriteria sc) {
     NameValueDataListResponse nameValueList = new NameValueDataListResponse();
     SolrQuery query = queryGenerator.commonServiceFilterQuery(sc);
     query.setParam("event", "/service/logs/levels/counts/namevalues");
@@ -584,11 +589,11 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     return list;
   }
 
-  public LogListResponse getPageByKeyword(SearchCriteria searchCriteria)
+  public LogListResponse getPageByKeyword(ServiceLogSearchCriteria searchCriteria)
     throws SolrServerException {
     String defaultChoice = "0";
 
-    String key = (String) searchCriteria.getParamValue("keyword");
+    String key = (String) searchCriteria.getKeyword();
     if(StringUtils.isBlank(key)){
       throw RESTErrorUtil.createRESTException("Keyword was not given",
           MessageEnums.DATA_NOT_FOUND);
@@ -603,7 +608,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     keyword = "*" + keyword + "*";
 
 
-    String keyType = (String) searchCriteria.getParamValue("keywordType");
+    String keyType = (String) searchCriteria.getKeywordType();
     QueryResponse queryResponse = null;
 
     if (!defaultChoice.equals(keyType)) {
@@ -689,9 +694,8 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
         }
 
         // Keyword Sequence Number Calculation
-        String endTime = (String) searchCriteria.getParamValue("to");
-        String startTime = (String) searchCriteria
-          .getParamValue("from");
+        String endTime = searchCriteria.getTo();
+        String startTime = searchCriteria.getFrom();
         SolrQuery logTimeThroughRangeQuery = queryGenerator
           .commonServiceFilterQuery(searchCriteria);
         logTimeThroughRangeQuery.remove("start");
@@ -809,8 +813,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
         rangeLogQuery.remove("rows");
         logIdQuery.setStart(start);
         logIdQuery.setRows(searchCriteria.getMaxRows());
-        LogListResponse logResponse = getLogAsPaginationProvided(logIdQuery, serviceLogsSolrDao);
-        return logResponse;
+        return getLogAsPaginationProvided(logIdQuery, serviceLogsSolrDao);
 
       } catch (Exception e) {
         //do nothing
@@ -899,9 +902,8 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
 
 
         // Keyword LogTime Calculation
-        String endTime = (String) searchCriteria.getParamValue("to");
-        String startTime = (String) searchCriteria
-          .getParamValue("from");
+        String endTime = (String) searchCriteria.getTo();
+        String startTime = searchCriteria.getFrom();
         SolrQuery logTimeThroughRangeQuery = queryGenerator
           .commonServiceFilterQuery(searchCriteria);
         logTimeThroughRangeQuery.remove("start");
@@ -1034,7 +1036,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
         MessageEnums.ERROR_SYSTEM);
   }
 
-  private LogSearchResponse getPageByLogId(SearchCriteria searchCriteria) {
+  private LogSearchResponse getPageByLogId(ServiceLogSearchCriteria searchCriteria) {
     LogSearchResponse logResponse = new ServiceLogResponse();
     String endLogTime = (String) searchCriteria.getParamValue("to");
     if(StringUtils.isBlank(endLogTime)){
@@ -1135,68 +1137,14 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
   }
 
   @SuppressWarnings("unchecked")
-  public List<NameValueData> getHistogramCounts(SolrQuery solrQuery,
-                                             String from, String to, String unit) {
-    List<NameValueData> logsCounts = new ArrayList<>();
-    try {
-
-      SolrUtil.setFacetRange(solrQuery, LogSearchConstants.LOGTIME,
-        from, to, unit);
-
-      List<RangeFacet.Count> logLevelCounts = null;
-
-      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-      if(response == null){
-        return logsCounts;
-      }
-      @SuppressWarnings("rawtypes")
-      List<RangeFacet> rangeFacetList = response.getFacetRanges();
-      if (rangeFacetList == null) {
-        return logsCounts;
-
-      }
-
-      @SuppressWarnings("rawtypes")
-      RangeFacet rangeFacet=rangeFacetList.get(0);
-      if (rangeFacet == null) {
-        return logsCounts;
-      }
-      logLevelCounts = rangeFacet.getCounts();
-
-      if(logLevelCounts == null){
-        return logsCounts;
-      }
-      for (RangeFacet.Count logCount : logLevelCounts) {
-        NameValueData nameValue = new NameValueData();
-        nameValue.setName(logCount.getValue());
-        nameValue.setValue("" + logCount.getCount());
-        logsCounts.add(nameValue);
-      }
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-    }
-    return logsCounts;
-  }
-
-  public List<Count> getFacetCountsByDate(SolrQuery solrQuery,
-                                          String facetField) throws SolrServerException, IOException,
-    SolrException {
-
-    QueryResponse response = serviceLogsSolrDao.process(solrQuery);
-
-    FacetField field = response.getFacetDate(facetField);
-    return field.getValues();
-  }
-
-  @SuppressWarnings("unchecked")
-  public BarGraphDataListResponse getHistogramData(SearchCriteria searchCriteria) {
+  public BarGraphDataListResponse getHistogramData(ServiceGraphSearchCriteria searchCriteria) {
     String deafalutValue = "0";
     BarGraphDataListResponse dataList = new BarGraphDataListResponse();
     SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
     solrQuery.set("event", "/audit/logs/histogram");
-    String from = getFrom((String) searchCriteria.getParamValue("from"));
-    String to = getTo((String) searchCriteria.getParamValue("to"));
-    String unit = getUnit((String) searchCriteria.getParamValue("unit"));
+    String from = getFrom(searchCriteria.getFrom());
+    String to = getTo(searchCriteria.getTo());
+    String unit = getUnit(searchCriteria.getUnit());
 
     List<BarGraphData> histogramData = new ArrayList<>();
 
@@ -1267,16 +1215,6 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     }
   }
 
-  public void arrangeLevel(String level,
-                           List<BarGraphData> histogramDataLocal,
-                           List<BarGraphData> histogramData) {
-    for (BarGraphData histData : histogramData) {
-      if (histData != null && level.equals(histData.getName())) {
-        histogramDataLocal.add(histData);
-      }
-    }
-  }
-
   public String cancelFindRequestByDate(String uniqueId) {
     if (StringUtils.isEmpty(uniqueId)) {
       logger.error("Unique id is Empty");
@@ -1305,13 +1243,13 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     return true;
   }
 
-  public Response exportToTextFile(SearchCriteria searchCriteria) {
+  public Response exportToTextFile(ServiceLogExportSearchCriteria searchCriteria) {
     String defaultFormat = "text";
     SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
-    String from = (String) searchCriteria.getParamValue("from");
-    String to = (String) searchCriteria.getParamValue("to");
-    String utcOffset = (String) searchCriteria.getParamValue("utcOffset");
-    String format = (String) searchCriteria.getParamValue("format");
+    String from = searchCriteria.getFrom();
+    String to = searchCriteria.getTo();
+    String utcOffset = searchCriteria.getUtcOffset();
+    String format = searchCriteria.getFormat();
 
     format = defaultFormat.equalsIgnoreCase(format) && format != null ? ".txt"
         : ".json";
@@ -1449,7 +1387,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     }
   }
 
-  public NodeListResponse getComponentListWithLevelCounts(SearchCriteria searchCriteria) {
+  public NodeListResponse getComponentListWithLevelCounts(ServiceLogFileSearchCriteria searchCriteria) {
     SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
     solrQuery.setParam("event", "/service/logs/components/levels/counts");
 
@@ -1670,7 +1608,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     }
   }
 
-  public BarGraphDataListResponse getAnyGraphData(SearchCriteria searchCriteria) {
+  public BarGraphDataListResponse getAnyGraphData(ServiceAnyGraphSearchCriteria searchCriteria) {
     searchCriteria.addParam("fieldTime", LogSearchConstants.LOGTIME);
     SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
     BarGraphDataListResponse result = graphDataGenerator.getAnyGraphData(searchCriteria, serviceLogsSolrDao, solrQuery);
@@ -1681,7 +1619,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
 
   }
 
-  public ServiceLogResponse getAfterBeforeLogs(SearchCriteria searchCriteria) {
+  public ServiceLogResponse getAfterBeforeLogs(ServiceLogTruncatedSearchCriteria searchCriteria) {
     ServiceLogResponse logResponse = new ServiceLogResponse();
     List<SolrServiceLogData> docList = null;
     String id = (String) searchCriteria
@@ -1692,11 +1630,11 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     }
     String maxRows = "";
 
-    maxRows = (String) searchCriteria.getParamValue("numberRows");
+    maxRows = searchCriteria.getNumberRows();
     if (StringUtils.isBlank(maxRows)){
       maxRows = ""+maxRows;
     }
-    String scrollType = (String) searchCriteria.getParamValue("scrollType");
+    String scrollType = searchCriteria.getScrollType();
     if(StringUtils.isBlank(scrollType)){
       scrollType = "";
     }
@@ -1803,7 +1741,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     searchCriteria.addParam(LogSearchConstants.SORT, sortOrder);
     queryGenerator.setMultipleSortOrder(solrQuery, searchCriteria);
 
-    return (ServiceLogResponse) getLogAsPaginationProvided(solrQuery, serviceLogsSolrDao);
+    return getLogAsPaginationProvided(solrQuery, serviceLogsSolrDao);
   }
 
   private ServiceLogResponse whenScrollDown(SearchCriteria searchCriteria,
@@ -1835,7 +1773,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     searchCriteria.addParam(LogSearchConstants.SORT, sortOrder);
     queryGenerator.setMultipleSortOrder(solrQuery, searchCriteria);
 
-    return (ServiceLogResponse) getLogAsPaginationProvided(solrQuery, serviceLogsSolrDao);
+    return getLogAsPaginationProvided(solrQuery, serviceLogsSolrDao);
   }
 
   @Scheduled(cron = "${logsearch.solr.warming.cron}")

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogData.java
index 6df56a1..41eca1e 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogData.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogData.java
@@ -98,9 +98,9 @@ public interface AuditLogData extends CommonLogData {
   void setRepo(String repo);
 
   @JsonProperty("repoType")
-  String getRepoType();
+  Integer getRepoType();
 
-  void setRepoType(String repoType);
+  void setRepoType(Integer repoType);
 
   @JsonProperty("reqData")
   String getRequestData();

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
index 10224bc..9ea29c2 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
@@ -30,6 +30,9 @@ import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.conf.SolrAuditLogConfig;
 import org.apache.ambari.logsearch.conf.SolrServiceLogConfig;
+import org.apache.ambari.logsearch.query.model.AuditLogSearchCriteria;
+import org.apache.ambari.logsearch.query.model.CommonSearchCriteria;
+import org.apache.ambari.logsearch.query.model.CommonServiceLogSearchCriteria;
 import org.apache.ambari.logsearch.query.model.SearchCriteria;
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
 import org.apache.ambari.logsearch.manager.ManagerBase.LogType;
@@ -59,32 +62,33 @@ public class QueryGeneration extends QueryGenerationBase {
   @Inject
   private SolrAuditLogConfig solrAuditLogConfig;
 
-  public SolrQuery commonServiceFilterQuery(SearchCriteria searchCriteria) {
+  public SolrQuery commonServiceFilterQuery(CommonServiceLogSearchCriteria searchCriteria) {
     LogType logType = LogType.SERVICE;
     SolrQuery solrQuery = new SolrQuery();
-    String treeParams = (String) searchCriteria.getParamValue("treeParams");
-    String givenQuery = (String) searchCriteria.getParamValue("q");
-    String level = (String) searchCriteria.getParamValue("level");
-    String startTime = (String) searchCriteria.getParamValue("from");
-    String endTime = (String) searchCriteria.getParamValue("to");
-    String iMessage = (String) searchCriteria.getParamValue("iMessage");
-    String eMessage = (String) searchCriteria.getParamValue("eMessage");
+    String advQuery = (String) searchCriteria.getParamValue("advanceSearch"); // TODO: check these are used from the UI or not
     String gEmessage = (String) searchCriteria.getParamValue("gEMessage");
-    String selectedComp = (String) searchCriteria.getParamValue("selectComp");
-    String bundleId = (String) searchCriteria.getParamValue(LogSearchConstants.BUNDLE_ID);
     String globalExcludeComp = (String) searchCriteria.getParamValue("gMustNot");
     String unselectedComp = (String) searchCriteria.getParamValue("unselectComp");
-    String urlHostName = (String) searchCriteria.getParamValue("host_name");
-    String urlComponentName = (String) searchCriteria.getParamValue("component_name");
-    String file_name = (String) searchCriteria.getParamValue("file_name");
-    String advQuery = (String) searchCriteria.getParamValue("advanceSearch");
+
+    String treeParams = searchCriteria.getTreeParams();
+    String givenQuery = (String) searchCriteria.getParamValue("q");
+    String level = searchCriteria.getLevel();
+    String startTime = searchCriteria.getFrom();
+    String endTime = searchCriteria.getTo();
+    String iMessage = searchCriteria.getIncludeMessage();
+    String eMessage = searchCriteria.getExcludeMessage();
+    String selectedComp = searchCriteria.getSelectComp();
+    String bundleId = searchCriteria.getBundleId();
+    String urlHostName = searchCriteria.getHostName();
+    String urlComponentName = searchCriteria.getComponentName();
+    String file_name = searchCriteria.getFileName();
     // build advance query
     if (!StringUtils.isBlank(advQuery)) {
       String advQueryParameters[] = advQuery.split(Pattern.quote("}{"));
       SolrQuery advSolrQuery = new SolrQuery();
       for (String queryParam : advQueryParameters) {
         String params[] = queryParam.split(Pattern.quote("="));
-        if (params != null && params.length > 1)
+        if (params.length > 1)
           advSolrQuery.setParam(params[0], params[1]);
       }
       setFilterClauseWithFieldName(advSolrQuery, level, LogSearchConstants.SOLR_LEVEL, "", Condition.OR);
@@ -226,18 +230,20 @@ public class QueryGeneration extends QueryGenerationBase {
     }
   }
 
-  public SolrQuery commonAuditFilterQuery(SearchCriteria searchCriteria) {
+  public SolrQuery commonAuditFilterQuery(CommonSearchCriteria searchCriteria) {
     LogType logType = LogType.AUDIT;
     SolrQuery solrQuery = new SolrQuery();
     solrQuery.setQuery("*:*");
-    String startTime = (String) searchCriteria.getParamValue("startTime");
-    String endTime = (String) searchCriteria.getParamValue("endTime");
-    String selectedComp = (String) searchCriteria.getParamValue("includeString");
+
+    String globalExcludeComp = (String) searchCriteria.getParamValue("gMustNot"); // TODO: check this are used from UI or not
+    String unselectedComp = (String) searchCriteria.getParamValue("unselectComp");
+
+    String startTime = searchCriteria.getStartTime();
+    String endTime = searchCriteria.getEndTime();
+    String selectedComp = searchCriteria.getMustBe();
     setFilterClauseWithFieldName(solrQuery, selectedComp, LogSearchConstants.AUDIT_COMPONENT, LogSearchConstants.NO_OPERATOR, Condition.OR);
-    String globalExcludeComp = (String) searchCriteria.getParamValue("gMustNot");
     setUserSpecificFilter(searchCriteria, solrQuery, LogSearchConstants.INCLUDE_QUERY, LogSearchConstants.INCLUDE_QUERY, logType);
     setUserSpecificFilter(searchCriteria, solrQuery, LogSearchConstants.EXCLUDE_QUERY, LogSearchConstants.EXCLUDE_QUERY, logType);
-    String unselectedComp = (String) searchCriteria.getParamValue("unselectComp");
     setFilterClauseWithFieldName(solrQuery, globalExcludeComp, LogSearchConstants.AUDIT_COMPONENT, LogSearchConstants.MINUS_OPERATOR, Condition.AND);
     setFilterClauseWithFieldName(solrQuery, unselectedComp, LogSearchConstants.AUDIT_COMPONENT, LogSearchConstants.MINUS_OPERATOR, Condition.AND);
     setSingleRangeFilter(solrQuery, LogSearchConstants.AUDIT_EVTTIME, startTime, endTime);

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/SearchCriteriaConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/SearchCriteriaConstants.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/SearchCriteriaConstants.java
new file mode 100644
index 0000000..c67d3cd
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/SearchCriteriaConstants.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query;
+
+public class SearchCriteriaConstants {
+
+  private SearchCriteriaConstants() {
+  }
+
+  public static final String PARAM_FIELD = "field";
+  public static final String PARAM_UNIT = "unit";
+  public static final String PARAM_INCLUDE_MESSAGE = "iMessage";
+  public static final String PARAM_EXCLUDE_MESSAGE = "eMessage";
+  public static final String PARAM_MUST_BE_STRING = "includeString";
+  public static final String PARAM_MUST_NOT_STRING = "unselectComp";
+  public static final String PARAM_EXCLUDE_QUERY = "excludeQuery";
+  public static final String PARAM_INCLUDE_QUERY = "includeQuery";
+  public static final String PARAM_START_TIME = "startTime";
+  public static final String PARAM_END_TIME = "endTime";
+
+  public static final String PARAM_IS_LAST_PAGE = "isLastPage";
+
+  public static final String PARAM_GLOBAL_START_TIME = "globalStartTime";
+  public static final String PARAM_GLOBAL_END_TIME = "globalEndTime";
+
+  public static final String PARAM_X_AXIS = "xAxis";
+  public static final String PARAM_Y_AXIS = "yAxis";
+  public static final String PARAM_STACK_BY = "stackBy";
+  public static final String PARAM_FROM = "from";
+  public static final String PARAM_TO = "to";
+
+  public static final String PARAM_LOG_FILE_COMPONENT = "component";
+  public static final String PARAM_LOG_FILE_HOST = "host";
+  public static final String PARAM_LOG_FILE_TYPE = "logType";
+  public static final String PARAM_LOG_TAIL_SIZE = "tailSize";
+
+  public static final String PARAM_COMPONENT_NAME = "component_name";
+  public static final String PARAM_HOST_NAME = "host_name";
+  public static final String PARAM_FILE_NAME = "file_name";
+  public static final String PARAM_BUNDLE_ID = "bundle_id";
+  public static final String PARAM_SELECT_COMP = "selectComp";
+  public static final String PARAM_LEVEL = "level";
+  public static final String PARAM_TREE_PARAMS = "treeParams";
+
+  public static final String PARAM_HOST_LOG_FILE = "hostLogFile";
+  public static final String PARAM_COMPONENT_LOG_FILE = "compLogFile";
+  public static final String PARAM_ID = "id";
+  public static final String PARAM_SCROLL_TYPE = "scrollType";
+  public static final String PARAM_NUMBER_ROWS = "numberRows";
+
+  public static final String PARAM_FORMAT = "format";
+  public static final String PARAM_UTC_OFFSET = "utcOffset";
+  public static final String PARAM_KEYWORD = "keyword";
+  public static final String PARAM_SOURCE_LOG_ID = "sourceLogId";
+  public static final String PARAM_KEYWORD_TYPE = "keywordType";
+  public static final String PARAM_TOKEN = "token";
+
+  public static final String PARAM_USER_NAME = "username";
+  public static final String PARAM_FILTER_NAME = "filtername";
+  public static final String PARAM_ROW_TYPE = "rowtype";
+
+}
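
The new SearchCriteriaConstants class centralizes the parameter names that were previously passed around as bare string literals (e.g. "keyword", "isLastPage"). A small self-contained sketch of the intended usage is below; the SearchCriteria stub exists only to make the example compile and is not the real class from the query.model package.

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.ambari.logsearch.query.SearchCriteriaConstants;

    public class SearchCriteriaConstantsUsageSketch {

      // Minimal stand-in for the real SearchCriteria, just enough for the sketch to compile.
      static class SearchCriteria {
        private final Map<String, Object> params = new HashMap<>();
        public void addParam(String name, Object value) { params.put(name, value); }
        public Object getParamValue(String name) { return params.get(name); }
      }

      public static void main(String[] args) {
        SearchCriteria criteria = new SearchCriteria();
        // The shared constant replaces the bare "keyword" literal used throughout the managers.
        criteria.addParam(SearchCriteriaConstants.PARAM_KEYWORD, "Exception");
        System.out.println(criteria.getParamValue(SearchCriteriaConstants.PARAM_KEYWORD));
      }
    }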

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonAuditLogRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonAuditLogRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonAuditLogRequestConverter.java
index 798bd47..e25eb51 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonAuditLogRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonAuditLogRequestConverter.java
@@ -32,15 +32,15 @@ public abstract class AbstractCommonAuditLogRequestConverter<SOURCE extends Base
     RESULT criteria = createCriteria(request);
     criteria.addParam("q", request.getQuery());
     criteria.addParam("columnQuery", StringEscapeUtils.unescapeXml(request.getColumnQuery()));
-    criteria.addParam("iMessage", StringEscapeUtils.unescapeXml(request.getiMessage()));
     criteria.addParam("gEMessage", StringEscapeUtils.unescapeXml(request.getgEMessage()));
-    criteria.addParam("eMessage", StringEscapeUtils.unescapeXml(request.getgEMessage()));
-    criteria.addParam("includeString", request.getMustBe());
-    criteria.addParam("unselectComp", request.getMustNot());
-    criteria.addParam("excludeQuery", StringEscapeUtils.unescapeXml(request.getExcludeQuery()));
-    criteria.addParam("includeQuery", StringEscapeUtils.unescapeXml(request.getIncludeQuery()));
-    criteria.addParam("startTime", request.getFrom());
-    criteria.addParam("endTime", request.getTo());
+    criteria.setIncludeMessage(StringEscapeUtils.unescapeXml(request.getiMessage()));
+    criteria.setExcludeMessage(StringEscapeUtils.unescapeXml(request.getgEMessage()));
+    criteria.setMustBe(request.getMustBe());
+    criteria.setMustNot(request.getMustNot());
+    criteria.setExcludeQuery(StringEscapeUtils.unescapeXml(request.getExcludeQuery()));
+    criteria.setIncludeQuery(StringEscapeUtils.unescapeXml(request.getIncludeQuery()));
+    criteria.setStartTime(request.getFrom());
+    criteria.setEndTime(request.getTo());
     return criteria;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonSearchRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonSearchRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonSearchRequestConverter.java
index 3fd07e2..ea2c28a 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonSearchRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonSearchRequestConverter.java
@@ -21,10 +21,12 @@ package org.apache.ambari.logsearch.query.converter;
 import org.apache.ambari.logsearch.model.request.impl.CommonSearchRequest;
 import org.apache.ambari.logsearch.query.model.CommonSearchCriteria;
 import org.apache.commons.lang.StringUtils;
-import org.springframework.core.convert.converter.Converter;
+
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_GLOBAL_END_TIME;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_GLOBAL_START_TIME;
 
 public abstract class AbstractCommonSearchRequestConverter<SOURCE extends CommonSearchRequest, RESULT extends CommonSearchCriteria>
-  implements Converter<SOURCE, RESULT> {
+  extends AbstractConverterAware<SOURCE, RESULT> {
 
   @Override
   public RESULT convert(SOURCE source) {
@@ -43,11 +45,11 @@ public abstract class AbstractCommonSearchRequestConverter<SOURCE extends Common
     criteria.setSortType(request.getSortType());
     if (StringUtils.isNotEmpty(request.getStartTime())){
       criteria.setGlobalStartTime(request.getStartTime());
-      criteria.getUrlParamMap().put("globalStartTime", request.getStartTime());
+      criteria.getUrlParamMap().put(PARAM_GLOBAL_START_TIME, request.getStartTime());
     }
     if (StringUtils.isNotEmpty(request.getEndTime())){
       criteria.setGlobalEndTime(request.getEndTime());
-      criteria.getUrlParamMap().put("globalEndTime", request.getEndTime());
+      criteria.getUrlParamMap().put(PARAM_GLOBAL_END_TIME, request.getEndTime());
     }
   }
 }
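
The hunk above swaps the "globalStartTime"/"globalEndTime" string literals for constants imported from SearchCriteriaConstants. The actual holder class is not part of this excerpt; a minimal sketch, assuming its values simply mirror the literals they replace, would be:

public final class SearchCriteriaConstants {
  // Values assumed to match the removed string literals above.
  public static final String PARAM_GLOBAL_START_TIME = "globalStartTime";
  public static final String PARAM_GLOBAL_END_TIME = "globalEndTime";

  private SearchCriteriaConstants() {
    // constants holder, never instantiated
  }
}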


[31/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NodeData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NodeData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NodeData.java
new file mode 100644
index 0000000..41d8a31
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NodeData.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+import java.util.Collection;
+
+@ApiModel
+@JsonInclude(value = JsonInclude.Include.NON_NULL)
+public class NodeData {
+
+  @ApiModelProperty
+  private String name;
+
+  @ApiModelProperty
+  private String type;
+
+  @ApiModelProperty
+  private String value;
+
+  @ApiModelProperty
+  private Collection<NodeData> childs;
+
+  @ApiModelProperty
+  private Collection<NameValueData> logLevelCount;
+
+  @ApiModelProperty
+  @JsonProperty("isParent")
+  private boolean parent;
+
+  @ApiModelProperty
+  @JsonProperty("isRoot")
+  private boolean root;
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  public String getValue() {
+    return value;
+  }
+
+  public void setValue(String value) {
+    this.value = value;
+  }
+
+  public boolean isRoot() {
+    return root;
+  }
+
+  public void setRoot(boolean root) {
+    this.root = root;
+  }
+
+  public Collection<NodeData> getChilds() {
+    return childs;
+  }
+
+  public void setChilds(Collection<NodeData> childs) {
+    this.childs = childs;
+  }
+
+  public Collection<NameValueData> getLogLevelCount() {
+    return logLevelCount;
+  }
+
+  public void setLogLevelCount(Collection<NameValueData> logLevelCount) {
+    this.logLevelCount = logLevelCount;
+  }
+
+  public boolean isParent() {
+    return parent;
+  }
+
+  public void setParent(boolean parent) {
+    this.parent = parent;
+  }
+}
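
NodeData above is a plain Jackson-annotated tree node used by the graph/tree responses. A short usage sketch, where the host and component names are illustrative only and NodeData is assumed to be on the classpath:

import org.apache.ambari.logsearch.model.response.NodeData;

import java.util.Arrays;

public class NodeDataExample {
  public static void main(String[] args) {
    NodeData host = new NodeData();
    host.setName("c6401.ambari.apache.org"); // illustrative host name
    host.setType("host");
    host.setRoot(true);
    host.setParent(true);

    NodeData component = new NodeData();
    component.setName("logsearch_app");      // illustrative component name
    component.setType("component");
    component.setValue("42");                // e.g. a count the UI renders for this node

    host.setChilds(Arrays.asList(component)); // serialized as "childs", with "isParent"/"isRoot" flags
    System.out.println(host.getChilds().size() + " child node(s) under " + host.getName());
  }
}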

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NodeListResponse.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NodeListResponse.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NodeListResponse.java
new file mode 100644
index 0000000..51044b3
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NodeListResponse.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+import java.util.ArrayList;
+import java.util.List;
+
+@ApiModel
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class NodeListResponse extends SearchResponse {
+
+  @ApiModelProperty
+  protected List<NodeData> vNodeList = new ArrayList<NodeData>();
+
+  public List<NodeData> getvNodeList() {
+    return vNodeList;
+  }
+
+  public void setvNodeList(List<NodeData> vNodeList) {
+    this.vNodeList = vNodeList;
+  }
+
+  @Override
+  public int getListSize() {
+    if (vNodeList == null) {
+      return 0;
+    }
+    return vNodeList.size();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/SearchResponse.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/SearchResponse.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/SearchResponse.java
new file mode 100644
index 0000000..dd88d29
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/SearchResponse.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+
+@JsonIgnoreProperties(ignoreUnknown = true)
+public abstract class SearchResponse {
+  /**
+   * Start index for the result
+   */
+  private int startIndex;
+  /**
+   * Page size used for the result
+   */
+  private int pageSize;
+  /**
+   * Total records in the database for the given search conditions
+   */
+  private long totalCount;
+  /**
+   * Number of rows returned for the search condition
+   */
+  private int resultSize;
+  /**
+   * Sort type, either asc or desc
+   */
+  private String sortType;
+  /**
+   * Comma-separated list of the fields for sorting
+   */
+  private String sortBy;
+
+  private long queryTimeMS = System.currentTimeMillis();
+
+  public int getStartIndex() {
+    return startIndex;
+  }
+
+  public int getPageSize() {
+    return pageSize;
+  }
+
+  public long getTotalCount() {
+    return totalCount;
+  }
+
+  public int getResultSize() {
+    return resultSize;
+  }
+
+  public String getSortType() {
+    return sortType;
+  }
+
+  public String getSortBy() {
+    return sortBy;
+  }
+
+  public long getQueryTimeMS() {
+    return queryTimeMS;
+  }
+
+  public void setStartIndex(int startIndex) {
+    this.startIndex = startIndex;
+  }
+
+  public void setPageSize(int pageSize) {
+    this.pageSize = pageSize;
+  }
+
+  public void setTotalCount(long totalCount) {
+    this.totalCount = totalCount;
+  }
+
+  public void setResultSize(int resultSize) {
+    this.resultSize = resultSize;
+  }
+
+  public void setSortType(String sortType) {
+    this.sortType = sortType;
+  }
+
+  public void setSortBy(String sortBy) {
+    this.sortBy = sortBy;
+  }
+
+  public void setQueryTimeMS(long queryTimeMS) {
+    this.queryTimeMS = queryTimeMS;
+  }
+
+  public abstract int getListSize();
+
+}
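
SearchResponse carries the paging and sorting metadata shared by every response type, while subclasses such as NodeListResponse (earlier in this commit) only contribute the payload list and getListSize(). A brief sketch of how the two fit together, with illustrative values:

import org.apache.ambari.logsearch.model.response.NodeData;
import org.apache.ambari.logsearch.model.response.NodeListResponse;

public class NodeListResponseExample {
  public static void main(String[] args) {
    NodeListResponse response = new NodeListResponse();
    response.setStartIndex(0);      // paging metadata inherited from SearchResponse
    response.setPageSize(25);
    response.setTotalCount(120L);

    NodeData node = new NodeData();
    node.setName("ambari-server");  // illustrative node
    response.getvNodeList().add(node);

    response.setResultSize(response.getListSize());
    System.out.println(response.getListSize() + " of " + response.getTotalCount() + " nodes in this page");
  }
}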

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/ServiceLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/ServiceLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/ServiceLogData.java
new file mode 100644
index 0000000..16aed16
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/ServiceLogData.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.util.Date;
+
+@JsonIgnoreProperties(ignoreUnknown = true)
+public interface ServiceLogData extends CommonLogData, ComponentTypeLogData, HostLogData {
+
+  @JsonProperty("level")
+  String getLevel();
+
+  void setLevel(String level);
+
+  @JsonProperty("line_number")
+  Integer getLineNumber();
+
+  void setLineNumber(Integer lineNumber);
+
+  @JsonProperty("logtime")
+  Date getLogTime();
+
+  void setLogTime(Date logTime);
+
+  @JsonProperty("ip")
+  String getIp();
+
+  void setIp(String ip);
+
+  @JsonProperty("path")
+  String getPath();
+
+  void setPath(String path);
+
+  @JsonProperty("type")
+  String getType();
+
+  void setType(String type);
+
+  @JsonProperty("host")
+  String getHost();
+
+  void setHost(String host);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/ServiceLogResponse.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/ServiceLogResponse.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/ServiceLogResponse.java
new file mode 100644
index 0000000..2e689c1
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/ServiceLogResponse.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+import java.util.List;
+
+@ApiModel
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class ServiceLogResponse extends LogSearchResponse<ServiceLogData> {
+
+  @ApiModelProperty
+  private List<ServiceLogData> logList;
+
+  @Override
+  public List<ServiceLogData> getLogList() {
+    return logList;
+  }
+
+  @Override
+  public void setLogList(List<ServiceLogData> logList) {
+    this.logList = logList;
+  }
+
+  @Override
+  public int getListSize() {
+    return logList == null ? 0 : logList.size();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
index 16cf932..10224bc 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
@@ -28,9 +28,11 @@ import java.util.regex.Pattern;
 import org.apache.ambari.logsearch.common.ConfigHelper;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.PropertiesHelper;
-import org.apache.ambari.logsearch.common.SearchCriteria;
+import org.apache.ambari.logsearch.conf.SolrAuditLogConfig;
+import org.apache.ambari.logsearch.conf.SolrServiceLogConfig;
+import org.apache.ambari.logsearch.query.model.SearchCriteria;
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
-import org.apache.ambari.logsearch.manager.MgrBase.LogType;
+import org.apache.ambari.logsearch.manager.ManagerBase.LogType;
 import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.SolrUtil;
 import org.apache.commons.lang.StringUtils;
@@ -44,11 +46,19 @@ import org.apache.solr.schema.TrieFloatField;
 import org.apache.solr.schema.TrieLongField;
 import org.springframework.stereotype.Component;
 
+import javax.inject.Inject;
+
 @Component
 public class QueryGeneration extends QueryGenerationBase {
 
   private static Logger logger = Logger.getLogger(QueryGeneration.class);
 
+  @Inject
+  private SolrServiceLogConfig solrServiceLogConfig;
+
+  @Inject
+  private SolrAuditLogConfig solrAuditLogConfig;
+
   public SolrQuery commonServiceFilterQuery(SearchCriteria searchCriteria) {
     LogType logType = LogType.SERVICE;
     SolrQuery solrQuery = new SolrQuery();
@@ -349,10 +359,10 @@ public class QueryGeneration extends QueryGenerationBase {
     String originalKey;
     switch (logType) {
     case AUDIT:
-      originalKey = ConfigHelper.auditLogsColumnMapping.get(key + LogSearchConstants.UI_SUFFIX);
+      originalKey = solrAuditLogConfig.getSolrAndUiColumns().get(key + LogSearchConstants.UI_SUFFIX);
       break;
     case SERVICE:
-      originalKey = ConfigHelper.serviceLogsColumnMapping.get(key + LogSearchConstants.UI_SUFFIX);
+      originalKey = solrServiceLogConfig.getSolrAndUiColumns().get(key + LogSearchConstants.UI_SUFFIX);
       break;
     default:
       originalKey = null;
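
The hunk above replaces the static ConfigHelper column maps with the injected SolrAuditLogConfig/SolrServiceLogConfig beans; getSolrAndUiColumns() is read as a lookup from a UI column name (suffixed with LogSearchConstants.UI_SUFFIX) to the underlying Solr field. The suffix value and the mapping entry below are assumptions used only to illustrate that lookup:

import java.util.HashMap;
import java.util.Map;

public class ColumnLookupSketch {
  private static final String UI_SUFFIX = "_ui"; // assumed; the real value lives in LogSearchConstants

  public static void main(String[] args) {
    Map<String, String> solrAndUiColumns = new HashMap<String, String>();
    solrAndUiColumns.put("logMessage" + UI_SUFFIX, "log_message"); // illustrative UI-to-Solr mapping

    String key = "logMessage";
    String originalKey = solrAndUiColumns.get(key + UI_SUFFIX);
    System.out.println(key + " -> " + (originalKey != null ? originalKey : key));
  }
}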

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
index 77d4969..d7d30d7 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
@@ -23,7 +23,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.ambari.logsearch.common.LogSearchConstants;
-import org.apache.ambari.logsearch.common.SearchCriteria;
+import org.apache.ambari.logsearch.query.model.SearchCriteria;
 import org.apache.ambari.logsearch.dao.AuditSolrDao;
 import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao;
 import org.apache.ambari.logsearch.util.SolrUtil;
@@ -31,21 +31,22 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrQuery.ORDER;
-import org.springframework.beans.factory.annotation.Autowired;
 
 import com.google.gson.Gson;
 
+import javax.inject.Inject;
+
 public abstract class QueryGenerationBase {
 
   private static final Logger logger = Logger.getLogger(QueryGenerationBase.class);
   
-  @Autowired
+  @Inject
   protected AuditSolrDao auditSolrDao;
   
-  @Autowired
+  @Inject
   protected ServiceLogsSolrDao serviceLogsSolrDao;
 
-  public static enum Condition {
+  public enum Condition {
     OR, AND
   }
 
@@ -98,7 +99,6 @@ public abstract class QueryGenerationBase {
         solrQuery.addFilterQuery(filterQuery);
         logger.debug("Filter added :- " + filterQuery);
       }
-
     }
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonAuditLogRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonAuditLogRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonAuditLogRequestConverter.java
new file mode 100644
index 0000000..798bd47
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonAuditLogRequestConverter.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.BaseAuditLogRequest;
+import org.apache.ambari.logsearch.query.model.CommonSearchCriteria;
+import org.apache.commons.lang.StringEscapeUtils;
+import org.springframework.stereotype.Component;
+
+@Component
+public abstract class AbstractCommonAuditLogRequestConverter<SOURCE extends BaseAuditLogRequest, RESULT extends CommonSearchCriteria>
+  extends AbstractCommonSearchRequestConverter<SOURCE, RESULT> {
+
+  @Override
+  public RESULT convertToSearchCriteria(SOURCE request) {
+    RESULT criteria = createCriteria(request);
+    criteria.addParam("q", request.getQuery());
+    criteria.addParam("columnQuery", StringEscapeUtils.unescapeXml(request.getColumnQuery()));
+    criteria.addParam("iMessage", StringEscapeUtils.unescapeXml(request.getiMessage()));
+    criteria.addParam("gEMessage", StringEscapeUtils.unescapeXml(request.getgEMessage()));
+    criteria.addParam("eMessage", StringEscapeUtils.unescapeXml(request.getgEMessage()));
+    criteria.addParam("includeString", request.getMustBe());
+    criteria.addParam("unselectComp", request.getMustNot());
+    criteria.addParam("excludeQuery", StringEscapeUtils.unescapeXml(request.getExcludeQuery()));
+    criteria.addParam("includeQuery", StringEscapeUtils.unescapeXml(request.getIncludeQuery()));
+    criteria.addParam("startTime", request.getFrom());
+    criteria.addParam("endTime", request.getTo());
+    return criteria;
+  }
+
+  public abstract RESULT createCriteria(SOURCE request);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonSearchRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonSearchRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonSearchRequestConverter.java
new file mode 100644
index 0000000..3fd07e2
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonSearchRequestConverter.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.CommonSearchRequest;
+import org.apache.ambari.logsearch.query.model.CommonSearchCriteria;
+import org.apache.commons.lang.StringUtils;
+import org.springframework.core.convert.converter.Converter;
+
+public abstract class AbstractCommonSearchRequestConverter<SOURCE extends CommonSearchRequest, RESULT extends CommonSearchCriteria>
+  implements Converter<SOURCE, RESULT> {
+
+  @Override
+  public RESULT convert(SOURCE source) {
+    RESULT criteria = convertToSearchCriteria(source);
+    addDefaultParams(source, criteria);
+    return criteria;
+  }
+
+  public abstract RESULT convertToSearchCriteria(SOURCE source);
+
+  private void addDefaultParams(SOURCE request, RESULT criteria) {
+    criteria.setStartIndex(StringUtils.isNumeric(request.getStartIndex()) ? new Integer(request.getStartIndex()) : 0);
+    criteria.setPage(StringUtils.isNumeric(request.getPage()) ? new Integer(request.getPage()) : 0);
+    criteria.setMaxRows(StringUtils.isNumeric(request.getPageSize()) ? new Integer(request.getPageSize()) : 50);
+    criteria.setSortBy(request.getSortBy());
+    criteria.setSortType(request.getSortType());
+    if (StringUtils.isNotEmpty(request.getStartTime())){
+      criteria.setGlobalStartTime(request.getStartTime());
+      criteria.getUrlParamMap().put("globalStartTime", request.getStartTime());
+    }
+    if (StringUtils.isNotEmpty(request.getEndTime())){
+      criteria.setGlobalEndTime(request.getEndTime());
+      criteria.getUrlParamMap().put("globalEndTime", request.getEndTime());
+    }
+  }
+}
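
AbstractCommonSearchRequestConverter above is a small template method around Spring's Converter: convert() delegates to convertToSearchCriteria() and then back-fills the shared paging, sorting and global time-range defaults. A hedged sketch of driving one of the concrete converters added later in this commit through a DefaultConversionService (the request setter names and default constructors are assumptions; registration in the real application happens elsewhere):

import org.apache.ambari.logsearch.model.request.impl.AnyGraphRequest;
import org.apache.ambari.logsearch.query.converter.AnyGraphRequestConverter;
import org.apache.ambari.logsearch.query.model.AnyGraphSearchCriteria;
import org.springframework.core.convert.support.DefaultConversionService;

public class ConverterWiringSketch {
  public static void main(String[] args) {
    DefaultConversionService conversionService = new DefaultConversionService();
    conversionService.addConverter(new AnyGraphRequestConverter()); // any subclass of the abstract converter

    AnyGraphRequest request = new AnyGraphRequest(); // default constructor assumed
    request.setStartIndex("0");                      // setter assumed from the getStartIndex() used above
    request.setPage("0");                            // setter assumed from getPage()

    AnyGraphSearchCriteria criteria = conversionService.convert(request, AnyGraphSearchCriteria.class);
    System.out.println("converted to " + criteria.getClass().getSimpleName());
  }
}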

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonServiceLogRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonServiceLogRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonServiceLogRequestConverter.java
new file mode 100644
index 0000000..a02d585
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonServiceLogRequestConverter.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.impl.BaseServiceLogRequest;
+import org.apache.ambari.logsearch.query.model.CommonSearchCriteria;
+import org.apache.commons.lang.StringEscapeUtils;
+
+public abstract class AbstractCommonServiceLogRequestConverter<SOURCE extends BaseServiceLogRequest, RESULT extends CommonSearchCriteria>
+  extends AbstractCommonSearchRequestConverter<SOURCE, RESULT> {
+
+  @Override
+  public RESULT convertToSearchCriteria(SOURCE request) {
+    RESULT criteria = createCriteria(request);
+    criteria.addParam("advanceSearch", StringEscapeUtils.unescapeXml(request.getAdvancedSearch()));
+    criteria.addParam("q", request.getQuery());
+    criteria.addParam("treeParams", StringEscapeUtils.unescapeHtml(request.getTreeParams()));
+    criteria.addParam("level", request.getLevel());
+    criteria.addParam("gMustNot", request.getgMustNot());
+    criteria.addParam("from", request.getFrom());
+    criteria.addParam("to", request.getTo());
+    criteria.addParam("selectComp", request.getMustBe());
+    criteria.addParam("unselectComp", request.getMustNot());
+    criteria.addParam("iMessage", StringEscapeUtils.unescapeXml(request.getiMessage()));
+    criteria.addParam("gEMessage", StringEscapeUtils.unescapeXml(request.getgEMessage()));
+    criteria.addParam("eMessage", StringEscapeUtils.unescapeXml(request.getgEMessage()));
+    criteria.addParam(LogSearchConstants.BUNDLE_ID, request.getBundleId());
+    criteria.addParam("host_name", request.getHostName());
+    criteria.addParam("component_name", request.getComponentName());
+    criteria.addParam("file_name", request.getFileName());
+    criteria.addParam("startDate", request.getStartTime());
+    criteria.addParam("endDate", request.getEndTime());
+    criteria.addParam("excludeQuery", StringEscapeUtils.unescapeXml(request.getExcludeQuery()));
+    criteria.addParam("includeQuery", StringEscapeUtils.unescapeXml(request.getIncludeQuery()));
+    return criteria;
+  }
+
+  public abstract RESULT createCriteria(SOURCE request);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AnyGraphRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AnyGraphRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AnyGraphRequestConverter.java
new file mode 100644
index 0000000..0372168
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AnyGraphRequestConverter.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.AnyGraphRequest;
+import org.apache.ambari.logsearch.query.model.AnyGraphSearchCriteria;
+import org.springframework.stereotype.Component;
+
+@Component
+public class AnyGraphRequestConverter extends AbstractCommonSearchRequestConverter<AnyGraphRequest, AnyGraphSearchCriteria> {
+
+  @Override
+  public AnyGraphSearchCriteria convertToSearchCriteria(AnyGraphRequest anyGraphRequest) {
+    AnyGraphSearchCriteria criteria = new AnyGraphSearchCriteria();
+    criteria.addParam("xAxis", anyGraphRequest.getxAxis());
+    criteria.addParam("yAxis", anyGraphRequest.getyAxis());
+    criteria.addParam("stackBy", anyGraphRequest.getStackBy());
+    criteria.addParam("unit", anyGraphRequest.getUnit());
+    criteria.addParam("from", anyGraphRequest.getFrom());
+    criteria.addParam("to", anyGraphRequest.getTo());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditBarGraphRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditBarGraphRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditBarGraphRequestConverter.java
new file mode 100644
index 0000000..f72a673
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditBarGraphRequestConverter.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.AuditBarGraphRequest;
+import org.apache.ambari.logsearch.query.model.AuditBarGraphSearchCriteria;
+import org.springframework.stereotype.Component;
+
+@Component
+public class AuditBarGraphRequestConverter extends AbstractCommonAuditLogRequestConverter<AuditBarGraphRequest, AuditBarGraphSearchCriteria>{
+
+  @Override
+  public AuditBarGraphSearchCriteria createCriteria(AuditBarGraphRequest request) {
+    AuditBarGraphSearchCriteria criteria = new AuditBarGraphSearchCriteria();
+    criteria.addParam("unit", request.getUnit());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditLogRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditLogRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditLogRequestConverter.java
new file mode 100644
index 0000000..27d314d
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditLogRequestConverter.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.AuditLogRequest;
+import org.apache.ambari.logsearch.query.model.AuditLogSearchCriteria;
+import org.springframework.stereotype.Component;
+
+@Component
+public class AuditLogRequestConverter extends AbstractCommonAuditLogRequestConverter<AuditLogRequest, AuditLogSearchCriteria> {
+
+  @Override
+  public AuditLogSearchCriteria createCriteria(AuditLogRequest request) {
+    AuditLogSearchCriteria criteria = new AuditLogSearchCriteria();
+    criteria.addParam("isLastPage", request.isLastPage());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/BaseAuditLogRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/BaseAuditLogRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/BaseAuditLogRequestConverter.java
new file mode 100644
index 0000000..35aceb2
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/BaseAuditLogRequestConverter.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.BaseAuditLogRequest;
+import org.apache.ambari.logsearch.query.model.CommonSearchCriteria;
+import org.springframework.stereotype.Component;
+
+@Component
+public class BaseAuditLogRequestConverter extends AbstractCommonAuditLogRequestConverter<BaseAuditLogRequest, CommonSearchCriteria> {
+
+  @Override
+  public CommonSearchCriteria createCriteria(BaseAuditLogRequest request) {
+    return new CommonSearchCriteria();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/BaseServiceLogRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/BaseServiceLogRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/BaseServiceLogRequestConverter.java
new file mode 100644
index 0000000..cfd544c
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/BaseServiceLogRequestConverter.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.BaseServiceLogRequest;
+import org.apache.ambari.logsearch.query.model.CommonSearchCriteria;
+import org.springframework.stereotype.Component;
+
+@Component
+public class BaseServiceLogRequestConverter extends AbstractCommonServiceLogRequestConverter<BaseServiceLogRequest, CommonSearchCriteria> {
+
+  @Override
+  public CommonSearchCriteria createCriteria(BaseServiceLogRequest request) {
+    return new CommonSearchCriteria();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldAuditLogRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldAuditLogRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldAuditLogRequestConverter.java
new file mode 100644
index 0000000..089b593
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldAuditLogRequestConverter.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.FieldAuditLogRequest;
+import org.apache.ambari.logsearch.query.model.FieldAuditLogSearchCriteria;
+import org.springframework.stereotype.Component;
+
+@Component
+public class FieldAuditLogRequestConverter extends AbstractCommonAuditLogRequestConverter<FieldAuditLogRequest, FieldAuditLogSearchCriteria> {
+
+  @Override
+  public FieldAuditLogSearchCriteria createCriteria(FieldAuditLogRequest request) {
+    FieldAuditLogSearchCriteria criteria = new FieldAuditLogSearchCriteria();
+    criteria.addParam("field", request.getField());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldBarGraphRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldBarGraphRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldBarGraphRequestConverter.java
new file mode 100644
index 0000000..dd518f8
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldBarGraphRequestConverter.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.FieldAuditBarGraphRequest;
+import org.apache.ambari.logsearch.query.model.FieldAuditBarGraphSearchCriteria;
+import org.springframework.stereotype.Component;
+
+@Component
+public class FieldBarGraphRequestConverter extends AbstractCommonAuditLogRequestConverter<FieldAuditBarGraphRequest, FieldAuditBarGraphSearchCriteria> {
+
+  @Override
+  public FieldAuditBarGraphSearchCriteria createCriteria(FieldAuditBarGraphRequest request) {
+    FieldAuditBarGraphSearchCriteria criteria = new FieldAuditBarGraphSearchCriteria();
+    criteria.addParam("unit", request.getUnit());
+    criteria.addParam("field", request.getField());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileRequestConverter.java
new file mode 100644
index 0000000..7c3038e
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileRequestConverter.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.LogFileRequest;
+import org.apache.ambari.logsearch.query.model.LogFileSearchCriteria;
+import org.springframework.core.convert.converter.Converter;
+import org.springframework.stereotype.Component;
+
+@Component
+public class LogFileRequestConverter implements Converter<LogFileRequest, LogFileSearchCriteria> {
+
+  @Override
+  public LogFileSearchCriteria convert(LogFileRequest request) {
+    LogFileSearchCriteria criteria = new LogFileSearchCriteria();
+    criteria.addParam("component", request.getComponent());
+    criteria.addParam("host", request.getHost());
+    criteria.addParam("logType", request.getLogType());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileTailRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileTailRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileTailRequestConverter.java
new file mode 100644
index 0000000..88b1a34
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileTailRequestConverter.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.LogFileTailRequest;
+import org.apache.ambari.logsearch.query.model.LogFileTailSearchCriteria;
+import org.springframework.core.convert.converter.Converter;
+import org.springframework.stereotype.Component;
+
+@Component
+public class LogFileTailRequestConverter implements Converter<LogFileTailRequest, LogFileTailSearchCriteria> {
+
+  @Override
+  public LogFileTailSearchCriteria convert(LogFileTailRequest request) {
+    LogFileTailSearchCriteria criteria = new LogFileTailSearchCriteria();
+    criteria.addParam("component", request.getComponent());
+    criteria.addParam("host", request.getHost());
+    criteria.addParam("logType", request.getLogType());
+    criteria.addParam("tailSize", request.getTailSize());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceAnyGraphRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceAnyGraphRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceAnyGraphRequestConverter.java
new file mode 100644
index 0000000..7a559de
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceAnyGraphRequestConverter.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.ServiceAnyGraphRequest;
+import org.apache.ambari.logsearch.query.model.ServiceAnyGraphSearchCriteria;
+import org.springframework.stereotype.Component;
+
+@Component
+public class ServiceAnyGraphRequestConverter extends AbstractCommonServiceLogRequestConverter<ServiceAnyGraphRequest, ServiceAnyGraphSearchCriteria> {
+
+  @Override
+  public ServiceAnyGraphSearchCriteria createCriteria(ServiceAnyGraphRequest anyGraphRequest) {
+    ServiceAnyGraphSearchCriteria criteria = new ServiceAnyGraphSearchCriteria();
+    criteria.addParam("xAxis", anyGraphRequest.getxAxis());
+    criteria.addParam("yAxis", anyGraphRequest.getyAxis());
+    criteria.addParam("stackBy", anyGraphRequest.getStackBy());
+    criteria.addParam("unit", anyGraphRequest.getUnit());
+    criteria.addParam("from", anyGraphRequest.getFrom());
+    criteria.addParam("to", anyGraphRequest.getTo());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceExtremeDatesRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceExtremeDatesRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceExtremeDatesRequestConverter.java
new file mode 100644
index 0000000..fe81468
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceExtremeDatesRequestConverter.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.impl.ServiceExtremeDatesRequest;
+import org.apache.ambari.logsearch.query.model.ServiceExtremeDatesCriteria;
+import org.springframework.stereotype.Component;
+
+@Component
+public class ServiceExtremeDatesRequestConverter extends AbstractCommonSearchRequestConverter<ServiceExtremeDatesRequest, ServiceExtremeDatesCriteria> {
+
+  @Override
+  public ServiceExtremeDatesCriteria convertToSearchCriteria(ServiceExtremeDatesRequest request) {
+    ServiceExtremeDatesCriteria criteria = new ServiceExtremeDatesCriteria();
+    criteria.addParam(LogSearchConstants.BUNDLE_ID, request.getBundleId());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceGraphRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceGraphRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceGraphRequestConverter.java
new file mode 100644
index 0000000..19165c0
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceGraphRequestConverter.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.ServiceGraphRequest;
+import org.apache.ambari.logsearch.query.model.ServiceGraphSearchCriteria;
+import org.springframework.stereotype.Component;
+
+@Component
+public class ServiceGraphRequestConverter extends AbstractCommonServiceLogRequestConverter<ServiceGraphRequest, ServiceGraphSearchCriteria> {
+
+  @Override
+  public ServiceGraphSearchCriteria createCriteria(ServiceGraphRequest request) {
+    ServiceGraphSearchCriteria criteria = new ServiceGraphSearchCriteria();
+    criteria.addParam("hostLogFile", request.getHostLogFile());
+    criteria.addParam("compLogFile", request.getComponentLogFile());
+    criteria.addParam("unit", request.getUnit());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogExportRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogExportRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogExportRequestConverter.java
new file mode 100644
index 0000000..7d83e49
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogExportRequestConverter.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.ServiceLogExportRequest;
+import org.apache.ambari.logsearch.query.model.ServiceLogExportSearchCriteria;
+import org.springframework.stereotype.Component;
+
+@Component
+public class ServiceLogExportRequestConverter extends AbstractCommonServiceLogRequestConverter<ServiceLogExportRequest, ServiceLogExportSearchCriteria> {
+
+  @Override
+  public ServiceLogExportSearchCriteria createCriteria(ServiceLogExportRequest request) {
+    ServiceLogExportSearchCriteria criteria = new ServiceLogExportSearchCriteria();
+    criteria.addParam("hostLogFile", request.getHostLogFile());
+    criteria.addParam("compLogFile",
+      request.getComponentLogFile());
+    criteria.addParam("format", request.getFormat());
+    criteria.addParam("utcOffset", request.getUtcOffset());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogFileRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogFileRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogFileRequestConverter.java
new file mode 100644
index 0000000..f5148f3
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogFileRequestConverter.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.ServiceLogFileRequest;
+import org.apache.ambari.logsearch.query.model.ServiceLogFileSearchCriteria;
+import org.springframework.stereotype.Component;
+
+@Component
+public class ServiceLogFileRequestConverter
+  extends AbstractCommonServiceLogRequestConverter<ServiceLogFileRequest, ServiceLogFileSearchCriteria> {
+
+  @Override
+  public ServiceLogFileSearchCriteria createCriteria(ServiceLogFileRequest request) {
+    ServiceLogFileSearchCriteria criteria = new ServiceLogFileSearchCriteria();
+    criteria.addParam("hostLogFile", request.getHostLogFile());
+    criteria.addParam("compLogFile", request.getComponentLogFile());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogRequestConverter.java
new file mode 100644
index 0000000..6a70d55
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogRequestConverter.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.ServiceLogRequest;
+import org.apache.ambari.logsearch.query.model.ServiceLogSearchCriteria;
+import org.apache.commons.lang.StringEscapeUtils;
+import org.springframework.stereotype.Component;
+
+@Component
+public class ServiceLogRequestConverter extends AbstractCommonServiceLogRequestConverter<ServiceLogRequest, ServiceLogSearchCriteria> {
+
+  @Override
+  public ServiceLogSearchCriteria createCriteria(ServiceLogRequest request) {
+    ServiceLogSearchCriteria criteria = new ServiceLogSearchCriteria();
+    criteria.addParam("hostLogFile", request.getHostLogFile());
+    criteria.addParam("compLogFile", request.getComponentLogFile());
+    criteria.addParam("keyword", StringEscapeUtils.unescapeXml(request.getKeyWord()));
+    criteria.addParam("sourceLogId", request.getSourceLogId());
+    criteria.addParam("keywordType", request.getKeywordType());
+    criteria.addParam("token", request.getToken());
+    criteria.addParam("isLastPage", request.isLastPage());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogTruncatedRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogTruncatedRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogTruncatedRequestConverter.java
new file mode 100644
index 0000000..676f049
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogTruncatedRequestConverter.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.ServiceLogTruncatedRequest;
+import org.apache.ambari.logsearch.query.model.ServiceLogTruncatedSearchCriteria;
+import org.springframework.stereotype.Component;
+
+@Component
+public class ServiceLogTruncatedRequestConverter extends AbstractCommonServiceLogRequestConverter<ServiceLogTruncatedRequest, ServiceLogTruncatedSearchCriteria> {
+
+  @Override
+  public ServiceLogTruncatedSearchCriteria createCriteria(ServiceLogTruncatedRequest request) {
+    ServiceLogTruncatedSearchCriteria criteria = new ServiceLogTruncatedSearchCriteria();
+    criteria.addParam("hostLogFile", request.getHostLogFile());
+    criteria.addParam("compLogFile", request.getComponentLogFile());
+    criteria.addParam("id", request.getId());
+    criteria.addParam("scrollType", request.getScrollType());
+    criteria.addParam("numberRows", request.getNumberRows());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/SimpleQueryRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/SimpleQueryRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/SimpleQueryRequestConverter.java
new file mode 100644
index 0000000..8c50f66
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/SimpleQueryRequestConverter.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.SimpleQueryRequest;
+import org.apache.ambari.logsearch.query.model.SearchCriteria;
+import org.springframework.core.convert.converter.Converter;
+import org.springframework.stereotype.Component;
+
+@Component
+public class SimpleQueryRequestConverter implements Converter<SimpleQueryRequest, SearchCriteria> {
+  @Override
+  public SearchCriteria convert(SimpleQueryRequest simpleQueryRequest) {
+    SearchCriteria searchCriteria = new SearchCriteria();
+    searchCriteria.addParam("q", simpleQueryRequest.getQuery());
+    return searchCriteria;
+  }
+}
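
Since SimpleQueryRequestConverter implements Spring's Converter interface and is registered as a @Component, it can be injected wherever an incoming request has to be turned into a SearchCriteria; because it is stateless it can also be instantiated directly. A minimal usage sketch (the helper class and method below are illustrative only and not part of this patch):

import org.apache.ambari.logsearch.model.request.impl.SimpleQueryRequest;
import org.apache.ambari.logsearch.query.converter.SimpleQueryRequestConverter;
import org.apache.ambari.logsearch.query.model.SearchCriteria;

// Illustrative helper, not part of the patch: shows the call pattern only.
public final class QueryConversionSketch {

  private QueryConversionSketch() {
  }

  public static SearchCriteria toCriteria(SimpleQueryRequest request) {
    // The converter copies request.getQuery() into the criteria under the "q" key.
    return new SimpleQueryRequestConverter().convert(request);
  }
}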

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserConfigRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserConfigRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserConfigRequestConverter.java
new file mode 100644
index 0000000..50847c7
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserConfigRequestConverter.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.impl.UserConfigRequest;
+import org.apache.ambari.logsearch.query.model.UserConfigSearchCriteria;
+import org.springframework.core.convert.converter.Converter;
+import org.springframework.stereotype.Component;
+
+@Component
+public class UserConfigRequestConverter implements Converter<UserConfigRequest, UserConfigSearchCriteria> {
+
+  @Override
+  public UserConfigSearchCriteria convert(UserConfigRequest request) {
+    UserConfigSearchCriteria criteria = new UserConfigSearchCriteria();
+    criteria.addParam(LogSearchConstants.USER_NAME, request.getUserId());
+    criteria.addParam(LogSearchConstants.FILTER_NAME, request.getFilterName());
+    criteria.addParam(LogSearchConstants.ROW_TYPE, request.getRowType());
+    return criteria;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserExportRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserExportRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserExportRequestConverter.java
new file mode 100644
index 0000000..a0b5f0f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserExportRequestConverter.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.apache.ambari.logsearch.model.request.impl.UserExportRequest;
+import org.apache.ambari.logsearch.query.model.UserExportSearchCriteria;
+import org.springframework.stereotype.Component;
+
+@Component
+public class UserExportRequestConverter extends AbstractCommonAuditLogRequestConverter<UserExportRequest, UserExportSearchCriteria> {
+
+  @Override
+  public UserExportSearchCriteria createCriteria(UserExportRequest request) {
+    UserExportSearchCriteria criteria = new UserExportSearchCriteria();
+    criteria.addParam("field", request.getField());
+    return criteria;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AnyGraphSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AnyGraphSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AnyGraphSearchCriteria.java
new file mode 100644
index 0000000..a11c056
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AnyGraphSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class AnyGraphSearchCriteria extends CommonSearchCriteria {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditBarGraphSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditBarGraphSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditBarGraphSearchCriteria.java
new file mode 100644
index 0000000..c41ec15
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditBarGraphSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class AuditBarGraphSearchCriteria extends CommonSearchCriteria {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditLogSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditLogSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditLogSearchCriteria.java
new file mode 100644
index 0000000..f4fe207
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditLogSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class AuditLogSearchCriteria extends CommonSearchCriteria {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/CommonSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/CommonSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/CommonSearchCriteria.java
new file mode 100644
index 0000000..47d12e5
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/CommonSearchCriteria.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+public class CommonSearchCriteria extends SearchCriteria {
+  private int startIndex = 0;
+  private int maxRows = Integer.MAX_VALUE;
+  private String sortBy = null;
+  private String sortType = null;
+  private int page = 0;
+
+  private String globalStartTime = null;
+  private String globalEndTime = null;
+
+  @Override
+  public int getStartIndex() {
+    return startIndex;
+  }
+
+  @Override
+  public void setStartIndex(int startIndex) {
+    this.startIndex = startIndex;
+  }
+
+  @Override
+  public int getMaxRows() {
+    return maxRows;
+  }
+
+  @Override
+  public void setMaxRows(int maxRows) {
+    this.maxRows = maxRows;
+  }
+
+  @Override
+  public String getSortType() {
+    return sortType;
+  }
+
+  @Override
+  public void setSortType(String sortType) {
+    this.sortType = sortType;
+  }
+
+  @Override
+  public String getSortBy() {
+    return sortBy;
+  }
+
+  @Override
+  public void setSortBy(String sortBy) {
+    this.sortBy = sortBy;
+  }
+
+  @Override
+  public int getPage() {
+    return page;
+  }
+
+  @Override
+  public void setPage(int page) {
+    this.page = page;
+  }
+
+  @Override
+  public String getGlobalStartTime() {
+    return globalStartTime;
+  }
+
+  @Override
+  public void setGlobalStartTime(String globalStartTime) {
+    this.globalStartTime = globalStartTime;
+  }
+
+  @Override
+  public String getGlobalEndTime() {
+    return globalEndTime;
+  }
+
+  @Override
+  public void setGlobalEndTime(String globalEndTime) {
+    this.globalEndTime = globalEndTime;
+  }
+}
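
CommonSearchCriteria layers paging and sorting state on top of SearchCriteria, with startIndex defaulting to 0 and maxRows to Integer.MAX_VALUE. A short sketch of how a caller might cap a result page on one of the concrete subclasses added in this patch (the sort field and direction literals are assumptions for illustration, not taken from this patch):

import org.apache.ambari.logsearch.query.model.AuditLogSearchCriteria;

// Illustrative only: demonstrates the paging/sorting setters inherited from CommonSearchCriteria.
public final class CriteriaPagingSketch {

  private CriteriaPagingSketch() {
  }

  public static AuditLogSearchCriteria firstPage() {
    AuditLogSearchCriteria criteria = new AuditLogSearchCriteria();
    criteria.setStartIndex(0);      // offset into the result set
    criteria.setMaxRows(25);        // page size, overriding the Integer.MAX_VALUE default
    criteria.setSortBy("evtTime");  // assumed sort field, for illustration only
    criteria.setSortType("desc");   // assumed sort direction literal
    return criteria;
  }
}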

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditBarGraphSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditBarGraphSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditBarGraphSearchCriteria.java
new file mode 100644
index 0000000..f931f5d
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditBarGraphSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class FieldAuditBarGraphSearchCriteria extends AuditBarGraphSearchCriteria {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditLogSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditLogSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditLogSearchCriteria.java
new file mode 100644
index 0000000..8dd5854
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditLogSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class FieldAuditLogSearchCriteria extends CommonSearchCriteria {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileSearchCriteria.java
new file mode 100644
index 0000000..e4e2a14
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class LogFileSearchCriteria extends SearchCriteria {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileTailSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileTailSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileTailSearchCriteria.java
new file mode 100644
index 0000000..fecb396
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileTailSearchCriteria.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public class LogFileTailSearchCriteria extends SearchCriteria {
+}


[08/50] [abbrv] ambari git commit: AMBARI-18214. Restify Log Search endpoints (oleewere)

Posted by ol...@apache.org.
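
AMBARI-18214 replaces the portal's RPC-style dashboard/* and audit/get* endpoints with resource-oriented REST paths. Among the renames visible in the hunks below:

  dashboard/hosts                     -> service/logs/hosts
  dashboard/levels_count              -> service/logs/levels/count
  dashboard/cancelFindRequest         -> service/logs/request/cancel
  dashboard/getServiceLogsFieldsName  -> service/logs/fields
  dashboard/getAfterBeforeLogs        -> service/logs/truncated
  audit/getLiveLogsCount              -> audit/logs/live/count
  userconfig/user_filter              -> userconfig/users/filter
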
http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VGroupListBase.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VGroupListBase.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VGroupListBase.js
index a34aaa3..0c81cc3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VGroupListBase.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VGroupListBase.js
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -17,18 +17,18 @@
  * under the License.
  */
 
- 
+
 define(['require',
 	'collections/BaseCollection',
 	'utils/Globals',
 	'models/VGroup'
 ],function(require,BaseCollection,Globals,VGroup){
-	'use strict';	
+	'use strict';
 
 	var VGroupListBase = BaseCollection.extend(
 	/** @lends VGroupListBase.prototype */
 	{
-		url: Globals.baseURL + 'dashboard/hosts',
+		url: Globals.baseURL + 'service/logs/hosts',
 
 		model : VGroup,
 
@@ -44,7 +44,7 @@ define(['require',
             this._changes = { };
 			this.on('change', this._onChange);
 		},
-		
+
 		_onChange : function(m){
             this._changes[m.id] = m;
 		},
@@ -59,7 +59,7 @@ define(['require',
 
 		getUsersOfGroup : function(groupId, options){
 			var url = Globals.baseURL  + 'xusers/'  + groupId + '/users';
-			
+
 			options = _.extend({
 				//data : JSON.stringify(postData),
 				contentType : 'application/json',
@@ -80,5 +80,3 @@ define(['require',
 
     return VGroupListBase;
 });
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VLogLevelListBase.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VLogLevelListBase.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VLogLevelListBase.js
index 59b5ae8..0e3de20 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VLogLevelListBase.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VLogLevelListBase.js
@@ -28,7 +28,7 @@ define(['require',
 	var VLogLevelListBase = BaseCollection.extend(
 	/** @lends VLogLevelListBase.prototype */
 	{
-		url: Globals.baseURL + 'dashboard/levels_count',
+		url: Globals.baseURL + 'service/logs/levels/count',
 
 		model : VLogLevel,
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VLogListBase.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VLogListBase.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VLogListBase.js
index 7b102d5..72ab530 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VLogListBase.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VLogListBase.js
@@ -58,7 +58,7 @@ define(['require',
 		 *************************/
 
 		cancelFindRequest : function(token, options){
-			var url = Globals.baseURL  + 'dashboard/cancelFindRequest';
+			var url = Globals.baseURL  + 'service/logs/request/cancel';
 			
 			options = _.extend({
 				data : $.param(token),
@@ -69,7 +69,7 @@ define(['require',
 			return this.constructor.nonCrudOperation.call(this, url, 'GET', options);
 		},
 		getServiceLogFields : function(token, options){
-			var url = Globals.baseURL  + 'dashboard/getServiceLogsFieldsName';
+			var url = Globals.baseURL  + 'service/logs/fields';
 			
 			options = _.extend({
 				data : $.param(token),
@@ -80,7 +80,7 @@ define(['require',
 			return this.constructor.nonCrudOperation.call(this, url, 'GET', options);
 		},
 		getServiceLogSchemaFields : function(token, options){
-			var url = Globals.baseURL  + 'dashboard/getServiceLogsSchemaFieldsName';
+			var url = Globals.baseURL  + 'service/logs/schema/fields';
 			
 			options = _.extend({
 				data : $.param(token),
@@ -91,7 +91,7 @@ define(['require',
 			return this.constructor.nonCrudOperation.call(this, url, 'GET', options);
 		},
 		getTruncatedLogs : function(token, options){
-			var url = Globals.baseURL  + 'dashboard/getAfterBeforeLogs';
+			var url = Globals.baseURL  + 'service/logs/truncated';
 			
 			options = _.extend({
 				data : $.param(token),
@@ -102,7 +102,7 @@ define(['require',
 			return this.constructor.nonCrudOperation.call(this, url, 'GET', options);
 		},
 		getServicesInfo : function(options){
-			var url = Globals.baseURL  + 'dashboard/getHadoopServiceConfigJSON';
+			var url = Globals.baseURL  + 'service/logs/serviceconfig';
 			
 			options = _.extend({
 				//data : $.param(token),

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VNameValueListBase.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VNameValueListBase.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VNameValueListBase.js
index d59eaa2..71e80d9 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VNameValueListBase.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VNameValueListBase.js
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -17,18 +17,18 @@
  * under the License.
  */
 
- 
+
 define(['require',
 	'collections/BaseCollection',
 	'utils/Globals',
 	'models/VNameValue'
 ],function(require,BaseCollection,Globals,VNameValue){
-	'use strict';	
+	'use strict';
 
 	var VNameValueListBase = BaseCollection.extend(
 	/** @lends VNameValueListBase.prototype */
 	{
-		url: Globals.baseURL + 'dashboard/hosts',
+		url: Globals.baseURL + 'service/logs/hosts',
 
 		model : VNameValue,
 
@@ -42,7 +42,7 @@ define(['require',
 			this.modelAttrName = 'vnameValues';
 			this.bindErrorEvents();
 		}
-		
+
 	},{
 	/**
 	* Table Cols to be passed to Backgrid
@@ -55,5 +55,3 @@ define(['require',
 
     return VNameValueListBase;
 });
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VNodeListBase.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VNodeListBase.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VNodeListBase.js
index 7c7dcf8..78949e3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VNodeListBase.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VNodeListBase.js
@@ -28,7 +28,7 @@ define(['require',
 	var VNodeListBase = BaseCollection.extend(
 	/** @lends VNodeListBase.prototype */
 	{
-		url: Globals.baseURL + 'dashboard/getTreeExtension',
+		url: Globals.baseURL + 'service/logs/tree',
 
 		model : VNode,
 
@@ -48,7 +48,7 @@ define(['require',
 		 *************************/
 
 		cancelFindRequest : function(token, options){
-			var url = Globals.baseURL  + 'dashboard/cancelFindRequest';
+			var url = Globals.baseURL  + 'service/logs/request/cancel';
 			
 			options = _.extend({
 				data : $.param(token),

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VAuditLogBase.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VAuditLogBase.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VAuditLogBase.js
index 1283875..8513ed2 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VAuditLogBase.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VAuditLogBase.js
@@ -42,7 +42,7 @@ define(['require',
 			this.modelName = 'VAuditLogBase';
 		},
 		auditLiveFeed : function(token, options){
-			var url = Globals.baseURL  + 'audit/getLiveLogsCount';
+			var url = Globals.baseURL  + 'audit/logs/live/count';
 			
 			options = _.extend({
 				data : $.param(token),

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VCommonModelBase.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VCommonModelBase.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VCommonModelBase.js
index 4723e3e..bbc3e36 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VCommonModelBase.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VCommonModelBase.js
@@ -27,7 +27,7 @@ define(['require',
 	var VCommonModelBase = BaseModel.extend(
 	/** @lends VCommonModelBase.prototype */
 	{
-		urlRoot: Globals.baseURL + 'dashboard/getTreeExtension',
+		urlRoot: Globals.baseURL + 'service/logs/tree',
 		
 		defaults: {},
 		

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VEventHistoryBase.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VEventHistoryBase.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VEventHistoryBase.js
index c237ade..a15cdbd 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VEventHistoryBase.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VEventHistoryBase.js
@@ -27,7 +27,7 @@ define(['require',
     var VEventHistory = BaseModel.extend(
         /** @lends VEventHistory.prototype */
         {
-            urlRoot: Globals.baseURL + 'dashboard/aggregatedData',
+            urlRoot: Globals.baseURL + 'service/logs/aggregated',
 
             defaults: {},
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VGraphInfoBase.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VGraphInfoBase.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VGraphInfoBase.js
index a707629..46e826c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VGraphInfoBase.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VGraphInfoBase.js
@@ -27,7 +27,7 @@ define(['require',
 	var VLogBase = BaseModel.extend(
 	/** @lends VLogBase.prototype */
 	{
-		urlRoot: Globals.baseURL + 'dashboard/aggregatedData',
+		urlRoot: Globals.baseURL + 'service/logs/aggregated',
 		
 		defaults: {},
 		

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VLogLevelBase.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VLogLevelBase.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VLogLevelBase.js
index 0384bc2..daaaa32 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VLogLevelBase.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VLogLevelBase.js
@@ -27,7 +27,7 @@ define(['require',
 	var VLogLevelBase = BaseModel.extend(
 	/** @lends VLogLevelBase.prototype */
 	{
-		urlRoot: Globals.baseURL + 'dashboard/levels_count',
+		urlRoot: Globals.baseURL + 'service/logs/levels/count',
 		
 		defaults: {},
 		

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VUserFilterBase.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VUserFilterBase.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VUserFilterBase.js
index 35171aa..da299a0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VUserFilterBase.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VUserFilterBase.js
@@ -27,7 +27,7 @@ define(['require',
 	var VUserFilterBase = BaseModel.extend(
 	/** @lends VUserFilterBase.prototype */
 	{
-		urlRoot: Globals.baseURL + 'userconfig/user_filter',
+		urlRoot: Globals.baseURL + 'userconfig/users/filter',
 
 		defaults: {},
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/ViewUtils.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/ViewUtils.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/ViewUtils.js
index 331ffd6..62d785b 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/ViewUtils.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/ViewUtils.js
@@ -58,7 +58,7 @@ define(['require',
         if (params.bundle_id && !params.start_time && !params.end_time) {
             var collection = new VNameValueList();
 
-            collection.url = Globals.baseURL + "dashboard/solr/getBundleIdBoundaryDates";
+            collection.url = Globals.baseURL + "service/logs/solr/boundarydates";
             collection.modelAttrName = "vNameValues";
             _.extend(collection.queryParams, {
                 "bundle_id": params.bundle_id

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditAggregatedView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditAggregatedView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditAggregatedView.js
index c04aaf9..ef6dce5 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditAggregatedView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditAggregatedView.js
@@ -98,7 +98,7 @@ define(['require',
                     pageSize: 9999
                 }
 			});
-			this.topUsers.url = Globals.baseURL + "audit/getTopAuditUsers";
+			this.topUsers.url = Globals.baseURL + "audit/logs/users";
 			this.topUsers.modelAttrName = "graphData";
 			this.topResources = new VNameValueList([],{
 				state: {
@@ -106,7 +106,7 @@ define(['require',
                     pageSize: 9999
                 }
 			});
-			this.topResources.url = Globals.baseURL + "audit/getTopAuditResources";
+			this.topResources.url = Globals.baseURL + "audit/logs/resources";
 			this.topResources.modelAttrName = "graphData";		
 			//initialize colors
 			this.colors = (new d3.scale.category20c().range().slice().reverse()).concat(new d3.scale.category20b().range().slice().reverse());
@@ -408,7 +408,7 @@ define(['require',
 			obj.utcOffset = moment().utcOffset();
 			obj.startIndex =  this.topUsers.state.currentPage * this.topUsers.state.pageSize;
 			var params = $.param(_.extend({},this.topUsers.queryParams,obj));
-			var url = "api/v1/audit/exportUserTableToTextFile?"+ params;
+			var url = "api/v1/audit/logs/users/export?"+ params;
 			window.open(url);
 			this.onDialogClosed();
 		}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditTabLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditTabLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditTabLayoutView.js
index 0b570ac..a399cbf 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditTabLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditTabLayoutView.js
@@ -27,9 +27,9 @@ define(['require',
 	'hbs!tmpl/audit/AuditTabLayoutView_tmpl',
 	'moment'
 ],function(require,Backbone,Globals,Utils,ViewUtils,VGroupList,VAuditLogList,VAuditLog,AuditTabLayoutViewTmpl,moment){
-    
+
     'use strict';
-    
+
     return Backbone.Marionette.Layout.extend(
 	/** @lends LogLevelView */
 	{
@@ -91,14 +91,14 @@ define(['require',
                     pageSize: 25
                 }
 			});
-			
+
 			this.componentsList = new VGroupList([],{
 				state: {
                     firstPage: 0,
                     pageSize: 99999
                 }
 			});
-			this.componentsList.url = Globals.baseURL + "audit/getAuditComponents";
+			this.componentsList.url = Globals.baseURL + "audit/logs/components";
 		},
 		bindEvents : function(){
 			this.listenTo(this.componentsList, "reset", function(col, abc){
@@ -115,13 +115,13 @@ define(['require',
 			this.listenTo(this.vent,"reinitialize:filter:mustNot",function(value){
             	this.reinitializeFilterMustNot(value);
             },this);
-			
+
 			this.listenTo(this.globalVent,"reinitialize:auditLogs",function(options){
             	this.vent.trigger("reinitialize:filter:mustNot reinitialize:filter:mustBe reinitialize:filter:logtime "+
             			"reinitialize:TopTenGraph",options);
             	this.fetchAuditLogs(options);
             },this);
-			
+
 		},
 		onRender : function(){
 			this.renderHistogram();
@@ -154,7 +154,7 @@ define(['require',
 					params : that.defaultParams
 				}));
 			})
-			
+
 		},
 		fetchAuditColumns : function(){
 			var that =this;
@@ -184,7 +184,7 @@ define(['require',
 		renderVSSearch : function(){
 			var that = this;
 			require(['views/tabs/VisualSearchView'], function(VisualSearchView){
-				
+
 				_.each(that.columns,function(v,i){
 					if(v.toLowerCase().indexOf("time") > 0 ){
 						//that.columns.splice(i, 1);
@@ -421,7 +421,7 @@ define(['require',
 					that.$("#loaderFeed").show();
 				},
 				success : function(data){
-					var dd=[]; 
+					var dd=[];
 					that.$("#spark").parent().show();
 					_.each(data.vnameValues,function(d){
 						dd.push(d.value);
@@ -452,8 +452,8 @@ define(['require',
 			if(values.mustNot){
 				this.ui.excludeComponents.select2('val',values.mustNot.split(","));
 			}else{
-				this.ui.excludeComponents.select2('val',[]);	
+				this.ui.excludeComponents.select2('val',[]);
 			}
 		}
 	});
-});
\ No newline at end of file
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js
index 42b94d5..4fb9b51 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/BubbleGraphTableLayoutView.js
@@ -31,7 +31,7 @@ define(['require',
 	'select2'
 ],function(require,Backbone,moment,Globals,Utils,ViewUtils,tip,VLogLevel,VLogList,VGraphInfo,BubbleGraphTableLayoutViewTmpl,JBDialog){
 	'use strict';
-	
+
 	return Backbone.Marionette.Layout.extend(
 	/** @lends BubbleGraphTableLayoutView */
 	{
@@ -96,7 +96,7 @@ define(['require',
                     pageSize: 25
                 }
             });
-			this.collection.url = Globals.baseURL + "dashboard/solr/logs_search";
+			this.collection.url = Globals.baseURL + "service/logs";
 			this.graphModel = new VGraphInfo();
 			this.bindEvents();
 			this.graphParams = this.params;
@@ -257,7 +257,7 @@ define(['require',
 					filterOpts : {},
 					paginatorOpts : {}
 				}));
-			});	
+			});
 		},
 		renderComponentList : function(){
 			var that = this;
@@ -416,7 +416,7 @@ define(['require',
 						displayOrder :6,
 						width : 6
 					}
-					
+
 			};
 			_.each(cols,function(c,k){
 				if(columns[k] == undefined){
@@ -502,7 +502,7 @@ define(['require',
 				this.ui.hostList.show();
 			}else
 				this.ui.componentList.show();
-				
+
 		},
 		bindContextMenuClick : function(){
 			var that = this;
@@ -543,13 +543,13 @@ define(['require',
 		        	that.selectionCallBack(selection,e)
 		        },1);
 
-		        
+
 		    });
 		},
 		selectionCallBack : function(selection,e){
 			this.RLogTable.currentView.$el.removeHighlight(true);
 			if(this.selectionText != selection.toString()){
-				this.selectionText = selection.toString(); 
+				this.selectionText = selection.toString();
 			}else{
 				$(".contextMenuBody [data-id='F']").show();
 				$(".contextMenuBody").hide();
@@ -561,8 +561,8 @@ define(['require',
 				$(".contextMenuBody").show();
 				$(".contextMenuBody").css({
 					'top':e.pageY - 40,
-					'left':e.pageX 
-				});  
+					'left':e.pageX
+				});
 			}else{
 				this.RLogTable.currentView.$el.removeHighlight(true);
 				$(".contextMenuBody [data-id='F']").show();
@@ -607,7 +607,7 @@ define(['require',
 			}else if ($el.data("id") === "C_M"){
 				this.globalVent.trigger("add:compare",$el.find('a'));
 			}
-			
+
 		},
 		renderDetailLogFileView : function(view){
 			var that = this;
@@ -655,12 +655,12 @@ define(['require',
 				this.ui.graph.text("no data");
 				return
 			}
-				
+
 			var root = {
 				name : "",
 				dataList : this.graphModel.get("graphData")
 			};
-			
+
 			var margin = 20;
 			this.ui.graph.empty();
 			//		var color = d3.scale.linear()
@@ -813,6 +813,6 @@ define(['require',
 			$('body').unbind("mouseup.contextMenu");
 		}
 	});
-	
-	
-});
\ No newline at end of file
+
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js
index abd3740..b8c43bb 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js
@@ -68,14 +68,14 @@ define(['require',
 		                    pageSize: 99999
 		                }
 					});
-					this.componentsList.url = Globals.baseURL + "dashboard/getComponentListWithLevelCounts";
+					this.componentsList.url = Globals.baseURL + "service/logs/components/level/counts";
 					this.hostList = new VNodeList([],{
 						state: {
 		                    firstPage: 0,
 		                    pageSize: 99999
 		                }
 					});
-					this.hostList.url = Globals.baseURL + "dashboard/getHostListByComponent";
+					this.hostList.url = Globals.baseURL + "service/logs/hosts/components";
 				},
 				/** all events binding here */
 				bindEvents : function(){

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentsView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentsView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentsView.js
index 66cc277..424236d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentsView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentsView.js
@@ -53,7 +53,7 @@ define(['require',
 		initialize: function(options) {
 			_.extend(this, _.pick(options,'vent'));
 			this.collection = new VLogLevel();
-			this.collection.url = Globals.baseURL+"dashboard/components_count";
+			this.collection.url = Globals.baseURL+"service/logs/components/count";
 			this.bindEvents();
 		},
 		onRender : function(){

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/DashboardView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/DashboardView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/DashboardView.js
index c3fd9c2..35814ba 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/DashboardView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/DashboardView.js
@@ -26,7 +26,7 @@ define(['require',
 ],function(require,Backbone,Globals,DashboardviewTmpl,VGroupList){
     'use strict';
 
-	
+
 	var DashboardView = Backbone.Marionette.Layout.extend(
 	/** @lends DashboardView */
 	{
@@ -69,7 +69,7 @@ define(['require',
 			this.logRegions = new Backbone.Collection();
 			this.hostCollection = new VGroupList([],{});
 			this.cComponents = new VGroupList([],{});
-			this.cComponents.url = Globals.baseURL + "dashboard/components";
+			this.cComponents.url = Globals.baseURL + "service/logs/components";
 			this.cTime = new VGroupList(Globals.timeQueryLOV,{});
 		},
 		/** all events binding here */
@@ -115,7 +115,7 @@ define(['require',
 		},
 		onSearchLogClick : function(e){
 			var searchParams = this.getSearchparams();
-			
+
 			if(this.logRegions.length == 0){
 				var model = new Backbone.Model({
 					id : 1,
@@ -123,7 +123,7 @@ define(['require',
 				});
 				this.generateView(model);
 			}else{
-				
+
 				var existsMod = this.logRegions.find(function(m){
 					return JSON.stringify(searchParams) === JSON.stringify(m.get('params'))
 				});
@@ -159,7 +159,7 @@ define(['require',
 			require(['views/dashboard/LogDetailView'],function(LogDetailView){
 				region.show(new LogDetailView({model: model}));
 			})
-			
+
 		},
 		getSearchparams : function(){
 			var obj={hosts : null, components : null, time:null};
@@ -176,7 +176,7 @@ define(['require',
 					obj.time = this.ui.time.select2("val");
 			}
 			return obj;
-				
+
 		},
 		/** on close */
 		onClose: function(){

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/GridTableLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/GridTableLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/GridTableLayoutView.js
index 1cbdef8..da930ea 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/GridTableLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/GridTableLayoutView.js
@@ -78,7 +78,7 @@ define(['require',
                 this.graphParams = {};
                 this.unit = (this.model.params && this.model.params.unit) ? this.model.params.unit : "+1HOUR";
                 this.firstRender = true;
-                this.collection.url = Globals.baseURL + "audit/getAnyGraphData";
+                this.collection.url = Globals.baseURL + "audit/logs/anygraph";
                 this.collection.modelAttrName = "graphData";
 
             },

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostsView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostsView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostsView.js
index dd82130..2440fd0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostsView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/HostsView.js
@@ -53,7 +53,7 @@ define(['require',
 		initialize: function(options) {
 			_.extend(this, _.pick(options,'vent'));
 			this.collection = new VLogLevel();
-			this.collection.url = Globals.baseURL+"dashboard/hosts_count";
+			this.collection.url = Globals.baseURL+"service/logs/hosts/count";
 			this.bindEvents();
 		},
 		onRender : function(){

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelBoxView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelBoxView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelBoxView.js
index b57f7c1..d2ee3d8 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelBoxView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/LogLevelBoxView.js
@@ -61,7 +61,7 @@ define(['require',
 		initialize: function(options) {
 			_.extend(this, _.pick(options,'vent','globalVent','params'));
 			this.logLevelList = new VLogLevelList();
-			this.logLevelList.url = Globals.baseURL + "dashboard/getLogLevelCounts";
+			this.logLevelList.url = Globals.baseURL + "service/logs/levels/counts/namevalues";
 			this.logLevelList.modelAttrName = "vNameValues";
 			this.bindEvents();
 		},

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/MainLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/MainLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/MainLayoutView.js
index 3be87f5..66593c4 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/MainLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/MainLayoutView.js
@@ -27,7 +27,7 @@ define(['require',
 	'd3.tip'
 ],function(require,Backbone,Utils,ViewUtils,Globals,MainLayoutViewTmpl){
     'use strict';
-	
+
 	var MainLayoutView = Backbone.Marionette.Layout.extend(
 	/** @lends MainLayoutView */
 	{
@@ -93,7 +93,7 @@ define(['require',
 //                    pageSize: 50
 //                }
 //            });
-//			this.collection.url = Globals.baseURL + "dashboard/solr/logs_search";
+//			this.collection.url = Globals.baseURL + "service/logs";
 			this.vent = new Backbone.Wreqr.EventAggregator();
 			this.dateUtil = Utils.dateUtil;
 			this.bindEvents();
@@ -107,7 +107,7 @@ define(['require',
             	setTimeout(function(){
             		that.reAdjustTab()
             	},1000);
-            	
+
             },this);
             this.listenTo(this.globalVent,"render:comparison:tab",function(options){
 				this.hideContextMenu();
@@ -211,7 +211,7 @@ define(['require',
 				$("html, body").animate({ scrollTop: 0 }, 500);
 				that.showTab(tabName);
 			});
-		
+
 		},
 		showTab : function(tabId){
 			this.$(".nav.nav-tabs li").removeClass("active");
@@ -242,7 +242,7 @@ define(['require',
 					globalVent:that.globalVent
 				}));
 			})
-			
+
 		},
 		renderComponents : function(){
 			var that = this;
@@ -274,7 +274,7 @@ define(['require',
 		renderTroubleShootTab:function(){
 			var that = this;
 			require(['views/troubleshoot/TroubleShootLayoutView'], function(TroubleShootLayoutView){
-				
+
 				that.RTroubleShoot.show(new TroubleShootLayoutView({
 					globalVent:that.globalVent
 				}));
@@ -345,7 +345,7 @@ define(['require',
 		var pack = d3.layout.pack()
 		    .padding(2)
 		    .size([diameter - margin, diameter - margin])
-		    .value(function(d) { 
+		    .value(function(d) {
 		    	return d.count; })
 		    .children(function(d){
 		    	return d.dataList;
@@ -381,9 +381,9 @@ define(['require',
 		  var circle = svg.selectAll("circle")
 		      .data(nodes)
 		    .enter().append("circle")
-		      .attr("class", function(d) { 
+		      .attr("class", function(d) {
 		    	  return d.parent ? d.children ? "node" : "node node--leaf "+d.name : "node node--root"; })
-		      .style("fill", function(d) { 
+		      .style("fill", function(d) {
 		    	  return d.children ? color(d.depth) : null; })
 		      .on("click", function(d) {
 		    	  if(d3.event.shiftKey){
@@ -391,7 +391,7 @@ define(['require',
 		    	  }else{
 		    		  if (focus !== d) zoom(d), d3.event.stopPropagation();
 		    	  }
-		    		   
+
 		      })
 		      .on('mouseover', function (d,i) {
                     if (d.x) {
@@ -403,7 +403,7 @@ define(['require',
                         tipCirclePack.hide(d);
                     }
                 });
-		  
+
 		  var text = svg.selectAll("text")
 		      .data(nodes)
 		    .enter().append("text")
@@ -418,7 +418,7 @@ define(['require',
 		    			  return "";
 		    	  }else
 		    		  return d.name;
-		    	   
+
 		      });
 
 		  var node = svg.selectAll("circle,text");
@@ -472,7 +472,7 @@ define(['require',
 			    		el.find('i').removeClass('fa-square-o').addClass('fa-check-square-o');
 			    		this.quickMenuCompare = false;
 			    		this.onCompareLink(el);
-			    	}        
+			    	}
 			    }else{
 			    	el.find('i').removeClass('fa-square-o').addClass('fa-check-square-o');
 			    }
@@ -643,7 +643,7 @@ define(['require',
 			  else {
 			    that.$('.scroller-right').hide();
 			  }
-			  
+
 			  if (getLeftPosi()<0) {
 			    that.$('.scroller-left').show();
 			  }
@@ -655,7 +655,7 @@ define(['require',
 
 
 			this.$('.scroller-right').click(function(e) {
-			  
+
 			 /* that.$('.scroller-left').fadeIn('slow');
 			  that.$('.scroller-right').fadeOut('slow');*/
 			  //console.log(widthOfHidden())
@@ -664,23 +664,23 @@ define(['require',
 			  		that.reAdjustTab();
 			 	 });
 			  }
-			 
+
 			});
 
 			this.$('.scroller-left').click(function() {
-			  
+
 				/*that.$('.scroller-right').fadeIn('slow');
 				that.$('.scroller-left').fadeOut('slow');*/
 			  	//console.log(getLeftPosi())
 			  	if(getLeftPosi() < 0){
 			  		that.$('.list').animate({left:"-="+(-40)+"px"},0,function(){
 			  		that.reAdjustTab();
-			  	});	
+			  	});
 			  	}
-			  	
-			});    
+
+			});
 		},
 	});
 	return MainLayoutView;
-	
-});
\ No newline at end of file
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dialog/GlobalExclusionCompositeView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dialog/GlobalExclusionCompositeView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dialog/GlobalExclusionCompositeView.js
index a737eba..ffef322 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dialog/GlobalExclusionCompositeView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dialog/GlobalExclusionCompositeView.js
@@ -50,7 +50,7 @@ define(['require',
                         pageSize: 1000
                     }
                 });
-                this.componentsList.url = Globals.baseURL + "dashboard/components";
+                this.componentsList.url = Globals.baseURL + "service/logs/components";
 
                 this.collection = this.exclusionObj.logMessageCollection;
                 if (this.collection.length == 0) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
index ba07600..95324d9 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
@@ -52,7 +52,7 @@ define(['require',
                 events["click [data-value]"] = 'onLogLevelHeaderClick';
                 events["click #filterContent input[type='checkbox']"] = 'onAnyCheckboxClick';
                 events["click .overrideRow a"] = 'onEditHost';
-                
+
                 return events;
             },
 
@@ -76,8 +76,8 @@ define(['require',
                     }
                 });
 
-                this.componentsList.url = Globals.baseURL + "dashboard/components";
-                this.hostList.url = Globals.baseURL + "dashboard/hosts";
+                this.componentsList.url = Globals.baseURL + "service/logs/components";
+                this.hostList.url = Globals.baseURL + "service/logs/hosts";
                 this.model = new VUserFilter();
 
                 this.levelCollection = new Backbone.Collection();
@@ -100,7 +100,7 @@ define(['require',
             onRender: function() {
                 var that = this;
                 // this.setupSelect2Fields(this.levelCollection, "type", "type", "levelSelect2", 'Select Level');
-                
+
                 $.when(this.hostList.fetch({ reset: true }), this.componentsList.fetch({ reset: true }), this.model.fetch({})).then(function(c1, c2, m1) {
                     // if (!_.isUndefined(that.model.get('components'))) {
                     //     that.ui.componentSelect2.select2('val', that.model.get('components'));
@@ -116,7 +116,7 @@ define(['require',
 
                     //that.dataLevels = [];
                     //that.dataLevels = _.pluck(that.levelCollection.models, 'attributes');
-                    
+
                     //that.dataList = [];
                     //that.dataList = _.pluck(that.componentsList.models, 'attributes');
                     that.renderComponents();
@@ -148,13 +148,13 @@ define(['require',
             		var components = this.model.get("filter");
             		_.each(components,function(value,key){
             			var obj = components[key];
-            			
-            			if((_.isArray(obj.overrideLevels) && obj.overrideLevels.length) || 
+
+            			if((_.isArray(obj.overrideLevels) && obj.overrideLevels.length) ||
             					(_.isArray(obj.hosts) && obj.hosts.length) || obj.expiryTime){
             				var $el = that.$("input[data-name='"+key+"']").filter("[data-override]");
         					$el.click();
             			}
-            			
+
             			//setting override data
             			if(_.isArray(obj.overrideLevels)){
             				if(obj.overrideLevels.length){
@@ -202,7 +202,7 @@ define(['require',
             	_.each(this.levelCollection.models,function(model){
             		that.setCheckAllValue(model.get("type"));
             	});
-            	
+
             },
             onAnyCheckboxClick : function(e){
             	var $el = $(e.currentTarget);
@@ -363,4 +363,4 @@ define(['require',
             }
         });
 
-});
\ No newline at end of file
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js
index 0085f06..76d0537 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js
@@ -81,10 +81,10 @@ define(['require',
                 this.firstRender = true;
                 if (!this.viewType || this.viewType == Globals.graphType.HISTOGRAM.value) {
                     this.histogramView = true;
-                    this.collection.url = Globals.baseURL + "dashboard/getHistogramData";
+                    this.collection.url = Globals.baseURL + "service/logs/histogram";
                     this.collection.modelAttrName = "graphData";
                 } else {
-                    this.collection.url = Globals.baseURL + "audit/getAuditLineGraphData";
+                    this.collection.url = Globals.baseURL + "audit/logs/linegraph";
                     this.collection.modelAttrName = "graphData";
                     this.lineView = true;
                 }
@@ -246,7 +246,7 @@ define(['require',
                                                  }
                                 }
                         }
-                        Obj.values = newObj;                      
+                        Obj.values = newObj;
                     }
 
                     if (that.histogramView) {
@@ -282,7 +282,7 @@ define(['require',
                     generate: function() {
                         /* var parentWidth = (that.ui.histoGraph.find('svg').parent().width()),
                              parentHeight = (that.ui.histoGraph.find('svg').parent().height())
-                             width = ((parentWidth === 0) ? (891) : (parentWidth)), // -15 because  parent has 15 padding 
+                             width = ((parentWidth === 0) ? (891) : (parentWidth)), // -15 because  parent has 15 padding
                               height = ((parentHeight === 0) ? (640) : (parentHeight)) // -15 because  parent has 15 padding */
                         if (that.histogramView) {
                             that.chart = nv.models.multiBarChart()
@@ -290,7 +290,7 @@ define(['require',
                                  .height(height)*/
                                 .stacked(true)
                                 .showControls(false);
-                            that.chart.groupSpacing(0.6) // for bar width and aspace 
+                            that.chart.groupSpacing(0.6) // for bar width and aspace
                         } else {
                             that.chart = nv.models.lineChart().options({
                                 transitionDuration: 300,
@@ -484,7 +484,7 @@ define(['require',
                         that.vent.trigger("date:setDate", {
                                 'from': that.dateUtil.getMomentObject(that.brushValue[0]),
                                 'to': that.dateUtil.getMomentObject(that.brushValue[1])
-                          });          
+                          });
                      }
                     /*}*/
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GridGraphLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GridGraphLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GridGraphLayoutView.js
index b0339f3..47393bb 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GridGraphLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GridGraphLayoutView.js
@@ -72,7 +72,7 @@ define(['require',
 
                         }
                     });
-                	this.collection.url = Globals.baseURL + "audit/getAnyGraphData";
+                	this.collection.url = Globals.baseURL + "audit/logs/anygraph";
                     this.collection.modelAttrName = "graphData";
                 }
                 this.dateUtil = Utils.dateUtil;

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/ComparisonView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/ComparisonView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/ComparisonView.js
index 1d26dc4..f922087 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/ComparisonView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/ComparisonView.js
@@ -65,7 +65,7 @@ define(['require',
                     }
                 });
                 this.dateUtil = Utils.dateUtil;
-                this.collection.url = Globals.baseURL + "dashboard/solr/logs_search";
+                this.collection.url = Globals.baseURL + "service/logs";
                 this.bindEvents();
                 this.dateRangeLabel = "Last 1 Hour"
             },

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/HierarchyTabLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/HierarchyTabLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/HierarchyTabLayoutView.js
index 6c6a77e..e4d327c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/HierarchyTabLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/HierarchyTabLayoutView.js
@@ -79,7 +79,7 @@ define(['require',
 		initialize: function(options) {
 			_.extend(this, _.pick(options,'globalVent'));
 //			this.logLevelList = new VLogLevelList();
-//			this.logLevelList.url = Globals.baseURL + "dashboard/getLogLevelCounts";
+//			this.logLevelList.url = Globals.baseURL + "service/logs/levels/counts/namevalues";
 //			this.logLevelList.modelAttrName = "vNameValues";
 			this.columnCollection = new VLogList([],{
 				state: {
@@ -93,15 +93,15 @@ define(['require',
                     pageSize: 99999
                 }
 			});
-			this.componentsList.url = Globals.baseURL + "dashboard/components";
+			this.componentsList.url = Globals.baseURL + "service/logs/components";
 			this.vent = new Backbone.Wreqr.EventAggregator();
-			
+
 			this.defaultParams = ViewUtils.getDefaultParamsForHierarchy();
 			this.bindEvents();
 		},
 		applyParamsDate:function(date){
 			if (date) {
-				var dateString  = date.split(','); 
+				var dateString  = date.split(',');
 				 if(dateString.length){
 				 	var checkDate = Utils.dateUtil.getMomentUTC(dateString[0]);
 				 	if(checkDate.isValid()){
@@ -112,7 +112,7 @@ define(['require',
 				 		}
 				 		return  checkDate.toJSON();
 				 	}
-				 }	
+				 }
 			}
 		},
 		bindEvents : function(){
@@ -144,7 +144,7 @@ define(['require',
 			this.listenTo(this.vent,"tab:refresh",function(params){
 				this.reRenderComponents(params);
 			},this);
-			
+
 			this.listenTo(this.globalVent,"reinitialize:serviceLogs",function(options){
             	this.vent.trigger("reinitialize:filter:tree reinitialize:filter:include:exclude reinitialize:filter:bubbleTable"+
             			" reinitialize:filter:mustNot reinitialize:filter:mustBe reinitialize:filter:level reinitialize:filter:logtime",options);
@@ -163,7 +163,7 @@ define(['require',
 			this.componentsList.fetch({reset:true});
 		},
 		onShow:function(){
-			
+
 			//this.REventHistory.currentView.genrateTimeline();
 
 		},
@@ -267,7 +267,7 @@ define(['require',
 					params : that.defaultParams,
 				}));
             });
-		
+
 
 		},
 		renderVSSearch : function(){
@@ -294,7 +294,7 @@ define(['require',
 								query : query
 							};
 						}
-					
+
 				}));
             });
 		},
@@ -309,7 +309,7 @@ define(['require',
 					placeholder : "Include Search",
 					vent : that.vent,
 					globalVent:that.globalVent,
-					customOptions : columns,  
+					customOptions : columns,
 					eventName : Globals.eventName.serviceLogsIncludeColumns,
 					myFormatData : function(query,searchCollection){
 						var obj=[];
@@ -431,7 +431,7 @@ define(['require',
 				this.ui.excludeComponents.select2('val',values.mustNot.split(","));
 			else
 				this.ui.excludeComponents.select2('val',[]);
-				
+
 		},
 		reRenderComponents : function(params){
 			var iComponents = this.ui.includeComponents.val(),eComponents = this.ui.excludeComponents.val(),that=this;
@@ -455,20 +455,20 @@ define(['require',
 			} else{
 				this.ui.advanceSearch.hide();
 				this.ui.applySearch.hide();
-				this.ui.basicSearch.show();	
+				this.ui.basicSearch.show();
 				obj = this.getIncludeExcludeColValues();
 				obj.advanceSearch = null;
 			}
-			
+
 			this.vent.trigger('main:search',obj);
-			
+
 		},
 		applySearchBtn : function(){
 			var obj = {}
 			obj.advanceSearch = this.RAdvanceSearch.currentView.ui.searchArea.val();
 
 				this.vent.trigger('main:search',obj);
-			
+
 		},
 		getIncludeExcludeColValues : function(){
 			return _.extend(this.RVisualSearchIncCol.currentView.formatData(this.RVisualSearchIncCol.currentView.visualSearch.searchBox.value(),this.RVisualSearchIncCol.currentView.visualSearch.searchQuery),
@@ -483,6 +483,6 @@ define(['require',
 			}
 		}
 	});
-	
-	
-});
\ No newline at end of file
+
+
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
index 4af4670..4b42eab 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
@@ -99,7 +99,7 @@ define(['require',
                         pageSize: 25
                     }
                 });
-                this.logFileCollection.url = Globals.baseURL + "dashboard/solr/logs_search";
+                this.logFileCollection.url = Globals.baseURL + "service/logs";
                 this.vent = new Backbone.Wreqr.EventAggregator();
                 this.bindEvents();
                 this.commonTableOptions = {
@@ -203,7 +203,7 @@ define(['require',
                 //				{startIndex : this.logFileCollection.state.currentPage * this.logFileCollection.state.pageSize},obj),
                 //				'component','from','to','host','level','unit','startIndex','pageSize','format','utcOffset'));
                 var params = $.param(_.extend({}, this.logFileCollection.queryParams, obj));
-                var url = "api/v1/dashboard/exportToTextFile?" + params;
+                var url = "api/v1/service/logs/export?" + params;
                 window.open(url);
                 this.onDialogClosed();
             },
@@ -256,7 +256,7 @@ define(['require',
                             var top = element.offset().top;
                             element.addClass('highlightLog');
                             $("html, body").animate({ scrollTop: (top - 200) }, 1);
-                            /*setTimeout(function(){ 
+                            /*setTimeout(function(){
                             	element.addClass('fadeOutColor')
                             	setTimeout(function(){element.removeClass('fadeOutColor highlightLog');},4000)
                             },6000);*/
@@ -599,7 +599,7 @@ define(['require',
             },
             initializeContextMenu: function() {
                 var that = this;
-                
+
                 $('body').on("mouseup.contextMenuLogFile", function(e) {
                     var selection;
                     if (window.getSelection) {
@@ -657,7 +657,7 @@ define(['require',
                     }else if(type === "IA" || type === "EA"){
     					this.vent.trigger("toggle:facet",{viewName:((type === "IA") ? "include" : "exclude") +"ServiceColumns",
     						key:Globals.serviceLogsColumns["log_message"],value:"*"+this.selectionText+"*"});
-    				} 
+    				}
                     else {
                         //this.vent.trigger("add:include:exclude",{type:type,value:this.selectionText});
                         this.vent.trigger("toggle:facet", { viewName: ((type === "I") ? "include" : "exclude") + "ServiceColumns", key: Globals.serviceLogsColumns["log_message"], value: this.selectionText });
@@ -892,4 +892,4 @@ define(['require',
                 $('body').unbind("mouseup.contextMenuLogFile");
             }
         });
-});
\ No newline at end of file
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js
index cf33e68..bcd2c80 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/TreeView.js
@@ -80,7 +80,7 @@ define(['require',
 		initialize: function(options) {
 			_.extend(this, _.pick(options,'vent','globalVent','params'));
 //			this.collection = new VLogLevel();
-//			this.collection.url = Globals.baseURL+"dashboard/components_count";
+//			this.collection.url = Globals.baseURL+"service/logs/components/count";
 			this.treeModel = new VCommonModel();
 			this.searchParams = (this.params)? this.params :{};
 			this.bindEvents();

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js
index c1655f0..66f851c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/troubleshoot/TroubleShootLayoutView.js
@@ -82,7 +82,7 @@ define(['backbone',
 
                     }
                 });
-                this.serviceLogsCollection.url = Globals.baseURL + "dashboard/getAnyGraphData";
+                this.serviceLogsCollection.url = Globals.baseURL + "service/logs/anygraph";
                 this.serviceLogsCollection.modelAttrName = "graphData";
                 
             	this.topUsers = new VNameValueList([],{
@@ -91,7 +91,7 @@ define(['backbone',
                         pageSize: 9999
                     }
     			});
-    			this.topUsers.url = Globals.baseURL + "audit/getTopAuditUsers";
+    			this.topUsers.url = Globals.baseURL + "audit/logs/users";
     			this.topUsers.modelAttrName = "graphData";
     			
     			this.serviceLoadCollection = new VLogList([], {
@@ -101,7 +101,7 @@ define(['backbone',
 
                     }
                 });
-            	this.serviceLoadCollection.url = Globals.baseURL + "audit/getServiceLoad";
+            	this.serviceLoadCollection.url = Globals.baseURL + "audit/logs/serviceload";
                 this.serviceLoadCollection.modelAttrName = "graphData";
             },
             bindEvents : function(){

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/graphs/backup.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/graphs/backup.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/graphs/backup.js
index 9b589f1..8999e01 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/graphs/backup.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/graphs/backup.js
@@ -78,10 +78,10 @@ define(['require',
                 this.firstRender = true;
                 if (!this.viewType || this.viewType == Globals.graphType.HISTOGRAM.value) {
                     this.histogramView = true;
-                    this.collection.url = Globals.baseURL + "dashboard/getHistogramData";
+                    this.collection.url = Globals.baseURL + "service/logs/histogram";
                     this.collection.modelAttrName = "graphData";
                 } else {
-                    this.collection.url = Globals.baseURL + "audit/getAuditLineGraphData";
+                    this.collection.url = Globals.baseURL + "audit/logs/linegraph";
                     this.collection.modelAttrName = "graphData";
                     this.lineView = true;
                 }
@@ -323,7 +323,7 @@ define(['require',
                     generate: function() {
                         /* var parentWidth = (that.ui.histoGraph.find('svg').parent().width()),
                              parentHeight = (that.ui.histoGraph.find('svg').parent().height())
-                             width = ((parentWidth === 0) ? (891) : (parentWidth)), // -15 because  parent has 15 padding 
+                             width = ((parentWidth === 0) ? (891) : (parentWidth)), // -15 because  parent has 15 padding
                               height = ((parentHeight === 0) ? (640) : (parentHeight)) // -15 because  parent has 15 padding */
                         if (that.histogramView) {
                             that.chart = nv.models.multiBarChart()
@@ -331,7 +331,7 @@ define(['require',
                                  .height(height)*/
                                 .stacked(true)
                                 .showControls(false);
-                            that.chart.groupSpacing(0.6) // for bar width and aspace 
+                            that.chart.groupSpacing(0.6) // for bar width and aspace
                         } else {
                             that.chart = nv.models.lineChart().options({
                                 transitionDuration: 300,
@@ -444,8 +444,8 @@ define(['require',
                         that.$(".box").resize(function() {
                             //                             var parentWidth = (that.ui.histoGraph.find('svg').parent().width()),
                             //                                 parentHeight = (that.ui.histoGraph.find('svg').parent().height()),
-                            //                                 width = ((parentWidth === 0) ? (891) : (parentWidth)), // -15 because  parent has 15 padding 
-                            //                                 height = ((parentHeight === 0) ? (240) : (parentHeight)) // -15 because  parent has 15 padding 
+                            //                                 width = ((parentWidth === 0) ? (891) : (parentWidth)), // -15 because  parent has 15 padding
+                            //                                 height = ((parentHeight === 0) ? (240) : (parentHeight)) // -15 because  parent has 15 padding
                             //                             graph.width(width).height(height);
                             //                          if (that.parentView) {
                             //                                  var elem = that.parentView.$el.find('[data-id="rHistogramGraph"] svg');

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingRequestHelperImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingRequestHelperImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingRequestHelperImpl.java
index 276a65e..061c607 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingRequestHelperImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/logging/LoggingRequestHelperImpl.java
@@ -62,9 +62,9 @@ public class LoggingRequestHelperImpl implements LoggingRequestHelper {
 
   private static final String LOGSEARCH_ADMIN_PASSWORD_PROPERTY_NAME = "logsearch_admin_password";
 
-  private static final String LOGSEARCH_QUERY_PATH = "/service/dashboard/solr/logs_search";
+  private static final String LOGSEARCH_QUERY_PATH = "/api/v1/service/logs";
 
-  private static final String LOGSEARCH_GET_LOG_LEVELS_PATH = "/service/dashboard/getLogLevelCounts";
+  private static final String LOGSEARCH_GET_LOG_LEVELS_PATH = "/api/v1/service/logs/levels/counts/namevalues";
 
   private static final String LOGSEARCH_ADMIN_CREDENTIAL_NAME = "logsearch.admin.credential";
 


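For reference, a minimal standalone client for the renamed Log Search REST endpoints above ("/api/v1/service/logs" and "/api/v1/service/logs/levels/counts/namevalues") could look like the sketch below. Only the endpoint paths are taken from this commit; the host, port, authentication scheme and credentials, and query parameters ("level", "pageSize") are illustrative assumptions, not values defined by these changes.

  import java.io.BufferedReader;
  import java.io.InputStreamReader;
  import java.net.HttpURLConnection;
  import java.net.URL;
  import java.nio.charset.StandardCharsets;
  import java.util.Base64;

  public class LogSearchClientSketch {

    public static void main(String[] args) throws Exception {
      // Assumed host, port and credentials -- placeholders only.
      String base = "http://logsearch-host.example.com:61888";
      // Path taken from the refactored LOGSEARCH_QUERY_PATH constant; query parameters are assumed.
      String url = base + "/api/v1/service/logs?level=ERROR,FATAL&pageSize=25";

      HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
      conn.setRequestMethod("GET");
      String auth = Base64.getEncoder()
          .encodeToString("admin:admin".getBytes(StandardCharsets.UTF_8));
      conn.setRequestProperty("Authorization", "Basic " + auth);
      conn.setRequestProperty("Accept", "application/json");

      // Print the raw JSON response line by line.
      try (BufferedReader reader = new BufferedReader(
          new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
        String line;
        while ((line = reader.readLine()) != null) {
          System.out.println(line);
        }
      }
    }
  }

The same pattern applies to the log level counts endpoint; only the path changes.
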
[50/50] [abbrv] ambari git commit: AMBARI-18246. Clean up Log Feeder (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
AMBARI-18246. Clean up Log Feeder (Miklos Gergely via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bc7e0aa7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bc7e0aa7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bc7e0aa7

Branch: refs/heads/branch-dev-logsearch
Commit: bc7e0aa73400b60816fecf506d49a40cf19a80ac
Parents: 3f790c8
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Wed Sep 7 22:24:51 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:34:01 2016 +0200

----------------------------------------------------------------------
 .../org/apache/ambari/logfeeder/LogFeeder.java  | 594 +++++++------------
 .../ambari/logfeeder/common/ConfigBlock.java    |  71 +--
 .../logfeeder/common/LogFeederConstants.java    |  39 ++
 .../apache/ambari/logfeeder/filter/Filter.java  |  55 +-
 .../ambari/logfeeder/filter/FilterGrok.java     |  94 +--
 .../ambari/logfeeder/filter/FilterJSON.java     |   8 +-
 .../ambari/logfeeder/filter/FilterKeyValue.java |  44 +-
 .../logfeeder/input/AbstractInputFile.java      | 319 ++++++++++
 .../apache/ambari/logfeeder/input/Input.java    | 313 +++++-----
 .../ambari/logfeeder/input/InputFile.java       | 503 ++--------------
 .../ambari/logfeeder/input/InputManager.java    | 379 ++++++++++++
 .../ambari/logfeeder/input/InputMarker.java     |  17 +-
 .../apache/ambari/logfeeder/input/InputMgr.java | 451 --------------
 .../ambari/logfeeder/input/InputS3File.java     | 424 +------------
 .../ambari/logfeeder/input/InputSimulate.java   |  40 +-
 .../logfeeder/input/reader/GZIPReader.java      |  23 +-
 .../input/reader/LogsearchReaderFactory.java    |   8 +-
 .../logconfig/FetchConfigFromSolr.java          | 194 ------
 .../logfeeder/logconfig/FilterLogData.java      |  83 +++
 .../logfeeder/logconfig/LogConfigFetcher.java   | 168 ++++++
 .../logfeeder/logconfig/LogConfigHandler.java   | 189 ++++++
 .../logfeeder/logconfig/LogFeederConstants.java |  34 --
 .../logfeeder/logconfig/LogFeederFilter.java    |  90 +++
 .../logconfig/LogFeederFilterWrapper.java       |  55 ++
 .../logfeeder/logconfig/LogfeederScheduler.java |  59 --
 .../logconfig/filter/ApplyLogFilter.java        |  62 --
 .../logconfig/filter/DefaultDataFilter.java     |  49 --
 .../logconfig/filter/FilterLogData.java         |  53 --
 .../apache/ambari/logfeeder/mapper/Mapper.java  |  14 +-
 .../ambari/logfeeder/mapper/MapperDate.java     |  32 +-
 .../logfeeder/mapper/MapperFieldName.java       |  20 +-
 .../logfeeder/mapper/MapperFieldValue.java      |  31 +-
 .../logfeeder/metrics/LogFeederAMSClient.java   |  10 +-
 .../ambari/logfeeder/metrics/MetricCount.java   |  31 -
 .../ambari/logfeeder/metrics/MetricData.java    |  46 ++
 .../logfeeder/metrics/MetricsManager.java       | 168 ++++++
 .../ambari/logfeeder/metrics/MetricsMgr.java    | 178 ------
 .../apache/ambari/logfeeder/output/Output.java  |  13 +-
 .../ambari/logfeeder/output/OutputData.java     |   8 +-
 .../ambari/logfeeder/output/OutputDevNull.java  |   7 +-
 .../ambari/logfeeder/output/OutputFile.java     |  42 +-
 .../ambari/logfeeder/output/OutputHDFSFile.java |  70 +--
 .../ambari/logfeeder/output/OutputKafka.java    |  58 +-
 .../ambari/logfeeder/output/OutputManager.java  | 250 ++++++++
 .../ambari/logfeeder/output/OutputMgr.java      | 263 --------
 .../ambari/logfeeder/output/OutputS3File.java   |  41 +-
 .../ambari/logfeeder/output/OutputSolr.java     |  62 +-
 .../logfeeder/output/S3LogPathResolver.java     |   6 +-
 .../logfeeder/output/S3OutputConfiguration.java |   5 +-
 .../ambari/logfeeder/output/S3Uploader.java     |  64 +-
 .../logfeeder/output/spool/LogSpooler.java      |  23 +-
 .../output/spool/LogSpoolerContext.java         |   2 +-
 .../output/spool/LogSpoolerException.java       |   2 +-
 .../output/spool/RolloverCondition.java         |   2 +-
 .../logfeeder/output/spool/RolloverHandler.java |   2 +-
 .../apache/ambari/logfeeder/util/AWSUtil.java   |  52 +-
 .../apache/ambari/logfeeder/util/AliasUtil.java | 103 ++--
 .../ambari/logfeeder/util/CompressionUtil.java  |  15 +-
 .../apache/ambari/logfeeder/util/DateUtil.java  |  39 +-
 .../apache/ambari/logfeeder/util/FileUtil.java  |  66 ++-
 .../ambari/logfeeder/util/LogFeederUtil.java    | 511 +++++-----------
 .../logfeeder/util/LogfeederHDFSUtil.java       |  58 +-
 .../ambari/logfeeder/util/PlaceholderUtil.java  |  32 +-
 .../apache/ambari/logfeeder/util/S3Util.java    |  81 +--
 .../apache/ambari/logfeeder/util/SolrUtil.java  | 186 ------
 .../ambari/logfeeder/view/VLogfeederFilter.java |  90 ---
 .../logfeeder/view/VLogfeederFilterWrapper.java |  55 --
 .../org/apache/ambari/logfeeder/AppTest.java    | 116 ----
 .../ambari/logfeeder/filter/FilterGrokTest.java |  55 +-
 .../ambari/logfeeder/filter/FilterJSONTest.java |  41 +-
 .../logfeeder/filter/FilterKeyValueTest.java    |  34 +-
 .../ambari/logfeeder/input/InputFileTest.java   |  24 +-
 .../logfeeder/input/InputManagerTest.java       | 241 ++++++++
 .../logconfig/LogConfigHandlerTest.java         | 117 ++++
 .../ambari/logfeeder/mapper/MapperDateTest.java |  17 +-
 .../logfeeder/mapper/MapperFieldNameTest.java   |   2 +-
 .../logfeeder/mapper/MapperFieldValueTest.java  |   2 +-
 .../logfeeder/metrics/MetrcisManagerTest.java   | 128 ++++
 .../logfeeder/output/OutputKafkaTest.java       |   5 +-
 .../logfeeder/output/OutputManagerTest.java     | 256 ++++++++
 .../logfeeder/output/OutputS3FileTest.java      |  17 +-
 .../ambari/logfeeder/output/OutputSolrTest.java |   5 +-
 .../logfeeder/output/S3LogPathResolverTest.java |   2 +-
 .../ambari/logfeeder/output/S3UploaderTest.java |  42 +-
 .../logfeeder/output/spool/LogSpoolerTest.java  |   2 +-
 .../ambari/logfeeder/util/AWSUtilTest.java      |  29 -
 .../logfeeder/util/PlaceholderUtilTest.java     |  20 +-
 .../ambari/logfeeder/util/S3UtilTest.java       |   4 +-
 .../src/test/resources/logfeeder.properties     |  20 +
 89 files changed, 3854 insertions(+), 4481 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
index 373d743..6d0f22c 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
@@ -20,14 +20,10 @@
 package org.apache.ambari.logfeeder;
 
 import java.io.BufferedInputStream;
-import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStreamReader;
 import java.lang.reflect.Type;
 import java.util.ArrayList;
-import java.util.Collection;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Date;
@@ -39,18 +35,21 @@ import java.util.Set;
 
 import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.input.Input;
-import org.apache.ambari.logfeeder.input.InputMgr;
+import org.apache.ambari.logfeeder.input.InputManager;
 import org.apache.ambari.logfeeder.input.InputSimulate;
-import org.apache.ambari.logfeeder.logconfig.LogfeederScheduler;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
-import org.apache.ambari.logfeeder.metrics.MetricsMgr;
+import org.apache.ambari.logfeeder.logconfig.LogConfigHandler;
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.apache.ambari.logfeeder.metrics.MetricsManager;
 import org.apache.ambari.logfeeder.output.Output;
-import org.apache.ambari.logfeeder.output.OutputMgr;
+import org.apache.ambari.logfeeder.output.OutputManager;
 import org.apache.ambari.logfeeder.util.AliasUtil;
 import org.apache.ambari.logfeeder.util.FileUtil;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.ambari.logfeeder.util.AliasUtil.ALIAS_PARAM;
-import org.apache.ambari.logfeeder.util.AliasUtil.ALIAS_TYPE;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.ambari.logfeeder.util.AliasUtil.AliasType;
 import org.apache.hadoop.util.ShutdownHookManager;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
@@ -58,171 +57,142 @@ import org.apache.log4j.Logger;
 import com.google.gson.reflect.TypeToken;
 
 public class LogFeeder {
-  private static final Logger logger = Logger.getLogger(LogFeeder.class);
+  private static final Logger LOG = Logger.getLogger(LogFeeder.class);
 
   private static final int LOGFEEDER_SHUTDOWN_HOOK_PRIORITY = 30;
+  private static final int CHECKPOINT_CLEAN_INTERVAL_MS = 24 * 60 * 60 * 60 * 1000; // 24 hours
 
-  private Collection<Output> outputList = new ArrayList<Output>();
+  private OutputManager outputManager = new OutputManager();
+  private InputManager inputManager = new InputManager();
+  private MetricsManager metricsManager = new MetricsManager();
 
-  private OutputMgr outMgr = new OutputMgr();
-  private InputMgr inputMgr = new InputMgr();
-  private MetricsMgr metricsMgr = new MetricsMgr();
+  public static Map<String, Object> globalConfigs = new HashMap<>();
 
-  public static Map<String, Object> globalMap = null;
-  private String[] inputParams;
-
-  private List<Map<String, Object>> globalConfigList = new ArrayList<Map<String, Object>>();
-  private List<Map<String, Object>> inputConfigList = new ArrayList<Map<String, Object>>();
-  private List<Map<String, Object>> filterConfigList = new ArrayList<Map<String, Object>>();
-  private List<Map<String, Object>> outputConfigList = new ArrayList<Map<String, Object>>();
+  private List<Map<String, Object>> inputConfigList = new ArrayList<>();
+  private List<Map<String, Object>> filterConfigList = new ArrayList<>();
+  private List<Map<String, Object>> outputConfigList = new ArrayList<>();
   
-  private int checkPointCleanIntervalMS = 24 * 60 * 60 * 60 * 1000; // 24 hours
   private long lastCheckPointCleanedMS = 0;
-  
-  private static boolean isLogfeederCompleted = false;
-  
+  private boolean isLogfeederCompleted = false;
   private Thread statLoggerThread = null;
 
-  private LogFeeder(String[] args) {
-    inputParams = args;
+  private LogFeeder() {}
+
+  public void run() {
+    try {
+      init();
+      monitor();
+      waitOnAllDaemonThreads();
+    } catch (Throwable t) {
+      LOG.fatal("Caught exception in main.", t);
+      System.exit(1);
+    }
   }
 
   private void init() throws Throwable {
+    Date startTime = new Date();
 
-    LogFeederUtil.loadProperties("logfeeder.properties", inputParams);
-
-    String configFiles = LogFeederUtil.getStringProperty("logfeeder.config.files");
-    logger.info("logfeeder.config.files=" + configFiles);
+    loadConfigFiles();
+    addSimulatedInputs();
+    mergeAllConfigs();
     
-    String[] configFileList = null;
-    if (configFiles != null) {
-      configFileList = configFiles.split(",");
-    }
-    //list of config those are there in cmd line config dir , end with .json
-    String[] cmdLineConfigs = getConfigFromCmdLine();
-    //merge both config
-    String mergedConfigList[] = LogFeederUtil.mergeArray(configFileList,
-        cmdLineConfigs);
-    //mergedConfigList is null then set default conifg 
-    if (mergedConfigList == null || mergedConfigList.length == 0) {
-      mergedConfigList = LogFeederUtil.getStringProperty("config.file",
-          "config.json").split(",");
-    }
-    for (String configFileName : mergedConfigList) {
-      logger.info("Going to load config file:" + configFileName);
-      //escape space from config file path
-      configFileName= configFileName.replace("\\ ", "%20");
+    LogConfigHandler.handleConfig();
+    
+    outputManager.init();
+    inputManager.init();
+    metricsManager.init();
+    
+    LOG.debug("==============");
+    
+    Date endTime = new Date();
+    LOG.info("Took " + (endTime.getTime() - startTime.getTime()) + " ms to initialize");
+  }
+
+  private void loadConfigFiles() throws Exception {
+    List<String> configFiles = getConfigFiles();
+    for (String configFileName : configFiles) {
+      LOG.info("Going to load config file:" + configFileName);
+      configFileName = configFileName.replace("\\ ", "%20");
       File configFile = new File(configFileName);
       if (configFile.exists() && configFile.isFile()) {
-        logger.info("Config file exists in path."
-          + configFile.getAbsolutePath());
+        LOG.info("Config file exists in path." + configFile.getAbsolutePath());
         loadConfigsUsingFile(configFile);
       } else {
-        // Let's try to load it from class loader
-        logger.info("Trying to load config file from classloader: "
-          + configFileName);
+        LOG.info("Trying to load config file from classloader: " + configFileName);
         loadConfigsUsingClassLoader(configFileName);
-        logger.info("Loaded config file from classloader: "
-          + configFileName);
+        LOG.info("Loaded config file from classloader: " + configFileName);
       }
     }
-    
-    addSimulatedInputs();
-    
-    mergeAllConfigs();
-    
-    LogfeederScheduler.INSTANCE.start();
-    
-    outMgr.setOutputList(outputList);
-    for (Output output : outputList) {
-      output.init();
-    }
-    inputMgr.init();
-    metricsMgr.init();
-    logger.debug("==============");
   }
 
-  private void loadConfigsUsingClassLoader(String configFileName) throws Exception {
-    BufferedInputStream fileInputStream = null;
-    BufferedReader br = null;
-    try {
-      fileInputStream = (BufferedInputStream) this
-        .getClass().getClassLoader()
-        .getResourceAsStream(configFileName);
-      if (fileInputStream != null) {
-        br = new BufferedReader(new InputStreamReader(
-          fileInputStream));
-        String configData = readFile(br);
-        loadConfigs(configData);
-      } else {
-        throw new Exception("Can't find configFile=" + configFileName);
-      }
-    } finally {
-      if (br != null) {
-        try {
-          br.close();
-        } catch (IOException e) {
-        }
-      }
+  private List<String> getConfigFiles() {
+    List<String> configFiles = new ArrayList<>();
+    
+    String logfeederConfigFilesProperty = LogFeederUtil.getStringProperty("logfeeder.config.files");
+    LOG.info("logfeeder.config.files=" + logfeederConfigFilesProperty);
+    if (logfeederConfigFilesProperty != null) {
+      configFiles.addAll(Arrays.asList(logfeederConfigFilesProperty.split(",")));
+    }
 
-      if (fileInputStream != null) {
-        try {
-          fileInputStream.close();
-        } catch (IOException e) {
-        }
+    String inputConfigDir = LogFeederUtil.getStringProperty("input_config_dir");
+    if (StringUtils.isNotEmpty(inputConfigDir)) {
+      File configDirFile = new File(inputConfigDir);
+      List<File> inputConfigFiles = FileUtil.getAllFileFromDir(configDirFile, "json", false);
+      for (File inputConfigFile : inputConfigFiles) {
+        configFiles.add(inputConfigFile.getAbsolutePath());
       }
     }
+    
+    if (CollectionUtils.isEmpty(configFiles)) {
+      String configFileProperty = LogFeederUtil.getStringProperty("config.file", "config.json");
+      configFiles.addAll(Arrays.asList(configFileProperty.split(",")));
+    }
+    
+    return configFiles;
   }
 
-  /**
-   * This method loads the configurations from the given file.
-   */
   private void loadConfigsUsingFile(File configFile) throws Exception {
-    FileInputStream fileInputStream = null;
     try {
-      fileInputStream = new FileInputStream(configFile);
-      BufferedReader br = new BufferedReader(new InputStreamReader(
-        fileInputStream));
-      String configData = readFile(br);
+      String configData = FileUtils.readFileToString(configFile);
       loadConfigs(configData);
     } catch (Exception t) {
-      logger.error("Error opening config file. configFilePath="
-        + configFile.getAbsolutePath());
+      LOG.error("Error opening config file. configFilePath=" + configFile.getAbsolutePath());
       throw t;
-    } finally {
-      if (fileInputStream != null) {
-        try {
-          fileInputStream.close();
-        } catch (Throwable t) {
-          // ignore
-        }
-      }
+    }
+  }
+
+  private void loadConfigsUsingClassLoader(String configFileName) throws Exception {
+    try (BufferedInputStream fis = (BufferedInputStream) this.getClass().getClassLoader().getResourceAsStream(configFileName)) {
+      String configData = IOUtils.toString(fis);
+      loadConfigs(configData);
     }
   }
 
   @SuppressWarnings("unchecked")
   private void loadConfigs(String configData) throws Exception {
-    Type type = new TypeToken<Map<String, Object>>() {
-    }.getType();
-    Map<String, Object> configMap = LogFeederUtil.getGson().fromJson(
-      configData, type);
+    Type type = new TypeToken<Map<String, Object>>() {}.getType();
+    Map<String, Object> configMap = LogFeederUtil.getGson().fromJson(configData, type);
 
     // Get the globals
     for (String key : configMap.keySet()) {
-      if (key.equalsIgnoreCase("global")) {
-        globalConfigList.add((Map<String, Object>) configMap.get(key));
-      } else if (key.equalsIgnoreCase("input")) {
-        List<Map<String, Object>> mapList = (List<Map<String, Object>>) configMap
-          .get(key);
-        inputConfigList.addAll(mapList);
-      } else if (key.equalsIgnoreCase("filter")) {
-        List<Map<String, Object>> mapList = (List<Map<String, Object>>) configMap
-          .get(key);
-        filterConfigList.addAll(mapList);
-      } else if (key.equalsIgnoreCase("output")) {
-        List<Map<String, Object>> mapList = (List<Map<String, Object>>) configMap
-          .get(key);
-        outputConfigList.addAll(mapList);
+      switch (key) {
+        case "global" :
+          globalConfigs.putAll((Map<String, Object>) configMap.get(key));
+          break;
+        case "input" :
+          List<Map<String, Object>> inputConfig = (List<Map<String, Object>>) configMap.get(key);
+          inputConfigList.addAll(inputConfig);
+          break;
+        case "filter" :
+          List<Map<String, Object>> filterConfig = (List<Map<String, Object>>) configMap.get(key);
+          filterConfigList.addAll(filterConfig);
+          break;
+        case "output" :
+          List<Map<String, Object>> outputConfig = (List<Map<String, Object>>) configMap.get(key);
+          outputConfigList.addAll(outputConfig);
+          break;
+        default :
+          LOG.warn("Unknown config key: " + key);
       }
     }
   }
@@ -244,231 +214,175 @@ public class LogFeeder {
   }
 
   private void mergeAllConfigs() {
-    globalMap = mergeConfigs(globalConfigList);
+    loadOutputs();
+    loadInputs();
+    loadFilters();
+    
+    assignOutputsToInputs();
+  }
 
-    sortBlocks(filterConfigList);
-    // First loop for output
+  private void loadOutputs() {
     for (Map<String, Object> map : outputConfigList) {
       if (map == null) {
         continue;
       }
-      mergeBlocks(globalMap, map);
+      mergeBlocks(globalConfigs, map);
 
       String value = (String) map.get("destination");
-      Output output;
-      if (value == null || value.isEmpty()) {
-        logger.error("Output block doesn't have destination element");
-        continue;
-      }
-      String classFullName = AliasUtil.getInstance().readAlias(value, ALIAS_TYPE.OUTPUT, ALIAS_PARAM.KLASS);
-      if (classFullName == null || classFullName.isEmpty()) {
-        logger.error("Destination block doesn't have output element");
+      if (StringUtils.isEmpty(value)) {
+        LOG.error("Output block doesn't have destination element");
         continue;
       }
-      output = (Output) LogFeederUtil.getClassInstance(classFullName, ALIAS_TYPE.OUTPUT);
-
+      Output output = (Output) AliasUtil.getClassInstance(value, AliasType.OUTPUT);
       if (output == null) {
-        logger.error("Destination Object is null");
+        LOG.error("Output object could not be found");
         continue;
       }
-
       output.setDestination(value);
       output.loadConfig(map);
 
-      // We will only check for is_enabled out here. Down below we will
-      // check whether this output is enabled for the input
-      boolean isEnabled = output.getBooleanValue("is_enabled", true);
-      if (isEnabled) {
-        outputList.add(output);
+      // We will only check for is_enabled out here. Down below we will check whether this output is enabled for the input
+      if (output.getBooleanValue("is_enabled", true)) {
         output.logConfgs(Level.INFO);
+        outputManager.add(output);
       } else {
-        logger.info("Output is disabled. So ignoring it. "
-          + output.getShortDescription());
+        LOG.info("Output is disabled. So ignoring it. " + output.getShortDescription());
       }
     }
+  }
 
-    // Second loop for input
+  private void loadInputs() {
     for (Map<String, Object> map : inputConfigList) {
       if (map == null) {
         continue;
       }
-      mergeBlocks(globalMap, map);
+      mergeBlocks(globalConfigs, map);
 
       String value = (String) map.get("source");
-      Input input;
-      if (value == null || value.isEmpty()) {
-        logger.error("Input block doesn't have source element");
-        continue;
-      }
-      String classFullName = AliasUtil.getInstance().readAlias(value, ALIAS_TYPE.INPUT, ALIAS_PARAM.KLASS);
-      if (classFullName == null || classFullName.isEmpty()) {
-        logger.error("Source block doesn't have source element");
+      if (StringUtils.isEmpty(value)) {
+        LOG.error("Input block doesn't have source element");
         continue;
       }
-      input = (Input) LogFeederUtil.getClassInstance(classFullName, ALIAS_TYPE.INPUT);
-
+      Input input = (Input) AliasUtil.getClassInstance(value, AliasType.INPUT);
       if (input == null) {
-        logger.error("Source Object is null");
+        LOG.error("Input object could not be found");
         continue;
       }
-
       input.setType(value);
       input.loadConfig(map);
 
       if (input.isEnabled()) {
-        input.setOutputMgr(outMgr);
-        input.setInputMgr(inputMgr);
-        inputMgr.add(input);
+        input.setOutputManager(outputManager);
+        input.setInputManager(inputManager);
+        inputManager.add(input);
         input.logConfgs(Level.INFO);
       } else {
-        logger.info("Input is disabled. So ignoring it. "
-          + input.getShortDescription());
+        LOG.info("Input is disabled. So ignoring it. " + input.getShortDescription());
       }
     }
+  }
+
+  private void loadFilters() {
+    sortFilters();
 
-    // Third loop is for filter, but we will have to create a filter
-    // instance for each input, so it can maintain the state per input
     List<Input> toRemoveInputList = new ArrayList<Input>();
-    for (Input input : inputMgr.getInputList()) {
-      Filter prevFilter = null;
+    for (Input input : inputManager.getInputList()) {
       for (Map<String, Object> map : filterConfigList) {
         if (map == null) {
           continue;
         }
-        mergeBlocks(globalMap, map);
+        mergeBlocks(globalConfigs, map);
 
         String value = (String) map.get("filter");
-        Filter filter;
-        if (value == null || value.isEmpty()) {
-          logger.error("Filter block doesn't have filter element");
-          continue;
-        }
-
-        String classFullName = AliasUtil.getInstance().readAlias(value, ALIAS_TYPE.FILTER, ALIAS_PARAM.KLASS);
-        if (classFullName == null || classFullName.isEmpty()) {
-          logger.error("Filter block doesn't have filter element");
+        if (StringUtils.isEmpty(value)) {
+          LOG.error("Filter block doesn't have filter element");
           continue;
         }
-        filter = (Filter) LogFeederUtil.getClassInstance(classFullName, ALIAS_TYPE.FILTER);
-
+        Filter filter = (Filter) AliasUtil.getClassInstance(value, AliasType.FILTER);
         if (filter == null) {
-          logger.error("Filter Object is null");
+          LOG.error("Filter object could not be found");
           continue;
         }
         filter.loadConfig(map);
         filter.setInput(input);
 
         if (filter.isEnabled()) {
-          filter.setOutputMgr(outMgr);
-          if (prevFilter == null) {
-            input.setFirstFilter(filter);
-          } else {
-            prevFilter.setNextFilter(filter);
-          }
-          prevFilter = filter;
+          filter.setOutputManager(outputManager);
+          input.addFilter(filter);
           filter.logConfgs(Level.INFO);
         } else {
-          logger.debug("Ignoring filter "
-            + filter.getShortDescription() + " for input "
-            + input.getShortDescription());
+          LOG.debug("Ignoring filter " + filter.getShortDescription() + " for input " + input.getShortDescription());
         }
       }
+      
       if (input.getFirstFilter() == null) {
         toRemoveInputList.add(input);
       }
     }
 
-    // Fourth loop is for associating valid outputs to input
-    Set<Output> usedOutputSet = new HashSet<Output>();
-    for (Input input : inputMgr.getInputList()) {
-      for (Output output : outputList) {
-        boolean ret = LogFeederUtil.isEnabled(output.getConfigs(),
-          input.getConfigs());
-        if (ret) {
-          usedOutputSet.add(output);
-          input.addOutput(output);
-        }
-      }
-    }
-    outputList = usedOutputSet;
-
     for (Input toRemoveInput : toRemoveInputList) {
-      logger.warn("There are no filters, we will ignore this input. "
-        + toRemoveInput.getShortDescription());
-      inputMgr.removeInput(toRemoveInput);
+      LOG.warn("There are no filters, we will ignore this input. " + toRemoveInput.getShortDescription());
+      inputManager.removeInput(toRemoveInput);
     }
   }
 
-  private void sortBlocks(List<Map<String, Object>> blockList) {
-
-    Collections.sort(blockList, new Comparator<Map<String, Object>>() {
+  private void sortFilters() {
+    Collections.sort(filterConfigList, new Comparator<Map<String, Object>>() {
 
       @Override
       public int compare(Map<String, Object> o1, Map<String, Object> o2) {
         Object o1Sort = o1.get("sort_order");
         Object o2Sort = o2.get("sort_order");
-        if (o1Sort == null) {
-          return 0;
-        }
-        if (o2Sort == null) {
+        if (o1Sort == null || o2Sort == null) {
           return 0;
         }
-        int o1Value = 0;
-        if (!(o1Sort instanceof Number)) {
-          try {
-            o1Value = (new Double(Double.parseDouble(o1Sort
-              .toString()))).intValue();
-          } catch (Throwable t) {
-            logger.error("Value is not of type Number. class="
-              + o1Sort.getClass().getName() + ", value="
-              + o1Sort.toString() + ", map=" + o1.toString());
-          }
-        } else {
-          o1Value = ((Number) o1Sort).intValue();
-        }
-        int o2Value = 0;
-        if (!(o2Sort instanceof Integer)) {
+        
+        int o1Value = parseSort(o1, o1Sort);
+        int o2Value = parseSort(o2, o2Sort);
+        
+        return o1Value - o2Value;
+      }
+
+      private int parseSort(Map<String, Object> map, Object o) {
+        if (!(o instanceof Number)) {
           try {
-            o2Value = (new Double(Double.parseDouble(o2Sort
-              .toString()))).intValue();
+            return (new Double(Double.parseDouble(o.toString()))).intValue();
           } catch (Throwable t) {
-            logger.error("Value is not of type Number. class="
-              + o2Sort.getClass().getName() + ", value="
-              + o2Sort.toString() + ", map=" + o2.toString());
+            LOG.error("Value is not of type Number. class=" + o.getClass().getName() + ", value=" + o.toString()
+              + ", map=" + map.toString());
+            return 0;
           }
         } else {
-
+          return ((Number) o).intValue();
         }
-        return o1Value - o2Value;
       }
     });
   }
 
-  private Map<String, Object> mergeConfigs(
-    List<Map<String, Object>> configList) {
-    Map<String, Object> mergedConfig = new HashMap<String, Object>();
-    for (Map<String, Object> config : configList) {
-      mergeBlocks(config, mergedConfig);
+  private void assignOutputsToInputs() {
+    Set<Output> usedOutputSet = new HashSet<Output>();
+    for (Input input : inputManager.getInputList()) {
+      for (Output output : outputManager.getOutputs()) {
+        if (LogFeederUtil.isEnabled(output.getConfigs(), input.getConfigs())) {
+          usedOutputSet.add(output);
+          input.addOutput(output);
+        }
+      }
     }
-    return mergedConfig;
+    outputManager.retainUsedOutputs(usedOutputSet);
   }
 
-  private void mergeBlocks(Map<String, Object> fromMap,
-                           Map<String, Object> toMap) {
-    // Merge the non-string
+  @SuppressWarnings("unchecked")
+  private void mergeBlocks(Map<String, Object> fromMap, Map<String, Object> toMap) {
     for (String key : fromMap.keySet()) {
       Object objValue = fromMap.get(key);
       if (objValue == null) {
         continue;
       }
       if (objValue instanceof Map) {
-        @SuppressWarnings("unchecked")
-        Map<String, Object> globalFields = LogFeederUtil
-          .cloneObject((Map<String, Object>) fromMap.get(key));
+        Map<String, Object> globalFields = LogFeederUtil.cloneObject((Map<String, Object>) objValue);
 
-        @SuppressWarnings("unchecked")
-        Map<String, Object> localFields = (Map<String, Object>) toMap
-          .get(key);
+        Map<String, Object> localFields = (Map<String, Object>) toMap.get(key);
         if (localFields == null) {
           localFields = new HashMap<String, Object>();
           toMap.put(key, localFields);
@@ -477,8 +391,7 @@ public class LogFeeder {
         if (globalFields != null) {
           for (String fieldKey : globalFields.keySet()) {
             if (!localFields.containsKey(fieldKey)) {
-              localFields.put(fieldKey,
-                globalFields.get(fieldKey));
+              localFields.put(fieldKey, globalFields.get(fieldKey));
             }
           }
         }
@@ -493,11 +406,29 @@ public class LogFeeder {
     }
   }
 
+  private class JVMShutdownHook extends Thread {
+
+    public void run() {
+      try {
+        LOG.info("Processing is shutting down.");
+
+        inputManager.close();
+        outputManager.close();
+        inputManager.checkInAll();
+
+        logStats();
+
+        LOG.info("LogSearch is exiting.");
+      } catch (Throwable t) {
+        // Ignore
+      }
+    }
+  }
+
   private void monitor() throws Exception {
-    inputMgr.monitor();
+    inputManager.monitor();
     JVMShutdownHook logfeederJVMHook = new JVMShutdownHook();
-    ShutdownHookManager.get().addShutdownHook(logfeederJVMHook,
-        LOGFEEDER_SHUTDOWN_HOOK_PRIORITY);
+    ShutdownHookManager.get().addShutdownHook(logfeederJVMHook, LOGFEEDER_SHUTDOWN_HOOK_PRIORITY);
     
     statLoggerThread = new Thread("statLogger") {
 
@@ -512,17 +443,14 @@ public class LogFeeder {
           try {
             logStats();
           } catch (Throwable t) {
-            logger.error(
-              "LogStats: Caught exception while logging stats.",
-              t);
+            LOG.error("LogStats: Caught exception while logging stats.", t);
           }
 
-          if (System.currentTimeMillis() > (lastCheckPointCleanedMS + checkPointCleanIntervalMS)) {
+          if (System.currentTimeMillis() > (lastCheckPointCleanedMS + CHECKPOINT_CLEAN_INTERVAL_MS)) {
             lastCheckPointCleanedMS = System.currentTimeMillis();
-            inputMgr.cleanCheckPointFiles();
+            inputManager.cleanCheckPointFiles();
           }
 
-          // logfeeder is stopped then break the loop
           if (isLogfeederCompleted) {
             break;
           }
@@ -536,84 +464,20 @@ public class LogFeeder {
   }
 
   private void logStats() {
-    inputMgr.logStats();
-    outMgr.logStats();
-
-    if (metricsMgr.isMetricsEnabled()) {
-      List<MetricCount> metricsList = new ArrayList<MetricCount>();
-      inputMgr.addMetricsContainers(metricsList);
-      outMgr.addMetricsContainers(metricsList);
-      metricsMgr.useMetrics(metricsList);
-    }
-  }
-
-  private String readFile(BufferedReader br) throws Exception {
-    try {
-      StringBuilder sb = new StringBuilder();
-      String line = br.readLine();
-      while (line != null) {
-        sb.append(line);
-        line = br.readLine();
-      }
-      return sb.toString();
-    } catch (Exception t) {
-      logger.error("Error loading properties file.", t);
-      throw t;
-    }
-  }
-
-  public Collection<Output> getOutputList() {
-    return outputList;
-  }
-
-  public OutputMgr getOutMgr() {
-    return outMgr;
-  }
-
-  public static void main(String[] args) {
-    LogFeeder logFeeder = new LogFeeder(args);
-    logFeeder.run();
-  }
-
-  public void run() {
-    try {
-      Date startTime = new Date();
-      this.init();
-      Date endTime = new Date();
-      logger.info("Took " + (endTime.getTime() - startTime.getTime())
-        + " ms to initialize");
-      this.monitor();
-      //wait for all background thread before stop main thread
-      this.waitOnAllDaemonThreads();
-    } catch (Throwable t) {
-      logger.fatal("Caught exception in main.", t);
-      System.exit(1);
+    inputManager.logStats();
+    outputManager.logStats();
+
+    if (metricsManager.isMetricsEnabled()) {
+      List<MetricData> metricsList = new ArrayList<MetricData>();
+      inputManager.addMetricsContainers(metricsList);
+      outputManager.addMetricsContainers(metricsList);
+      metricsManager.useMetrics(metricsList);
     }
   }
 
-  private class JVMShutdownHook extends Thread {
-
-    public void run() {
-      try {
-        logger.info("Processing is shutting down.");
-
-        inputMgr.close();
-        outMgr.close();
-        inputMgr.checkInAll();
-
-        logStats();
-
-        logger.info("LogSearch is exiting.");
-      } catch (Throwable t) {
-        // Ignore
-      }
-    }
-  }
-  
   private void waitOnAllDaemonThreads() {
-    String foreground = LogFeederUtil.getStringProperty("foreground");
-    if (foreground != null && foreground.equalsIgnoreCase("true")) {
-      inputMgr.waitOnAllInputs();
+    if ("true".equals(LogFeederUtil.getStringProperty("foreground"))) {
+      inputManager.waitOnAllInputs();
       isLogfeederCompleted = true;
       if (statLoggerThread != null) {
         try {
@@ -624,24 +488,16 @@ public class LogFeeder {
       }
     }
   }
-  
-  private String[] getConfigFromCmdLine() {
-    String inputConfigDir = LogFeederUtil.getStringProperty("input_config_dir");
-    if (inputConfigDir != null && !inputConfigDir.isEmpty()) {
-      String[] searchFileWithExtensions = new String[] { "json" };
-      File configDirFile = new File(inputConfigDir);
-      List<File> configFiles = FileUtil.getAllFileFromDir(configDirFile,
-          searchFileWithExtensions, false);
-      if (configFiles != null && configFiles.size() > 0) {
-        String configPaths[] = new String[configFiles.size()];
-        for (int index = 0; index < configFiles.size(); index++) {
-          File configFile = configFiles.get(index);
-          String configFilePath = configFile.getAbsolutePath();
-          configPaths[index] = configFilePath;
-        }
-        return configPaths;
-      }
+
+  public static void main(String[] args) {
+    try {
+      LogFeederUtil.loadProperties("logfeeder.properties", args);
+    } catch (Throwable t) {
+      LOG.warn("Could not load logfeeder properites");
+      System.exit(1);
     }
-    return new String[0];
+
+    LogFeeder logFeeder = new LogFeeder();
+    logFeeder.run();
   }
 }
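
For readers following the mergeBlocks() refactoring above: the top-level rule is that a block keeps its own settings and only inherits global keys it does not define itself. The following standalone sketch (hypothetical class name MergeSketch; it ignores the nested-map handling that mergeBlocks also performs) illustrates that rule:

import java.util.HashMap;
import java.util.Map;

public class MergeSketch {
  // Copy global entries into a block only when the block does not already define them.
  static void merge(Map<String, Object> global, Map<String, Object> block) {
    for (Map.Entry<String, Object> entry : global.entrySet()) {
      block.putIfAbsent(entry.getKey(), entry.getValue());
    }
  }

  public static void main(String[] args) {
    Map<String, Object> global = new HashMap<>();
    global.put("tail", true);
    global.put("checkpoint.interval.ms", 5000);

    Map<String, Object> inputBlock = new HashMap<>();
    inputBlock.put("tail", false); // local value wins over the global one

    merge(global, inputBlock);
    System.out.println(inputBlock); // contains checkpoint.interval.ms=5000 and tail=false
  }
}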

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
index 287982f..47ddc51 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
@@ -23,27 +23,27 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.log4j.Priority;
 
 
 public abstract class ConfigBlock {
-  static private Logger logger = Logger.getLogger(ConfigBlock.class);
+  private static final Logger LOG = Logger.getLogger(ConfigBlock.class);
 
   private boolean drain = false;
 
   protected Map<String, Object> configs;
   protected Map<String, String> contextFields = new HashMap<String, String>();
-  public MetricCount statMetric = new MetricCount();
-
-  /**
-   *
-   */
+  public MetricData statMetric = new MetricData(getStatMetricName(), false);
+  protected String getStatMetricName() {
+    return null;
+  }
+  
   public ConfigBlock() {
-    super();
   }
 
   /**
@@ -58,10 +58,7 @@ public abstract class ConfigBlock {
     return this.getClass().getSimpleName();
   }
 
-  /**
-   * @param metricsList
-   */
-  public void addMetricsContainers(List<MetricCount> metricsList) {
+  public void addMetricsContainers(List<MetricData> metricsList) {
     metricsList.add(statMetric);
   }
 
@@ -89,25 +86,21 @@ public abstract class ConfigBlock {
     boolean isEnabled = getBooleanValue("is_enabled", true);
     if (isEnabled) {
       // Let's check for static conditions
-      Map<String, Object> conditions = (Map<String, Object>) configs
-        .get("conditions");
+      Map<String, Object> conditions = (Map<String, Object>) configs.get("conditions");
       boolean allow = true;
-      if (conditions != null && conditions.size() > 0) {
+      if (MapUtils.isNotEmpty(conditions)) {
         allow = false;
         for (String conditionType : conditions.keySet()) {
           if (conditionType.equalsIgnoreCase("fields")) {
-            Map<String, Object> fields = (Map<String, Object>) conditions
-              .get("fields");
+            Map<String, Object> fields = (Map<String, Object>) conditions.get("fields");
             for (String fieldName : fields.keySet()) {
               Object values = fields.get(fieldName);
               if (values instanceof String) {
-                allow = isFieldConditionMatch(fieldName,
-                  (String) values);
+                allow = isFieldConditionMatch(fieldName, (String) values);
               } else {
                 List<String> listValues = (List<String>) values;
                 for (String stringValue : listValues) {
-                  allow = isFieldConditionMatch(fieldName,
-                    stringValue);
+                  allow = isFieldConditionMatch(fieldName, stringValue);
                   if (allow) {
                     break;
                   }
@@ -135,8 +128,7 @@ public abstract class ConfigBlock {
       allow = true;
     } else {
       @SuppressWarnings("unchecked")
-      Map<String, Object> addFields = (Map<String, Object>) configs
-        .get("add_fields");
+      Map<String, Object> addFields = (Map<String, Object>) configs.get("add_fields");
       if (addFields != null && addFields.get(fieldName) != null) {
         String addFieldValue = (String) addFields.get(fieldName);
         if (stringValue.equalsIgnoreCase(addFieldValue)) {
@@ -184,12 +176,7 @@ public abstract class ConfigBlock {
     String strValue = getStringValue(key);
     boolean retValue = defaultValue;
     if (!StringUtils.isEmpty(strValue)) {
-      if (strValue.equalsIgnoreCase("true")
-        || strValue.equalsIgnoreCase("yes")) {
-        retValue = true;
-      } else {
-        retValue = false;
-      }
+      retValue = (strValue.equalsIgnoreCase("true") || strValue.equalsIgnoreCase("yes"));
     }
     return retValue;
   }
@@ -201,8 +188,7 @@ public abstract class ConfigBlock {
       try {
         retValue = Integer.parseInt(strValue);
       } catch (Throwable t) {
-        logger.error("Error parsing integer value. key=" + key
-          + ", value=" + strValue);
+        LOG.error("Error parsing integer value. key=" + key + ", value=" + strValue);
       }
     }
     return retValue;
@@ -215,8 +201,7 @@ public abstract class ConfigBlock {
       try {
         retValue = Long.parseLong(strValue);
       } catch (Throwable t) {
-        logger.error("Error parsing long value. key=" + key + ", value="
-            + strValue);
+        LOG.error("Error parsing long value. key=" + key + ", value=" + strValue);
       }
     }
     return retValue;
@@ -227,29 +212,27 @@ public abstract class ConfigBlock {
   }
 
   public void incrementStat(int count) {
-    statMetric.count += count;
+    statMetric.value += count;
   }
 
-  public void logStatForMetric(MetricCount metric, String prefixStr) {
-    LogFeederUtil.logStatForMetric(metric, prefixStr, ", key="
-      + getShortDescription());
+  public void logStatForMetric(MetricData metric, String prefixStr) {
+    LogFeederUtil.logStatForMetric(metric, prefixStr, ", key=" + getShortDescription());
   }
 
-  synchronized public void logStat() {
+  public synchronized void logStat() {
     logStatForMetric(statMetric, "Stat");
   }
 
   public boolean logConfgs(Priority level) {
-    if (level.toInt() == Priority.INFO_INT && !logger.isInfoEnabled()) {
+    if (level.toInt() == Priority.INFO_INT && !LOG.isInfoEnabled()) {
       return false;
     }
-    if (level.toInt() == Priority.DEBUG_INT && !logger.isDebugEnabled()) {
+    if (level.toInt() == Priority.DEBUG_INT && !LOG.isDebugEnabled()) {
       return false;
     }
-    logger.log(level, "Printing configuration Block="
-      + getShortDescription());
-    logger.log(level, "configs=" + configs);
-    logger.log(level, "contextFields=" + contextFields);
+    LOG.log(level, "Printing configuration Block=" + getShortDescription());
+    LOG.log(level, "configs=" + configs);
+    LOG.log(level, "contextFields=" + contextFields);
     return true;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederConstants.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederConstants.java
new file mode 100644
index 0000000..d1e7fba
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/LogFeederConstants.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.common;
+
+public class LogFeederConstants {
+
+  public static final String ALL = "all";
+  public static final String LOGFEEDER_FILTER_NAME = "log_feeder_config";
+  public static final String LOG_LEVEL_UNKNOWN = "UNKNOWN";
+  
+  // solr fields
+  public static final String SOLR_LEVEL = "level";
+  public static final String SOLR_COMPONENT = "type";
+  public static final String SOLR_HOST = "host";
+
+  // UserConfig Constants History
+  public static final String VALUES = "jsons";
+  public static final String ROW_TYPE = "rowtype";
+  
+  // S3 Constants
+  public static final String S3_PATH_START_WITH = "s3://";
+  public static final String S3_PATH_SEPARATOR = "/";
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
index ab371f1..684f3c4 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/Filter.java
@@ -29,21 +29,19 @@ import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.mapper.Mapper;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
-import org.apache.ambari.logfeeder.output.OutputMgr;
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.apache.ambari.logfeeder.output.OutputManager;
 import org.apache.ambari.logfeeder.util.AliasUtil;
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.ambari.logfeeder.util.AliasUtil.ALIAS_PARAM;
-import org.apache.ambari.logfeeder.util.AliasUtil.ALIAS_TYPE;
+import org.apache.ambari.logfeeder.util.AliasUtil.AliasType;
 import org.apache.log4j.Logger;
 import org.apache.log4j.Priority;
 
 public abstract class Filter extends ConfigBlock {
-  private static final Logger logger = Logger.getLogger(Filter.class);
+  private static final Logger LOG = Logger.getLogger(Filter.class);
 
   protected Input input;
   private Filter nextFilter = null;
-  private OutputMgr outputMgr;
+  private OutputManager outputManager;
 
   private Map<String, List<Mapper>> postFieldValueMappers = new HashMap<String, List<Mapper>>();
 
@@ -74,15 +72,12 @@ public abstract class Filter extends ConfigBlock {
       }
       for (Map<String, Object> mapObject : mapList) {
         for (String mapClassCode : mapObject.keySet()) {
-          Mapper mapper = getMapper(mapClassCode);
+          Mapper mapper = (Mapper) AliasUtil.getClassInstance(mapClassCode, AliasType.MAPPER);
           if (mapper == null) {
             break;
           }
-          if (mapper.init(getInput().getShortDescription(),
-            fieldName, mapClassCode,
-            mapObject.get(mapClassCode))) {
-            List<Mapper> fieldMapList = postFieldValueMappers
-              .get(fieldName);
+          if (mapper.init(getInput().getShortDescription(), fieldName, mapClassCode, mapObject.get(mapClassCode))) {
+            List<Mapper> fieldMapList = postFieldValueMappers.get(fieldName);
             if (fieldMapList == null) {
               fieldMapList = new ArrayList<Mapper>();
               postFieldValueMappers.put(fieldName, fieldMapList);
@@ -94,17 +89,8 @@ public abstract class Filter extends ConfigBlock {
     }
   }
 
-  private Mapper getMapper(String mapClassCode) {
-    String classFullName = AliasUtil.getInstance().readAlias(mapClassCode, ALIAS_TYPE.MAPPER, ALIAS_PARAM.KLASS);
-    if (classFullName != null && !classFullName.isEmpty()) {
-      Mapper mapper = (Mapper) LogFeederUtil.getClassInstance(classFullName, ALIAS_TYPE.MAPPER);
-      return mapper;
-    }
-    return null;
-  }
-
-  public void setOutputMgr(OutputMgr outputMgr) {
-    this.outputMgr = outputMgr;
+  public void setOutputManager(OutputManager outputManager) {
+    this.outputManager = outputManager;
   }
 
   public Filter getNextFilter() {
@@ -131,25 +117,23 @@ public abstract class Filter extends ConfigBlock {
     if (nextFilter != null) {
       nextFilter.apply(inputStr, inputMarker);
     } else {
-      outputMgr.write(inputStr, inputMarker);
+      outputManager.write(inputStr, inputMarker);
     }
   }
 
   public void apply(Map<String, Object> jsonObj, InputMarker inputMarker) throws LogfeederException {
-    if (postFieldValueMappers.size() > 0) {
-      for (String fieldName : postFieldValueMappers.keySet()) {
-        Object value = jsonObj.get(fieldName);
-        if (value != null) {
-          for (Mapper mapper : postFieldValueMappers.get(fieldName)) {
-            value = mapper.apply(jsonObj, value);
-          }
+    for (String fieldName : postFieldValueMappers.keySet()) {
+      Object value = jsonObj.get(fieldName);
+      if (value != null) {
+        for (Mapper mapper : postFieldValueMappers.get(fieldName)) {
+          value = mapper.apply(jsonObj, value);
         }
       }
     }
     if (nextFilter != null) {
       nextFilter.apply(jsonObj, inputMarker);
     } else {
-      outputMgr.write(jsonObj, inputMarker);
+      outputManager.write(jsonObj, inputMarker);
     }
   }
 
@@ -193,16 +177,15 @@ public abstract class Filter extends ConfigBlock {
     if (!super.logConfgs(level)) {
       return false;
     }
-    logger.log(level, "input=" + input.getShortDescription());
+    LOG.log(level, "input=" + input.getShortDescription());
     return true;
   }
 
   @Override
-  public void addMetricsContainers(List<MetricCount> metricsList) {
+  public void addMetricsContainers(List<MetricData> metricsList) {
     super.addMetricsContainers(metricsList);
     if (nextFilter != null) {
       nextFilter.addMetricsContainers(metricsList);
     }
   }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java
index 372c208..7e2da70 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterGrok.java
@@ -36,7 +36,7 @@ import oi.thekraken.grok.api.exception.GrokException;
 
 import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
@@ -45,7 +45,7 @@ import org.apache.log4j.Logger;
 import com.google.gson.reflect.TypeToken;
 
 public class FilterGrok extends Filter {
-  static private Logger logger = Logger.getLogger(FilterGrok.class);
+  private static final Logger LOG = Logger.getLogger(FilterGrok.class);
 
   private static final String GROK_PATTERN_FILE = "grok-patterns";
 
@@ -68,25 +68,23 @@ public class FilterGrok extends Filter {
 
   private Type jsonType = new TypeToken<Map<String, String>>() {}.getType();
 
-  private MetricCount grokErrorMetric = new MetricCount();
+  private MetricData grokErrorMetric = new MetricData("filter.error.grok", false);
 
   @Override
   public void init() throws Exception {
     super.init();
 
     try {
-      grokErrorMetric.metricsName = "filter.error.grok";
       messagePattern = escapePattern(getStringValue("message_pattern"));
       multilinePattern = escapePattern(getStringValue("multiline_pattern"));
       sourceField = getStringValue("source_field");
       removeSourceField = getBooleanValue("remove_source_field",
         removeSourceField);
 
-      logger.info("init() done. grokPattern=" + messagePattern
-        + ", multilinePattern=" + multilinePattern + ", "
-        + getShortDescription());
+      LOG.info("init() done. grokPattern=" + messagePattern + ", multilinePattern=" + multilinePattern + ", " +
+      getShortDescription());
       if (StringUtils.isEmpty(messagePattern)) {
-        logger.error("message_pattern is not set for filter.");
+        LOG.error("message_pattern is not set for filter.");
         return;
       }
       extractNamedParams(messagePattern, namedParamList);
@@ -102,9 +100,7 @@ public class FilterGrok extends Filter {
         grokMultiline.compile(multilinePattern);
       }
     } catch (Throwable t) {
-      logger.fatal(
-        "Caught exception while initializing Grok. multilinePattern="
-          + multilinePattern + ", messagePattern="
+      LOG.fatal("Caught exception while initializing Grok. multilinePattern=" + multilinePattern + ", messagePattern="
           + messagePattern, t);
       grokMessage = null;
       grokMultiline = null;
@@ -123,9 +119,10 @@ public class FilterGrok extends Filter {
   }
 
   private void extractNamedParams(String patternStr, Set<String> paramList) {
-    String grokRegEx = "%\\{" + "(?<name>" + "(?<pattern>[A-z0-9]+)"
-      + "(?::(?<subname>[A-z0-9_:]+))?" + ")" + "(?:=(?<definition>"
-      + "(?:" + "(?:[^{}]+|\\.+)+" + ")+" + ")" + ")?" + "\\}";
+    String grokRegEx = "%\\{" +
+        "(?<name>" + "(?<pattern>[A-z0-9]+)" + "(?::(?<subname>[A-z0-9_:]+))?" + ")" +
+        "(?:=(?<definition>" + "(?:" + "(?:[^{}]+|\\.+)+" + ")+" + ")" + ")?" +
+        "\\}";
 
     Pattern pattern = Pattern.compile(grokRegEx);
     java.util.regex.Matcher matcher = pattern.matcher(patternStr);
@@ -139,28 +136,23 @@ public class FilterGrok extends Filter {
 
   private boolean loadPatterns(Grok grok) {
     InputStreamReader grokPatternsReader = null;
-    logger.info("Loading pattern file " + GROK_PATTERN_FILE);
+    LOG.info("Loading pattern file " + GROK_PATTERN_FILE);
     try {
-      BufferedInputStream fileInputStream = (BufferedInputStream) this
-        .getClass().getClassLoader()
-        .getResourceAsStream(GROK_PATTERN_FILE);
+      BufferedInputStream fileInputStream =
+          (BufferedInputStream) this.getClass().getClassLoader().getResourceAsStream(GROK_PATTERN_FILE);
       if (fileInputStream == null) {
-        logger.fatal("Couldn't load grok-patterns file "
-          + GROK_PATTERN_FILE + ". Things will not work");
+        LOG.fatal("Couldn't load grok-patterns file " + GROK_PATTERN_FILE + ". Things will not work");
         return false;
       }
       grokPatternsReader = new InputStreamReader(fileInputStream);
     } catch (Throwable t) {
-      logger.fatal("Error reading grok-patterns file " + GROK_PATTERN_FILE
-        + " from classpath. Grok filtering will not work.", t);
+      LOG.fatal("Error reading grok-patterns file " + GROK_PATTERN_FILE + " from classpath. Grok filtering will not work.", t);
       return false;
     }
     try {
       grok.addPatternFromReader(grokPatternsReader);
     } catch (GrokException e) {
-      logger.fatal(
-        "Error loading patterns from grok-patterns reader for file "
-          + GROK_PATTERN_FILE, e);
+      LOG.fatal("Error loading patterns from grok-patterns reader for file " + GROK_PATTERN_FILE, e);
       return false;
     }
 
@@ -177,8 +169,7 @@ public class FilterGrok extends Filter {
       String jsonStr = grokMultiline.capture(inputStr);
       if (!"{}".equals(jsonStr)) {
         if (strBuff != null) {
-          Map<String, Object> jsonObj = Collections
-            .synchronizedMap(new HashMap<String, Object>());
+          Map<String, Object> jsonObj = Collections.synchronizedMap(new HashMap<String, Object>());
           try {
             applyMessage(strBuff.toString(), jsonObj, currMultilineJsonStr);
           } finally {
@@ -192,15 +183,13 @@ public class FilterGrok extends Filter {
       if (strBuff == null) {
         strBuff = new StringBuilder();
       } else {
-        strBuff.append('\r');
-        strBuff.append('\n');
+        strBuff.append("\r\n");
       }
       strBuff.append(inputStr);
       savedInputMarker = inputMarker;
     } else {
       savedInputMarker = inputMarker;
-      Map<String, Object> jsonObj = Collections
-        .synchronizedMap(new HashMap<String, Object>());
+      Map<String, Object> jsonObj = Collections.synchronizedMap(new HashMap<String, Object>());
       applyMessage(inputStr, jsonObj, null);
     }
   }
@@ -216,14 +205,8 @@ public class FilterGrok extends Filter {
     }
   }
 
-  /**
-   * @param inputStr
-   * @param jsonObj
-   * @throws LogfeederException 
-   */
-  private void applyMessage(String inputStr, Map<String, Object> jsonObj,
-                            String multilineJsonStr) throws LogfeederException {
-    String jsonStr = grokParse(inputStr);
+  private void applyMessage(String inputStr, Map<String, Object> jsonObj, String multilineJsonStr) throws LogfeederException {
+    String jsonStr = grokMessage.capture(inputStr);
 
     boolean parseError = false;
     if ("{}".equals(jsonStr)) {
@@ -239,8 +222,7 @@ public class FilterGrok extends Filter {
     if (parseError) {
       jsonStr = multilineJsonStr;
     }
-    Map<String, String> jsonSrc = LogFeederUtil.getGson().fromJson(jsonStr,
-      jsonType);
+    Map<String, String> jsonSrc = LogFeederUtil.getGson().fromJson(jsonStr, jsonType);
     for (String namedParam : namedParamList) {
       if (jsonSrc.get(namedParam) != null) {
         jsonObj.put(namedParam, jsonSrc.get(namedParam));
@@ -260,37 +242,26 @@ public class FilterGrok extends Filter {
       }
     }
     super.apply(jsonObj, savedInputMarker);
-    statMetric.count++;
-  }
-
-  public String grokParse(String inputStr) {
-    String jsonStr = grokMessage.capture(inputStr);
-    return jsonStr;
+    statMetric.value++;
   }
 
   private void logParseError(String inputStr) {
-    grokErrorMetric.count++;
-    final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-      + "_PARSEERROR";
+    grokErrorMetric.value++;
+    String logMessageKey = this.getClass().getSimpleName() + "_PARSEERROR";
     int inputStrLength = inputStr != null ? inputStr.length() : 0;
-    LogFeederUtil.logErrorMessageByInterval(
-      LOG_MESSAGE_KEY,
-      "Error parsing string. length=" + inputStrLength
-        + ", input=" + input.getShortDescription()
-        + ". First upto 100 characters="
-        + LogFeederUtil.subString(inputStr, 100), null, logger,
-      Level.WARN);
+    LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error parsing string. length=" + inputStrLength + ", input=" +
+        input.getShortDescription() + ". First up to 100 characters=" + StringUtils.abbreviate(inputStr, 100), null, LOG,
+        Level.WARN);
   }
 
   @Override
   public void flush() {
     if (strBuff != null) {
-      Map<String, Object> jsonObj = Collections
-        .synchronizedMap(new HashMap<String, Object>());
+      Map<String, Object> jsonObj = Collections.synchronizedMap(new HashMap<String, Object>());
       try {
         applyMessage(strBuff.toString(), jsonObj, currMultilineJsonStr);
       } catch (LogfeederException e) {
-        logger.error(e.getLocalizedMessage(), e.getCause());
+        LOG.error(e.getLocalizedMessage(), e.getCause());
       }
       strBuff = null;
       savedInputMarker = null;
@@ -304,7 +275,7 @@ public class FilterGrok extends Filter {
   }
 
   @Override
-  public void addMetricsContainers(List<MetricCount> metricsList) {
+  public void addMetricsContainers(List<MetricData> metricsList) {
     super.addMetricsContainers(metricsList);
     metricsList.add(grokErrorMetric);
   }
@@ -314,5 +285,4 @@ public class FilterGrok extends Filter {
     super.logStat();
     logStatForMetric(grokErrorMetric, "Stat: Grok Errors");
   }
-
 }
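
To see what extractNamedParams() pulls out of a message pattern, here is the same grokRegEx applied to a sample pattern; the sample pattern string and the class name are illustrative only. Each %{PATTERN:field} reference exposes its field name through the subname group:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class GrokParamSketch {
  public static void main(String[] args) {
    // Same expression as grokRegEx above, written as one literal.
    String grokRegEx = "%\\{(?<name>(?<pattern>[A-z0-9]+)(?::(?<subname>[A-z0-9_:]+))?)" +
        "(?:=(?<definition>(?:(?:[^{}]+|\\.+)+)+))?\\}";
    String messagePattern = "%{TIMESTAMP_ISO8601:logtime} %{LOGLEVEL:level} %{GREEDYDATA:log_message}";

    Matcher matcher = Pattern.compile(grokRegEx).matcher(messagePattern);
    while (matcher.find()) {
      System.out.println(matcher.group("subname")); // logtime, level, log_message
    }
  }
}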

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java
index 2954106..ba63c61 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterJSON.java
@@ -22,12 +22,13 @@ import java.util.Map;
 
 import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.util.DateUtil;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.log4j.Logger;
 
 public class FilterJSON extends Filter {
   
-  private static final Logger logger  = Logger.getLogger(FilterJSON.class);
+  private static final Logger LOG  = Logger.getLogger(FilterJSON.class);
 
   @Override
   public void apply(String inputStr, InputMarker inputMarker) throws LogfeederException {
@@ -35,7 +36,7 @@ public class FilterJSON extends Filter {
     try {
       jsonMap = LogFeederUtil.toJSONObject(inputStr);
     } catch (Exception e) {
-      logger.error(e.getLocalizedMessage());
+      LOG.error(e.getLocalizedMessage());
       throw new LogfeederException("Json parsing failed for inputstr = " + inputStr ,e.getCause());
     }
     Double lineNumberD = (Double) jsonMap.get("line_number");
@@ -45,10 +46,9 @@ public class FilterJSON extends Filter {
     }
     String timeStampStr = (String) jsonMap.get("logtime");
     if (timeStampStr != null && !timeStampStr.isEmpty()) {
-      String logtime = LogFeederUtil.getDate(timeStampStr);
+      String logtime = DateUtil.getDate(timeStampStr);
       jsonMap.put("logtime", logtime);
     }
     super.apply(jsonMap, inputMarker);
   }
-
 }
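
One detail behind the line_number handling above: when Gson deserializes into Map<String, Object>, JSON numbers come back as Double by default, which is why the filter converts line_number with intValue(). A small standalone check (hypothetical class name, default Gson settings assumed):

import java.lang.reflect.Type;
import java.util.Map;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

public class JsonNumberSketch {
  public static void main(String[] args) {
    Type type = new TypeToken<Map<String, Object>>() {}.getType();
    Map<String, Object> jsonMap = new Gson().fromJson("{\"line_number\": 42, \"level\": \"INFO\"}", type);

    Object lineNumber = jsonMap.get("line_number");
    System.out.println(lineNumber.getClass().getSimpleName()); // Double
    System.out.println(((Double) lineNumber).intValue());      // 42
  }
}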

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
index 7adb468..c9c3f2c 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
@@ -25,38 +25,35 @@ import java.util.StringTokenizer;
 
 import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 
 public class FilterKeyValue extends Filter {
-  private static final Logger logger = Logger.getLogger(FilterKeyValue.class);
+  private static final Logger LOG = Logger.getLogger(FilterKeyValue.class);
 
   private String sourceField = null;
   private String valueSplit = "=";
   private String fieldSplit = "\t";
 
-  private MetricCount errorMetric = new MetricCount();
+  private MetricData errorMetric = new MetricData("filter.error.keyvalue", false);
 
   @Override
   public void init() throws Exception {
     super.init();
-    errorMetric.metricsName = "filter.error.keyvalue";
 
     sourceField = getStringValue("source_field");
     valueSplit = getStringValue("value_split", valueSplit);
     fieldSplit = getStringValue("field_split", fieldSplit);
 
-    logger.info("init() done. source_field=" + sourceField
-      + ", value_split=" + valueSplit + ", " + ", field_split="
-      + fieldSplit + ", " + getShortDescription());
+    LOG.info("init() done. source_field=" + sourceField + ", value_split=" + valueSplit + ", " + ", field_split=" +
+        fieldSplit + ", " + getShortDescription());
     if (StringUtils.isEmpty(sourceField)) {
-      logger.fatal("source_field is not set for filter. This filter will not be applied");
+      LOG.fatal("source_field is not set for filter. This filter will not be applied");
       return;
     }
-
   }
 
   @Override
@@ -71,40 +68,30 @@ public class FilterKeyValue extends Filter {
     }
     Object valueObj = jsonObj.get(sourceField);
     if (valueObj != null) {
-      StringTokenizer fieldTokenizer = new StringTokenizer(
-        valueObj.toString(), fieldSplit);
+      StringTokenizer fieldTokenizer = new StringTokenizer(valueObj.toString(), fieldSplit);
       while (fieldTokenizer.hasMoreTokens()) {
         String nv = fieldTokenizer.nextToken();
-        StringTokenizer nvTokenizer = new StringTokenizer(nv,
-          valueSplit);
+        StringTokenizer nvTokenizer = new StringTokenizer(nv, valueSplit);
         while (nvTokenizer.hasMoreTokens()) {
           String name = nvTokenizer.nextToken();
           if (nvTokenizer.hasMoreTokens()) {
             String value = nvTokenizer.nextToken();
             jsonObj.put(name, value);
           } else {
-            logParseError("name=" + name + ", pair=" + nv
-              + ", field=" + sourceField + ", field_value="
-              + valueObj);
+            logParseError("name=" + name + ", pair=" + nv + ", field=" + sourceField + ", field_value=" + valueObj);
           }
         }
       }
     }
     super.apply(jsonObj, inputMarker);
-    statMetric.count++;
+    statMetric.value++;
   }
 
   private void logParseError(String inputStr) {
-    errorMetric.count++;
-    final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-      + "_PARSEERROR";
-    LogFeederUtil
-      .logErrorMessageByInterval(
-        LOG_MESSAGE_KEY,
-        "Error parsing string. length=" + inputStr.length()
-          + ", input=" + input.getShortDescription()
-          + ". First upto 100 characters="
-          + LogFeederUtil.subString(inputStr, 100), null, logger,
+    errorMetric.value++;
+    String logMessageKey = this.getClass().getSimpleName() + "_PARSEERROR";
+    LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error parsing string. length=" + inputStr.length() + ", input=" +
+        input.getShortDescription() + ". First up to 100 characters=" + StringUtils.abbreviate(inputStr, 100), null, LOG,
         Level.ERROR);
   }
 
@@ -114,9 +101,8 @@ public class FilterKeyValue extends Filter {
   }
 
   @Override
-  public void addMetricsContainers(List<MetricCount> metricsList) {
+  public void addMetricsContainers(List<MetricData> metricsList) {
     super.addMetricsContainers(metricsList);
     metricsList.add(errorMetric);
   }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/AbstractInputFile.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/AbstractInputFile.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/AbstractInputFile.java
new file mode 100644
index 0000000..41a1fa5
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/AbstractInputFile.java
@@ -0,0 +1,319 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.input;
+
+import java.io.BufferedReader;
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+public abstract class AbstractInputFile extends Input {
+  protected static final Logger LOG = Logger.getLogger(AbstractInputFile.class);
+
+  private static final int DEFAULT_CHECKPOINT_INTERVAL_MS = 5 * 1000;
+
+  protected File[] logFiles;
+  protected String logPath;
+  protected Object fileKey;
+  protected String base64FileKey;
+
+  protected boolean isReady;
+  private boolean isStartFromBegining = true;
+
+  private String checkPointExtension;
+  private File checkPointFile;
+  private RandomAccessFile checkPointWriter;
+  private long lastCheckPointTimeMS;
+  private int checkPointIntervalMS;
+  private Map<String, Object> jsonCheckPoint;
+  private InputMarker lastCheckPointInputMarker;
+
+  @Override
+  protected String getStatMetricName() {
+    return "input.files.read_lines";
+  }
+  
+  @Override
+  protected String getReadBytesMetricName() {
+    return "input.files.read_bytes";
+  }
+  
+  @Override
+  public void init() throws Exception {
+    LOG.info("init() called");
+    
+    checkPointExtension = LogFeederUtil.getStringProperty("logfeeder.checkpoint.extension", InputManager.DEFAULT_CHECKPOINT_EXTENSION);
+
+    // Let's close the file and set it to true after we start monitoring it
+    setClosed(true);
+    logPath = getStringValue("path");
+    tail = getBooleanValue("tail", tail);
+    checkPointIntervalMS = getIntValue("checkpoint.interval.ms", DEFAULT_CHECKPOINT_INTERVAL_MS);
+
+    if (StringUtils.isEmpty(logPath)) {
+      LOG.error("path is empty for file input. " + getShortDescription());
+      return;
+    }
+
+    String startPosition = getStringValue("start_position");
+    if (StringUtils.isEmpty(startPosition) || startPosition.equalsIgnoreCase("beginning") ||
+        startPosition.equalsIgnoreCase("begining") || !tail) {
+      isStartFromBegining = true;
+    }
+
+    setFilePath(logPath);
+    boolean isFileReady = isReady();
+
+    LOG.info("File to monitor " + logPath + ", tail=" + tail + ", isReady=" + isFileReady);
+
+    super.init();
+  }
+
+  protected void processFile(File logPathFile) throws FileNotFoundException, IOException {
+    LOG.info("Monitoring logPath=" + logPath + ", logPathFile=" + logPathFile);
+    BufferedReader br = null;
+    checkPointFile = null;
+    checkPointWriter = null;
+    jsonCheckPoint = null;
+
+    int lineCount = 0;
+    try {
+      setFilePath(logPathFile.getAbsolutePath());
+      
+      br = openLogFile(logPathFile);
+
+      boolean resume = isStartFromBegining;
+      int resumeFromLineNumber = getResumeFromLineNumber();
+      if (resumeFromLineNumber > 0) {
+        resume = false;
+      }
+      
+      setClosed(false);
+      int sleepStep = 2;
+      int sleepIteration = 0;
+      while (true) {
+        try {
+          if (isDrain()) {
+            break;
+          }
+
+          String line = br.readLine();
+          if (line == null) {
+            if (!resume) {
+              resume = true;
+            }
+            sleepIteration++;
+            if (sleepIteration == 2) {
+              flush();
+              if (!tail) {
+                LOG.info("End of file. Done with filePath=" + logPathFile.getAbsolutePath() + ", lineCount=" + lineCount);
+                break;
+              }
+            } else if (sleepIteration > 4) {
+              Object newFileKey = getFileKey(logPathFile);
+              if (newFileKey != null && (fileKey == null || !newFileKey.equals(fileKey))) {
+                LOG.info("File key is different. Marking this input file for rollover. oldKey=" + fileKey + ", newKey=" +
+                    newFileKey + ". " + getShortDescription());
+                
+                try {
+                  LOG.info("File is rolled over. Closing current open file." + getShortDescription() + ", lineCount=" +
+                      lineCount);
+                  br.close();
+                } catch (Exception ex) {
+                  LOG.error("Error closing file. " + getShortDescription(), ex);
+                  break;
+                }
+                
+                try {
+                  LOG.info("Opening new rolled over file." + getShortDescription());
+                  br = openLogFile(logPathFile);
+                  lineCount = 0;
+                } catch (Exception ex) {
+                  LOG.error("Error opening rolled over file. " + getShortDescription(), ex);
+                  LOG.info("Added input to not ready list." + getShortDescription());
+                  isReady = false;
+                  inputManager.addToNotReady(this);
+                  break;
+                }
+                LOG.info("File is successfully rolled over. " + getShortDescription());
+                continue;
+              }
+            }
+            try {
+              Thread.sleep(sleepStep * 1000);
+              sleepStep = Math.min(sleepStep * 2, 10);
+            } catch (InterruptedException e) {
+              LOG.info("Thread interrupted." + getShortDescription());
+            }
+          } else {
+            lineCount++;
+            sleepStep = 1;
+            sleepIteration = 0;
+
+            if (!resume && lineCount > resumeFromLineNumber) {
+              LOG.info("Resuming to read from last line. lineCount=" + lineCount + ", input=" + getShortDescription());
+              resume = true;
+            }
+            if (resume) {
+              InputMarker marker = new InputMarker(this, base64FileKey, lineCount);
+              outputLine(line, marker);
+            }
+          }
+        } catch (Throwable t) {
+          String logMessageKey = this.getClass().getSimpleName() + "_READ_LOOP_EXCEPTION";
+          LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Caught exception in read loop. lineNumber=" + lineCount +
+              ", input=" + getShortDescription(), t, LOG, Level.ERROR);
+        }
+      }
+    } finally {
+      if (br != null) {
+        LOG.info("Closing reader." + getShortDescription() + ", lineCount=" + lineCount);
+        try {
+          br.close();
+        } catch (Throwable t) {
+          // ignore
+        }
+      }
+    }
+  }
+
+  protected abstract BufferedReader openLogFile(File logFile) throws IOException;
+
+  protected abstract Object getFileKey(File logFile);
+  
+  private int getResumeFromLineNumber() {
+    int resumeFromLineNumber = 0;
+    
+    if (tail) {
+      try {
+        LOG.info("Checking existing checkpoint file. " + getShortDescription());
+
+        String checkPointFileName = base64FileKey + checkPointExtension;
+        File checkPointFolder = inputManager.getCheckPointFolderFile();
+        checkPointFile = new File(checkPointFolder, checkPointFileName);
+        checkPointWriter = new RandomAccessFile(checkPointFile, "rw");
+
+        try {
+          int contentSize = checkPointWriter.readInt();
+          byte[] b = new byte[contentSize];
+          int readSize = checkPointWriter.read(b, 0, contentSize);
+          if (readSize != contentSize) {
+            LOG.error("Couldn't read expected number of bytes from checkpoint file. expected=" + contentSize + ", read=" +
+                readSize + ", checkPointFile=" + checkPointFile + ", input=" + getShortDescription());
+          } else {
+            String jsonCheckPointStr = new String(b, 0, readSize);
+            jsonCheckPoint = LogFeederUtil.toJSONObject(jsonCheckPointStr);
+
+            resumeFromLineNumber = LogFeederUtil.objectToInt(jsonCheckPoint.get("line_number"), 0, "line_number");
+
+            LOG.info("CheckPoint. checkPointFile=" + checkPointFile + ", json=" + jsonCheckPointStr +
+                ", resumeFromLineNumber=" + resumeFromLineNumber);
+          }
+        } catch (EOFException eofEx) {
+          LOG.info("EOFException. Will reset checkpoint file " + checkPointFile.getAbsolutePath() + " for " +
+              getShortDescription());
+        }
+        if (jsonCheckPoint == null) {
+          // This seems to be the first run for this file, so create the initial checkpoint object
+          jsonCheckPoint = new HashMap<String, Object>();
+          jsonCheckPoint.put("file_path", filePath);
+          jsonCheckPoint.put("file_key", base64FileKey);
+        }
+
+      } catch (Throwable t) {
+        LOG.error("Error while configuring checkpoint file. Will reset file. checkPointFile=" + checkPointFile, t);
+      }
+    }
+    
+    return resumeFromLineNumber;
+  }
+
+  @Override
+  public synchronized void checkIn(InputMarker inputMarker) {
+    if (checkPointWriter != null) {
+      try {
+        int lineNumber = LogFeederUtil.objectToInt(jsonCheckPoint.get("line_number"), 0, "line_number");
+        if (lineNumber > inputMarker.lineNumber) {
+          // Already wrote a higher line number for this input
+          return;
+        }
+        // Write only if the checkpoint interval has elapsed since the last write; otherwise defer to flush
+        long currMS = System.currentTimeMillis();
+        if (!isClosed() && (currMS - lastCheckPointTimeMS) < checkPointIntervalMS) {
+          // Let's save this one so we can update the check point file on flush
+          lastCheckPointInputMarker = inputMarker;
+          return;
+        }
+        lastCheckPointTimeMS = currMS;
+
+        jsonCheckPoint.put("line_number", String.valueOf(inputMarker.lineNumber));
+        jsonCheckPoint.put("last_write_time_ms", String.valueOf(currMS));
+        jsonCheckPoint.put("last_write_time_date", new Date());
+
+        String jsonStr = LogFeederUtil.getGson().toJson(jsonCheckPoint);
+
+        // Let's rewind
+        checkPointWriter.seek(0);
+        checkPointWriter.writeInt(jsonStr.length());
+        checkPointWriter.write(jsonStr.getBytes());
+
+        if (isClosed()) {
+          String logMessageKey = this.getClass().getSimpleName() + "_FINAL_CHECKIN";
+          LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Wrote final checkPoint, input=" + getShortDescription() +
+              ", checkPointFile=" + checkPointFile.getAbsolutePath() + ", checkPoint=" + jsonStr, null, LOG, Level.INFO);
+        }
+      } catch (Throwable t) {
+        String logMessageKey = this.getClass().getSimpleName() + "_CHECKIN_EXCEPTION";
+        LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Caught exception in checkIn(). input=" + getShortDescription(), t,
+            LOG, Level.ERROR);
+      }
+    }
+  }
+
+  @Override
+  public void lastCheckIn() {
+    if (lastCheckPointInputMarker != null) {
+      checkIn(lastCheckPointInputMarker);
+    }
+  }
+
+  @Override
+  public void close() {
+    super.close();
+    LOG.info("close() calling checkPoint checkIn(). " + getShortDescription());
+    lastCheckIn();
+  }
+
+  @Override
+  public String getShortDescription() {
+    return "input:source=" + getStringValue("source") + ", path=" +
+        (!ArrayUtils.isEmpty(logFiles) ? logFiles[0].getAbsolutePath() : logPath);
+  }
+}
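
Note on the checkpoint format used above (editorial sketch, not part of the commit): checkIn() persists progress as a length-prefixed JSON blob through RandomAccessFile. It rewinds to offset 0, writes a 4-byte int holding the JSON string length, and then writes the JSON bytes; getResumeFromLineNumber() reads the same layout back and pulls "line_number" out of the resulting map. A minimal standalone round trip of that layout, with a made-up file name and sample values, could look like this:

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;

public class CheckpointFormatSketch {
  public static void main(String[] args) throws IOException {
    File checkPointFile = File.createTempFile("demo", ".cp");
    String json = "{\"file_path\":\"/var/log/demo.log\",\"file_key\":\"a2V5\",\"line_number\":\"42\"}";

    // Write side, mirroring checkIn(): rewind, write the length, then the JSON bytes.
    try (RandomAccessFile writer = new RandomAccessFile(checkPointFile, "rw")) {
      writer.seek(0);
      writer.writeInt(json.length());
      writer.write(json.getBytes());
    }

    // Read side, mirroring getResumeFromLineNumber(): read the length prefix, then exactly that many bytes.
    try (RandomAccessFile reader = new RandomAccessFile(checkPointFile, "r")) {
      int contentSize = reader.readInt();
      byte[] buffer = new byte[contentSize];
      int readSize = reader.read(buffer, 0, contentSize);
      System.out.println("read " + readSize + " bytes: " + new String(buffer, 0, readSize));
    }
  }
}

The length prefix is what keeps a rewritten checkpoint readable: if a later write is shorter than an earlier one, stale bytes remain after the JSON, but the reader only consumes the declared number of bytes.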


[09/50] [abbrv] ambari git commit: AMBARI-18214. Restify Log Search endpoints (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/DashboardREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/DashboardREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/DashboardREST.java
deleted file mode 100644
index 0144edc..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/DashboardREST.java
+++ /dev/null
@@ -1,575 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.rest;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiImplicitParam;
-import io.swagger.annotations.ApiImplicitParams;
-import io.swagger.annotations.ApiOperation;
-import org.apache.ambari.logsearch.common.LogSearchConstants;
-import org.apache.ambari.logsearch.common.SearchCriteria;
-import org.apache.ambari.logsearch.manager.LogsMgr;
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
-import org.apache.ambari.logsearch.view.VCountList;
-import org.apache.ambari.logsearch.view.VNameValueList;
-import org.apache.ambari.logsearch.view.VNodeList;
-import org.apache.commons.lang.StringEscapeUtils;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.context.annotation.Scope;
-import org.springframework.stereotype.Component;
-
-import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.*;
-import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.*;
-import static org.apache.ambari.logsearch.doc.DocConstants.ServiceOperationDescriptions.*;
-
-@Api(value = "dashboard", description = "Dashboard operations")
-@Path("dashboard")
-@Component
-@Scope("request")
-public class DashboardREST {
-
-  @Autowired
-  LogsMgr logMgr;
-
-  @Autowired
-  RESTErrorUtil restErrorUtil;
-
-  @GET
-  @Path("/solr/logs_search")
-  @Produces({"application/json"})
-  @ApiOperation(SEARCH_LOGS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FIND_D, name = "find", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = SOURCE_LOG_ID_D, name = "sourceLogId", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = KEYWORD_TYPE_D, name = "keywordType", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TOKEN_D, name = "token", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = IS_LAST_PAGE_D, name = "isLastPage", dataType = "boolean", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public String searchSolrData(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    searchCriteria.addParam("keyword", StringEscapeUtils.unescapeXml(request.getParameter("find")));
-    searchCriteria.addParam("sourceLogId", request.getParameter("sourceLogId"));
-    searchCriteria.addParam("keywordType",
-      request.getParameter("keywordType"));
-    searchCriteria.addParam("token",
-      request.getParameter("token"));
-    searchCriteria.addParam("isLastPage",request.getParameter("isLastPage"));
-    return logMgr.searchLogs(searchCriteria);
-  }
-
-  @GET
-  @Path("/hosts")
-  @Produces({"application/json"})
-  @ApiOperation(GET_HOSTS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query")
-  })
-  public String getHosts(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addParam("q", request.getParameter("q"));
-    return logMgr.getHosts(searchCriteria);
-  }
-
-  @GET
-  @Path("/components")
-  @Produces({"application/json"})
-  @ApiOperation(GET_COMPONENTS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query")
-  })
-  public String getComponents(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addParam("q", request.getParameter("q"));
-    return logMgr.getComponents(searchCriteria);
-  }
-
-  @GET
-  @Path("/aggregatedData")
-  @Produces({"application/json"})
-  @ApiOperation(GET_AGGREGATED_INFO_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public String getAggregatedInfo(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria();
-    searchCriteria.addRequiredServiceLogsParams(request);
-    return logMgr.getAggregatedInfo(searchCriteria);
-  }
-
-  @GET
-  @Path("/levels_count")
-  @Produces({"application/json"})
-  @ApiOperation(GET_LOG_LEVELS_COUNT_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public VCountList getLogLevelsCount(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria();
-    searchCriteria.addParam("q", request.getParameter("q"));
-    searchCriteria
-      .addParam("startDate", request.getParameter("start_time"));
-    searchCriteria.addParam("endDate", request.getParameter("end_time"));
-    return logMgr.getLogLevelCount(searchCriteria);
-  }
-
-  @GET
-  @Path("/components_count")
-  @Produces({"application/json"})
-  @ApiOperation(GET_COMPONENTS_COUNT_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public VCountList getComponentsCount(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria();
-    searchCriteria.addParam("q", request.getParameter("q"));
-    searchCriteria
-      .addParam("startDate", request.getParameter("start_time"));
-    searchCriteria.addParam("endDate", request.getParameter("end_time"));
-    return logMgr.getComponentsCount(searchCriteria);
-  }
-
-  @GET
-  @Path("/hosts_count")
-  @Produces({"application/json"})
-  @ApiOperation(GET_HOSTS_COUNT_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query")
-  })
-  public VCountList getHostsCount(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria();
-    searchCriteria.addParam("q", request.getParameter("q"));
-    searchCriteria
-      .addParam("startDate", request.getParameter("start_time"));
-    searchCriteria.addParam("endDate", request.getParameter("end_time"));
-    searchCriteria.addParam("excludeQuery", StringEscapeUtils
-      .unescapeXml(request.getParameter("excludeQuery")));
-    searchCriteria.addParam("includeQuery", StringEscapeUtils
-      .unescapeXml(request.getParameter("includeQuery")));
-    return logMgr.getHostsCount(searchCriteria);
-  }
-
-  @GET
-  @Path("/getTreeExtension")
-  @Produces({"application/json"})
-  @ApiOperation(GET_TREE_EXTENSION_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public VNodeList getTreeExtension(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    searchCriteria.addParam("hostName", request.getParameter("hostName"));
-    return logMgr.getTreeExtension(searchCriteria);
-  }
-
-  @GET
-  @Path("/getLogLevelCounts")
-  @Produces({"application/json"})
-  @ApiOperation(GET_LOG_LEVELS_COUNT_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public VNameValueList getLogsLevelCount(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    return logMgr.getLogsLevelCount(searchCriteria);
-  }
-
-  @GET
-  @Path("/getHistogramData")
-  @Produces({"application/json"})
-  @ApiOperation(GET_HISTOGRAM_DATA_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public String getHistogramData(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    searchCriteria.addParam("unit", request.getParameter("unit"));
-    return logMgr.getHistogramData(searchCriteria);
-  }
-
-  @GET
-  @Path("/cancelFindRequest")
-  @Produces({"application/json"})
-  @ApiOperation(CANCEL_FIND_REQUEST_OD)
-  public String cancelFindRequest(@Context HttpServletRequest request) {
-    return logMgr.cancelFindRequestByDate(request);
-  }
-
-  @GET
-  @Path("/exportToTextFile")
-  @Produces({"application/json"})
-  @ApiOperation(EXPORT_TO_TEXT_FILE_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FORMAT_D, name = "format", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = UTC_OFFSET_D, name = "utcOffset", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public Response exportToTextFile(@Context HttpServletRequest request) {
-
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    searchCriteria.addParam("format", request.getParameter("format"));
-    searchCriteria.addParam("utcOffset", request.getParameter("utcOffset"));
-    return logMgr.exportToTextFile(searchCriteria);
-
-  }
-
-  @GET
-  @Path("/getHostListByComponent")
-  @Produces({"application/json"})
-  @ApiOperation(GET_HOST_LIST_BY_COMPONENT_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public String getHostListByComponent(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    searchCriteria.addParam("componentName",
-      request.getParameter("componentName"));
-    return logMgr.getHostListByComponent(searchCriteria);
-  }
-
-  @GET
-  @Path("/getComponentListWithLevelCounts")
-  @Produces({"application/json"})
-  @ApiOperation(GET_COMPONENT_LIST_WITH_LEVEL_COUNT_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public String getComponentListWithLevelCounts(
-    @Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    return logMgr.getComponentListWithLevelCounts(searchCriteria);
-  }
-
-  @GET
-  @Path("/solr/getBundleIdBoundaryDates")
-  @Produces({"application/json"})
-  @ApiOperation(GET_EXTREME_DATES_FOR_BUNDLE_ID_OD)
-  public String getExtremeDatesForBundelId(@Context HttpServletRequest request) {
-
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addParam(LogSearchConstants.BUNDLE_ID,
-      request.getParameter("bundle_id"));
-
-    return logMgr.getExtremeDatesForBundelId(searchCriteria);
-
-  }
-
-  @GET
-  @Path("/getServiceLogsFieldsName")
-  @Produces({"application/json"})
-  @ApiOperation(GET_SERVICE_LOGS_FIELD_NAME_OD)
-  public String getServiceLogsFieldsName() {
-    return logMgr.getServiceLogsFieldsName();
-  }
-
-  @GET
-  @Path("/getServiceLogsSchemaFieldsName")
-  @Produces({"application/json"})
-  @ApiOperation(GET_SERVICE_LOGS_SCHEMA_FIELD_NAME_OD)
-  public String getServiceLogsSchemaFieldsName() {
-    return logMgr.getServiceLogsSchemaFieldsName();
-  }
-
-  @GET
-  @Path("/getAnyGraphData")
-  @Produces({"application/json"})
-  @ApiOperation(GET_ANY_GRAPH_DATA_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = X_AXIS_D, name = "xAxis", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = Y_AXIS_D, name = "yAxis", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = STACK_BY_D, name = "stackBy", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public String getAnyGraphData(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("xAxis", request.getParameter("xAxis"));
-    searchCriteria.addParam("yAxis", request.getParameter("yAxis"));
-    searchCriteria.addParam("stackBy", request.getParameter("stackBy"));
-    searchCriteria.addParam("from", request.getParameter("from"));
-    searchCriteria.addParam("to", request.getParameter("to"));
-    searchCriteria.addParam("unit", request.getParameter("unit"));
-    return logMgr.getAnyGraphData(searchCriteria);
-  }
-
-  @GET
-  @Path("/getAfterBeforeLogs")
-  @Produces({"application/json"})
-  @ApiOperation(GET_AFTER_BEFORE_LOGS_OD)
-  @ApiImplicitParams(value = {
-    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_D,name = "component", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ID_D, name = "id", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = SCROLL_TYPE_D, name = "scrollType", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = NUMBER_ROWS_D, name = "numberRows", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
-  })
-  public String getAfterBeforeLogs(@Context HttpServletRequest request) {
-    SearchCriteria searchCriteria = new SearchCriteria(request);
-    searchCriteria.addRequiredServiceLogsParams(request);
-    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
-    searchCriteria.addParam("compLogFile",
-      request.getParameter("component"));
-    searchCriteria.addParam("id", request.getParameter("id"));
-    searchCriteria.addParam("scrollType",
-      request.getParameter("scrollType"));
-    searchCriteria.addParam("numberRows",
-      request.getParameter("numberRows"));
-    return logMgr.getAfterBeforeLogs(searchCriteria);
-  }
-
-  @GET
-  @Path("/getHadoopServiceConfigJSON")
-  @Produces({"application/json"})
-  @ApiOperation(GET_HADOOP_SERVICE_CONFIG_JSON_OD)
-  public String getHadoopServiceConfigJSON() {
-    return logMgr.getHadoopServiceConfigJSON();
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileREST.java
index d53cff9..6099e0f 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileREST.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileREST.java
@@ -63,13 +63,14 @@ public class LogFileREST {
   }
 
   @GET
-  @Path("/getLogFileTail")
+  @Path("/tail")
   @Produces({"application/json"})
   @ApiOperation(GET_LOG_FILE_TAIL_OD)
   @ApiImplicitParams(value = {
     @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
     @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
-    @ApiImplicitParam(value = LOG_TYPE_D, name = "logType", dataType = "string", paramType = "query")
+    @ApiImplicitParam(value = LOG_TYPE_D, name = "logType", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TAIL_SIZE_D, name = "tailSize", dataType = "string", paramType = "query")
   })
   public String getLogFileTail(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria();

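For context on the rename above: a client that previously called getLogFileTail now requests the "tail" path and may pass the new tailSize query parameter. The sketch below is an editorial illustration only, not code from the commit; the host, port, resource prefix, and parameter values are assumptions, since the class-level @Path of LogFileREST is not visible in this hunk.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class TailEndpointSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder base URL and "/logfile" prefix; only "/tail" and "tailSize" come from this diff hunk.
    URL url = new URL("http://localhost:61888/logfile/tail"
        + "?component=logsearch_app&host=host1.example.com&logType=logsearch_app&tailSize=50");
    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod("GET");
    try (BufferedReader reader = new BufferedReader(
        new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(line);
      }
    }
  }
}
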
http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicREST.java
index af48acd..5218f5d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicREST.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicREST.java
@@ -40,7 +40,7 @@ public class PublicREST {
   PublicMgr generalMgr;
 
   @GET
-  @Path("/getGeneralConfig")
+  @Path("/config")
   @ApiOperation(OBTAIN_GENERAL_CONFIG_OD)
   public String getGeneralConfig() {
     return generalMgr.getGeneralConfig();

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java
new file mode 100644
index 0000000..373f7ff
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java
@@ -0,0 +1,578 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.rest;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.common.SearchCriteria;
+import org.apache.ambari.logsearch.manager.LogsMgr;
+import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.ambari.logsearch.view.VCountList;
+import org.apache.ambari.logsearch.view.VNameValueList;
+import org.apache.ambari.logsearch.view.VNodeList;
+import org.apache.commons.lang.StringEscapeUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Scope;
+import org.springframework.stereotype.Component;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.*;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.*;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceOperationDescriptions.*;
+
+@Api(value = "service/logs", description = "Service log operations")
+@Path("service/logs")
+@Component
+@Scope("request")
+public class ServiceLogsREST {
+
+  @Autowired
+  LogsMgr logMgr;
+
+  @Autowired
+  RESTErrorUtil restErrorUtil;
+
+  @GET
+  @Produces({"application/json"})
+  @ApiOperation(SEARCH_LOGS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FIND_D, name = "find", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = SOURCE_LOG_ID_D, name = "sourceLogId", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = KEYWORD_TYPE_D, name = "keywordType", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TOKEN_D, name = "token", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = IS_LAST_PAGE_D, name = "isLastPage", dataType = "boolean", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
+  public String searchSolrData(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredServiceLogsParams(request);
+    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
+    searchCriteria.addParam("compLogFile",
+      request.getParameter("component"));
+    searchCriteria.addParam("keyword", StringEscapeUtils.unescapeXml(request.getParameter("find")));
+    searchCriteria.addParam("sourceLogId", request.getParameter("sourceLogId"));
+    searchCriteria.addParam("keywordType",
+      request.getParameter("keywordType"));
+    searchCriteria.addParam("token",
+      request.getParameter("token"));
+    searchCriteria.addParam("isLastPage",request.getParameter("isLastPage"));
+    return logMgr.searchLogs(searchCriteria);
+  }
+
+  @GET
+  @Path("/hosts")
+  @Produces({"application/json"})
+  @ApiOperation(GET_HOSTS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query")
+  })
+  public String getHosts(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addParam("q", request.getParameter("q"));
+    return logMgr.getHosts(searchCriteria);
+  }
+
+  @GET
+  @Path("/components")
+  @Produces({"application/json"})
+  @ApiOperation(GET_COMPONENTS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query")
+  })
+  public String getComponents(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addParam("q", request.getParameter("q"));
+    return logMgr.getComponents(searchCriteria);
+  }
+
+  @GET
+  @Path("/aggregated")
+  @Produces({"application/json"})
+  @ApiOperation(GET_AGGREGATED_INFO_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
+  public String getAggregatedInfo(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria();
+    searchCriteria.addRequiredServiceLogsParams(request);
+    return logMgr.getAggregatedInfo(searchCriteria);
+  }
+
+  @GET
+  @Path("/levels/count")
+  @Produces({"application/json"})
+  @ApiOperation(GET_LOG_LEVELS_COUNT_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
+  public VCountList getLogLevelsCount(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria();
+    searchCriteria.addParam("q", request.getParameter("q"));
+    searchCriteria
+      .addParam("startDate", request.getParameter("start_time"));
+    searchCriteria.addParam("endDate", request.getParameter("end_time"));
+    return logMgr.getLogLevelCount(searchCriteria);
+  }
+
+  @GET
+  @Path("/components/count")
+  @Produces({"application/json"})
+  @ApiOperation(GET_COMPONENTS_COUNT_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
+  public VCountList getComponentsCount(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria();
+    searchCriteria.addParam("q", request.getParameter("q"));
+    searchCriteria
+      .addParam("startDate", request.getParameter("start_time"));
+    searchCriteria.addParam("endDate", request.getParameter("end_time"));
+    return logMgr.getComponentsCount(searchCriteria);
+  }
+
+  @GET
+  @Path("/hosts/count")
+  @Produces({"application/json"})
+  @ApiOperation(GET_HOSTS_COUNT_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query")
+  })
+  public VCountList getHostsCount(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria();
+    searchCriteria.addParam("q", request.getParameter("q"));
+    searchCriteria
+      .addParam("startDate", request.getParameter("start_time"));
+    searchCriteria.addParam("endDate", request.getParameter("end_time"));
+    searchCriteria.addParam("excludeQuery", StringEscapeUtils
+      .unescapeXml(request.getParameter("excludeQuery")));
+    searchCriteria.addParam("includeQuery", StringEscapeUtils
+      .unescapeXml(request.getParameter("includeQuery")));
+    return logMgr.getHostsCount(searchCriteria);
+  }
+
+  @GET
+  @Path("/tree")
+  @Produces({"application/json"})
+  @ApiOperation(GET_TREE_EXTENSION_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
+  public VNodeList getTreeExtension(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredServiceLogsParams(request);
+    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
+    searchCriteria.addParam("compLogFile",
+      request.getParameter("component"));
+    searchCriteria.addParam("hostName", request.getParameter("hostName"));
+    return logMgr.getTreeExtension(searchCriteria);
+  }
+
+  @GET
+  @Path("/levels/counts/namevalues")
+  @Produces({"application/json"})
+  @ApiOperation(GET_LOG_LEVELS_COUNT_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
+  public VNameValueList getLogsLevelCount(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredServiceLogsParams(request);
+    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
+    searchCriteria.addParam("compLogFile",
+      request.getParameter("component"));
+    return logMgr.getLogsLevelCount(searchCriteria);
+  }
+
+  @GET
+  @Path("/histogram")
+  @Produces({"application/json"})
+  @ApiOperation(GET_HISTOGRAM_DATA_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
+  public String getHistogramData(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredServiceLogsParams(request);
+    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
+    searchCriteria.addParam("compLogFile",
+      request.getParameter("component"));
+    searchCriteria.addParam("unit", request.getParameter("unit"));
+    return logMgr.getHistogramData(searchCriteria);
+  }
+
+  @GET
+  @Path("/request/cancel")
+  @Produces({"application/json"})
+  @ApiOperation(CANCEL_FIND_REQUEST_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = TOKEN_D, name = "token", dataType = "string", paramType = "query"),
+  })
+  public String cancelFindRequest(@Context HttpServletRequest request) {
+    String uniqueId = request.getParameter("token");
+    return logMgr.cancelFindRequestByDate(uniqueId);
+  }
+
+  @GET
+  @Path("/export")
+  @Produces({"application/json"})
+  @ApiOperation(EXPORT_TO_TEXT_FILE_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FORMAT_D, name = "format", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = UTC_OFFSET_D, name = "utcOffset", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
+  public Response exportToTextFile(@Context HttpServletRequest request) {
+
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredServiceLogsParams(request);
+    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
+    searchCriteria.addParam("compLogFile",
+      request.getParameter("component"));
+    searchCriteria.addParam("format", request.getParameter("format"));
+    searchCriteria.addParam("utcOffset", request.getParameter("utcOffset"));
+    return logMgr.exportToTextFile(searchCriteria);
+
+  }
+
+  @GET
+  @Path("/hosts/components")
+  @Produces({"application/json"})
+  @ApiOperation(GET_HOST_LIST_BY_COMPONENT_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
+  public String getHostListByComponent(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredServiceLogsParams(request);
+    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
+    searchCriteria.addParam("compLogFile",
+      request.getParameter("component"));
+    searchCriteria.addParam("componentName",
+      request.getParameter("componentName"));
+    return logMgr.getHostListByComponent(searchCriteria);
+  }
+
+  @GET
+  @Path("/components/level/counts")
+  @Produces({"application/json"})
+  @ApiOperation(GET_COMPONENT_LIST_WITH_LEVEL_COUNT_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
+  public String getComponentListWithLevelCounts(
+    @Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredServiceLogsParams(request);
+    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
+    searchCriteria.addParam("compLogFile",
+      request.getParameter("component"));
+    return logMgr.getComponentListWithLevelCounts(searchCriteria);
+  }
+
+  @GET
+  @Path("/solr/boundarydates")
+  @Produces({"application/json"})
+  @ApiOperation(GET_EXTREME_DATES_FOR_BUNDLE_ID_OD)
+  public String getExtremeDatesForBundelId(@Context HttpServletRequest request) {
+
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addParam(LogSearchConstants.BUNDLE_ID,
+      request.getParameter("bundle_id"));
+
+    return logMgr.getExtremeDatesForBundelId(searchCriteria);
+
+  }
+
+  @GET
+  @Path("/fields")
+  @Produces({"application/json"})
+  @ApiOperation(GET_SERVICE_LOGS_FIELD_NAME_OD)
+  public String getServiceLogsFieldsName() {
+    return logMgr.getServiceLogsFieldsName();
+  }
+
+  @GET
+  @Path("/schema/fields")
+  @Produces({"application/json"})
+  @ApiOperation(GET_SERVICE_LOGS_SCHEMA_FIELD_NAME_OD)
+  public String getServiceLogsSchemaFieldsName() {
+    return logMgr.getServiceLogsSchemaFieldsName();
+  }
+
+  @GET
+  @Path("/anygraph")
+  @Produces({"application/json"})
+  @ApiOperation(GET_ANY_GRAPH_DATA_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = X_AXIS_D, name = "xAxis", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = Y_AXIS_D, name = "yAxis", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = STACK_BY_D, name = "stackBy", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
+  public String getAnyGraphData(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredServiceLogsParams(request);
+    searchCriteria.addParam("xAxis", request.getParameter("xAxis"));
+    searchCriteria.addParam("yAxis", request.getParameter("yAxis"));
+    searchCriteria.addParam("stackBy", request.getParameter("stackBy"));
+    searchCriteria.addParam("from", request.getParameter("from"));
+    searchCriteria.addParam("to", request.getParameter("to"));
+    searchCriteria.addParam("unit", request.getParameter("unit"));
+    return logMgr.getAnyGraphData(searchCriteria);
+  }
+
+  @GET
+  @Path("/truncated")
+  @Produces({"application/json"})
+  @ApiOperation(GET_AFTER_BEFORE_LOGS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ID_D, name = "id", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = SCROLL_TYPE_D, name = "scrollType", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = NUMBER_ROWS_D, name = "numberRows", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
+  public String getAfterBeforeLogs(@Context HttpServletRequest request) {
+    SearchCriteria searchCriteria = new SearchCriteria(request);
+    searchCriteria.addRequiredServiceLogsParams(request);
+    searchCriteria.addParam("hostLogFile", request.getParameter("host"));
+    searchCriteria.addParam("compLogFile",
+      request.getParameter("component"));
+    searchCriteria.addParam("id", request.getParameter("id"));
+    searchCriteria.addParam("scrollType",
+      request.getParameter("scrollType"));
+    searchCriteria.addParam("numberRows",
+      request.getParameter("numberRows"));
+    return logMgr.getAfterBeforeLogs(searchCriteria);
+  }
+
+  @GET
+  @Path("/serviceconfig")
+  @Produces({"application/json"})
+  @ApiOperation(GET_HADOOP_SERVICE_CONFIG_JSON_OD)
+  public String getHadoopServiceConfigJSON() {
+    return logMgr.getHadoopServiceConfigJSON();
+  }
+}
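
The endpoints above are thin wrappers that copy request parameters into a SearchCriteria and delegate to logMgr. As a usage illustration only, a minimal Java 11 HttpClient call against the log-level count endpoint could look like the sketch below; the host, port and the class-level path prefix (assumed here to be /api/v1/service/logs) are not part of this excerpt and are placeholders.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ServiceLogsClientSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder base URL; adjust host, port and path prefix for a real deployment.
    String base = "http://logsearch-host:61888/api/v1/service/logs";

    // GET /levels/count with the query parameters declared on getLogLevelsCount().
    URI uri = URI.create(base + "/levels/count"
        + "?q=*%3A*&start_time=2016-09-01T00:00:00Z&end_time=2016-09-02T00:00:00Z");

    HttpClient client = HttpClient.newHttpClient();
    HttpRequest request = HttpRequest.newBuilder(uri).GET().build();
    HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());

    // The endpoint produces application/json (a VCountList serialized by the portal).
    System.out.println(response.statusCode() + " " + response.body());
  }
}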

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
index 4b1675f..a79a737 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -58,7 +58,6 @@ public class UserConfigREST {
   UserConfigMgr userConfigMgr;
 
   @POST
-  @Path("/saveUserConfig")
   @Produces({"application/json"})
   @ApiOperation(SAVE_USER_CONFIG_OD)
   public String saveUserConfig(VUserConfig vhist) {
@@ -66,7 +65,6 @@ public class UserConfigREST {
   }
 
   @PUT
-  @Path("/updateUserConfig")
   @Produces({"application/json"})
   @ApiOperation(UPDATE_USER_CONFIG_OD)
   public String updateUserConfig(VUserConfig vhist) {
@@ -74,14 +72,13 @@ public class UserConfigREST {
   }
 
   @DELETE
-  @Path("/deleteUserConfig/{id}")
+  @Path("/{id}")
   @ApiOperation(DELETE_USER_CONFIG_OD)
   public void deleteUserConfig(@PathParam("id") String id) {
     userConfigMgr.deleteUserConfig(id);
   }
 
   @GET
-  @Path("/getUserConfig")
   @Produces({"application/json"})
   @ApiOperation(GET_USER_CONFIG_OD)
   @ApiImplicitParams(value = {
@@ -101,7 +98,7 @@ public class UserConfigREST {
   }
 
   @GET
-  @Path("/user_filter")
+  @Path("/users/filter")
   @Produces({"application/json"})
   @ApiOperation(GET_USER_FILTER_OD)
   public String getUserFilter(@Context HttpServletRequest request) {
@@ -109,7 +106,7 @@ public class UserConfigREST {
   }
 
   @POST
-  @Path("/user_filter")
+  @Path("/users/filter")
   @Produces({"application/json"})
   @ApiOperation(UPDATE_USER_FILTER_OD)
   public String createUserFilter(String json) {
@@ -117,7 +114,7 @@ public class UserConfigREST {
   }
 
   @PUT
-  @Path("/user_filter/{id}")
+  @Path("/users/filter/{id}")
   @Produces({"application/json"})
   @ApiOperation(GET_USER_FILTER_BY_ID_OD)
   public String updateUserFilter(String json) {
@@ -125,7 +122,7 @@ public class UserConfigREST {
   }
 
   @GET
-  @Path("/getAllUserName")
+  @Path("/users/names")
   @Produces({"application/json"})
   @ApiOperation(GET_ALL_USER_NAMES_OD)
   public String getAllUserName() {
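
The hunks above move the user-config endpoints to resource-style paths: create and update post to the collection root instead of /saveUserConfig and /updateUserConfig, delete uses /{id}, and the filter and user-name lookups move under /users/. A minimal client sketch follows; the /api/v1/userconfig base URL and the JSON payload field are assumptions for illustration, not taken from this patch.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class UserConfigClientSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder base URL; '/userconfig' mirrors the Globals.baseURL + 'userconfig'
    // used by the portal JavaScript further down in this patch.
    String base = "http://logsearch-host:61888/api/v1/userconfig";
    HttpClient client = HttpClient.newHttpClient();

    // Create: POST to the collection root (replaces the removed /saveUserConfig path).
    String body = "{\"filtername\":\"my-filter\"}"; // illustrative payload, not the full VUserConfig
    HttpRequest save = HttpRequest.newBuilder(URI.create(base))
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString(body))
        .build();
    System.out.println(client.send(save, HttpResponse.BodyHandlers.ofString()).body());

    // Delete by id: DELETE /{id} (replaces the removed /deleteUserConfig/{id} path).
    HttpRequest delete = HttpRequest.newBuilder(URI.create(base + "/some-config-id")).DELETE().build();
    System.out.println(client.send(delete, HttpResponse.BodyHandlers.ofString()).statusCode());
  }
}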

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/login.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/login.html b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/login.html
index 44f1aeb..038cec2 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/login.html
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/login.html
@@ -1,4 +1,4 @@
-<!-- 
+<!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
@@ -35,27 +35,27 @@
 				var passDiv = $('#passCont');
 
 		        $.ajax({
-					url : "/api/v1/public/getGeneralConfig",
+					url : "/api/v1/public/config",
 					type: "GET",
 					async: true,
 					dataType: 'json',
-						success:function(results,status) 
+						success:function(results,status)
 						{
 							for(var i in results.vNameValues){
 								if ((results.vNameValues[i].name === "simpleAuth") && (results.vNameValues[i].value === "true")){
 									$("#password").val("");
 									passDiv.hide();
-								}	
+								}
 								if (passDiv.is(':hidden'))
 								   $("#password").prop('required',false);
 							}
 						},
-						error: function(errorThrown) 
+						error: function(errorThrown)
 						{
 						    passDiv.show();
 						},
 			            complete : function(){
-			                $('[data-id="loader"]').hide();            
+			                $('[data-id="loader"]').hide();
 			            }
 				});
 			});
@@ -105,7 +105,7 @@
 						</div>
 						<div data-id="loader" class="loading"></div>
 					</div>
-					</div>			
+					</div>
 		</div>
 
 		<script type="text/javascript">
@@ -116,20 +116,20 @@
 				    var postData = {};
 				    var formURL = ($(this).attr("action")) ? $(this).attr("action") : "/login";
 				    postData = {"username" : $("#username").val() , "password" : $("#password").val()};
-				 
+
 					$.ajax({
 						url : formURL,
 						type: "POST",
 						data : postData,
-							success:function(results,status) 
+							success:function(results,status)
 							{
 							    window.location = 'index.html'+window.location.search;
 							},
-							error: function(errorThrown) 
+							error: function(errorThrown)
 							{
 							    showError(errorThrown);
 							}
-						});				    
+						});
 				 		return false;
 				});
 
@@ -139,11 +139,11 @@
 
 				function showError(errorThrown){
 					var errorMsg = errorThrown.status;
-					
+
 				    switch(errorMsg){
-		                case 401: $('.errorBox').show(); 
+		                case 401: $('.errorBox').show();
 		                        break;
-		              
+
 		                default: $('.errorBox').hide();
 				    }
 				}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VAuditLogListBase.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VAuditLogListBase.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VAuditLogListBase.js
index 12f7c31..0f72697 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VAuditLogListBase.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VAuditLogListBase.js
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -17,18 +17,18 @@
  * under the License.
  */
 
- 
+
 define(['require',
 	'collections/BaseCollection',
 	'utils/Globals',
 	'models/VAuditLog'
 ],function(require,BaseCollection,Globals,VAuditLog){
-	'use strict';	
+	'use strict';
 
 	var VAuditLogListBase = BaseCollection.extend(
 	/** @lends VAuditLogListBase.prototype */
 	{
-		url: Globals.baseURL + 'audit/getAuditLogs',
+		url: Globals.baseURL + 'audit/logs',
 
 		model : VAuditLog,
 
@@ -49,8 +49,8 @@ define(['require',
 		 *************************/
 
 		getAuditSchemaFieldsName : function(token, options){
-			var url = Globals.baseURL  + 'audit/getAuditSchemaFieldsName';
-			
+			var url = Globals.baseURL  + 'audit/logs/schema/fields';
+
 			options = _.extend({
 				data : $.param(token),
 				contentType : 'application/json',
@@ -71,5 +71,3 @@ define(['require',
 
     return VAuditLogListBase;
 });
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/f8cf23e4/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VEventHistoryListBase.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VEventHistoryListBase.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VEventHistoryListBase.js
index f6f720d..06b2619 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VEventHistoryListBase.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/collection_bases/VEventHistoryListBase.js
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -24,11 +24,11 @@ define(['require',
     'models/VEventHistory'
 ],function(require,BaseCollection,Globals,VEventHistory) {
     'use strict';
-    
+
     var VEventHistoryListBase = BaseCollection.extend(
         /** @lends VEventHistoryListBase.prototype */
         {
-            url: Globals.baseURL + 'userconfig/getUserConfig',
+            url: Globals.baseURL + 'userconfig',
 
             model: VEventHistory,
 
@@ -48,7 +48,7 @@ define(['require',
              *************************/
 
             saveEventHistory: function(postData, options) {
-                var url = Globals.baseURL + 'userconfig/saveUserConfig';
+                var url = Globals.baseURL + 'userconfig';
 
                 options = _.extend({
                     data: JSON.stringify(postData),
@@ -59,7 +59,7 @@ define(['require',
                 return this.constructor.nonCrudOperation.call(this, url, 'POST', options);
             },
             saveDashboard: function(postData, options) {
-                var url = Globals.baseURL + 'userconfig/saveUserConfig';
+                var url = Globals.baseURL + 'userconfig';
 
                 options = _.extend({
                     data: JSON.stringify(postData),
@@ -70,7 +70,7 @@ define(['require',
                 return this.constructor.nonCrudOperation.call(this, url, 'POST', options);
             },
             deleteEventHistory: function(postData, options) {
-                var url = Globals.baseURL + 'userconfig/deleteUserConfig/' + postData.id;
+                var url = Globals.baseURL + 'userconfig/' + postData.id;
 
                 options = _.extend({
                     contentType: 'application/json',


[42/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code - Part 2 (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonServiceLogRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonServiceLogRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonServiceLogRequestConverter.java
index a02d585..2282c11 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonServiceLogRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractCommonServiceLogRequestConverter.java
@@ -18,37 +18,38 @@
  */
 package org.apache.ambari.logsearch.query.converter;
 
-import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.model.request.impl.BaseServiceLogRequest;
-import org.apache.ambari.logsearch.query.model.CommonSearchCriteria;
+import org.apache.ambari.logsearch.query.model.CommonServiceLogSearchCriteria;
 import org.apache.commons.lang.StringEscapeUtils;
 
-public abstract class AbstractCommonServiceLogRequestConverter<SOURCE extends BaseServiceLogRequest, RESULT extends CommonSearchCriteria>
+public abstract class AbstractCommonServiceLogRequestConverter<SOURCE extends BaseServiceLogRequest, RESULT extends CommonServiceLogSearchCriteria>
   extends AbstractCommonSearchRequestConverter<SOURCE, RESULT> {
 
   @Override
   public RESULT convertToSearchCriteria(SOURCE request) {
     RESULT criteria = createCriteria(request);
+    // TODO: check whether these are used from the UI or not
     criteria.addParam("advanceSearch", StringEscapeUtils.unescapeXml(request.getAdvancedSearch()));
     criteria.addParam("q", request.getQuery());
-    criteria.addParam("treeParams", StringEscapeUtils.unescapeHtml(request.getTreeParams()));
-    criteria.addParam("level", request.getLevel());
     criteria.addParam("gMustNot", request.getgMustNot());
-    criteria.addParam("from", request.getFrom());
-    criteria.addParam("to", request.getTo());
-    criteria.addParam("selectComp", request.getMustBe());
-    criteria.addParam("unselectComp", request.getMustNot());
-    criteria.addParam("iMessage", StringEscapeUtils.unescapeXml(request.getiMessage()));
     criteria.addParam("gEMessage", StringEscapeUtils.unescapeXml(request.getgEMessage()));
-    criteria.addParam("eMessage", StringEscapeUtils.unescapeXml(request.getgEMessage()));
-    criteria.addParam(LogSearchConstants.BUNDLE_ID, request.getBundleId());
-    criteria.addParam("host_name", request.getHostName());
-    criteria.addParam("component_name", request.getComponentName());
-    criteria.addParam("file_name", request.getFileName());
-    criteria.addParam("startDate", request.getStartTime());
-    criteria.addParam("endDate", request.getEndTime());
-    criteria.addParam("excludeQuery", StringEscapeUtils.unescapeXml(request.getExcludeQuery()));
-    criteria.addParam("includeQuery", StringEscapeUtils.unescapeXml(request.getIncludeQuery()));
+    criteria.addParam("unselectComp", request.getMustNot());
+
+    criteria.setTreeParams(StringEscapeUtils.unescapeHtml(request.getTreeParams()));
+    criteria.setLevel(request.getLevel());
+    criteria.setFrom(request.getFrom());
+    criteria.setTo(request.getTo());
+    criteria.setSelectComp(request.getMustBe());
+    criteria.setBundleId(request.getBundleId());
+    criteria.setHostName(request.getHostName());
+    criteria.setComponentName(request.getComponentName());
+    criteria.setFileName(request.getFileName());
+    criteria.setIncludeMessage(StringEscapeUtils.unescapeXml(request.getiMessage()));
+    criteria.setExcludeMessage(StringEscapeUtils.unescapeXml(request.getgEMessage()));
+    criteria.setStartTime(request.getStartTime());
+    criteria.setEndTime(request.getEndTime());
+    criteria.setExcludeQuery(StringEscapeUtils.unescapeXml(request.getExcludeQuery()));
+    criteria.setIncludeQuery(StringEscapeUtils.unescapeXml(request.getIncludeQuery()));
     return criteria;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractConverterAware.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractConverterAware.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractConverterAware.java
new file mode 100644
index 0000000..18a71c1
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AbstractConverterAware.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.converter;
+
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.core.convert.ConversionService;
+import org.springframework.core.convert.converter.Converter;
+import org.springframework.core.convert.converter.ConverterRegistry;
+
+import javax.annotation.PostConstruct;
+import javax.inject.Inject;
+
+public abstract class AbstractConverterAware<SOURCE, RESULT> implements Converter<SOURCE, RESULT> {
+
+  @Inject
+  @Qualifier("conversionService")
+  private ConversionService conversionService;
+
+  public ConversionService getConversionService() {
+    return conversionService;
+  }
+
+  @PostConstruct
+  private void register() {
+    if (conversionService instanceof ConverterRegistry) {
+      ((ConverterRegistry) conversionService).addConverter(this);
+    } else {
+      throw new IllegalStateException("Can't register Converter to ConverterRegistry");
+    }
+  }
+}
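
AbstractConverterAware registers each subclass with the shared ConverterRegistry in its @PostConstruct hook, so any @Component converter becomes reachable through the injected ConversionService without explicit wiring. A minimal sketch of a subclass is shown below; DemoRequest and DemoCriteria are hypothetical types invented for illustration and are not part of the patch.

package org.apache.ambari.logsearch.query.converter;

import org.springframework.stereotype.Component;

// Hypothetical converter: exists only to show the registration pattern.
@Component
public class DemoRequestConverter
    extends AbstractConverterAware<DemoRequestConverter.DemoRequest, DemoRequestConverter.DemoCriteria> {

  public static class DemoRequest { public String query; }
  public static class DemoCriteria { public String q; }

  @Override
  public DemoCriteria convert(DemoRequest source) {
    DemoCriteria criteria = new DemoCriteria();
    criteria.q = source.query; // copy the single illustrative field
    return criteria;
  }
}

// A caller then converts through the shared ConversionService, for example:
//   DemoCriteria criteria = getConversionService().convert(request, DemoCriteria.class);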

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AnyGraphRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AnyGraphRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AnyGraphRequestConverter.java
index 0372168..1639563 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AnyGraphRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AnyGraphRequestConverter.java
@@ -28,12 +28,12 @@ public class AnyGraphRequestConverter extends AbstractCommonSearchRequestConvert
   @Override
   public AnyGraphSearchCriteria convertToSearchCriteria(AnyGraphRequest anyGraphRequest) {
     AnyGraphSearchCriteria criteria = new AnyGraphSearchCriteria();
-    criteria.addParam("xAxis", anyGraphRequest.getxAxis());
-    criteria.addParam("yAxis", anyGraphRequest.getyAxis());
-    criteria.addParam("stackBy", anyGraphRequest.getStackBy());
-    criteria.addParam("unit", anyGraphRequest.getUnit());
-    criteria.addParam("from", anyGraphRequest.getFrom());
-    criteria.addParam("to", anyGraphRequest.getTo());
+    criteria.setxAxis(anyGraphRequest.getxAxis());
+    criteria.setyAxis(anyGraphRequest.getyAxis());
+    criteria.setStackBy(anyGraphRequest.getStackBy());
+    criteria.setUnit(anyGraphRequest.getUnit());
+    criteria.setFrom(anyGraphRequest.getFrom());
+    criteria.setTo(anyGraphRequest.getTo());
     return criteria;
   }
 }
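
The pattern repeated across these converters replaces stringly-typed addParam("key", value) calls with dedicated setters on the criteria objects, trading a generic parameter map for compile-time checked fields. The actual AnyGraphSearchCriteria class is not shown in this excerpt; a criteria class consistent with the setters used above would look roughly like this sketch.

package org.apache.ambari.logsearch.query.model;

// Illustrative only: the real AnyGraphSearchCriteria is not part of this patch excerpt.
public class AnyGraphSearchCriteriaSketch {
  private String xAxis;
  private String yAxis;
  private String stackBy;
  private String unit;
  private String from;
  private String to;

  public void setxAxis(String xAxis) { this.xAxis = xAxis; }
  public void setyAxis(String yAxis) { this.yAxis = yAxis; }
  public void setStackBy(String stackBy) { this.stackBy = stackBy; }
  public void setUnit(String unit) { this.unit = unit; }
  public void setFrom(String from) { this.from = from; }
  public void setTo(String to) { this.to = to; }

  // Typed access instead of looking the value up from a generic parameter map.
  public String getUnit() { return unit; }
}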

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditBarGraphRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditBarGraphRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditBarGraphRequestConverter.java
index f72a673..ac74287 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditBarGraphRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditBarGraphRequestConverter.java
@@ -28,7 +28,7 @@ public class AuditBarGraphRequestConverter extends AbstractCommonAuditLogRequest
   @Override
   public AuditBarGraphSearchCriteria createCriteria(AuditBarGraphRequest request) {
     AuditBarGraphSearchCriteria criteria = new AuditBarGraphSearchCriteria();
-    criteria.addParam("unit", request.getUnit());
+    criteria.setUnit(request.getUnit());
     return criteria;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditLogRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditLogRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditLogRequestConverter.java
index 27d314d..5ec7632 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditLogRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/AuditLogRequestConverter.java
@@ -28,7 +28,7 @@ public class AuditLogRequestConverter extends AbstractCommonAuditLogRequestConve
   @Override
   public AuditLogSearchCriteria createCriteria(AuditLogRequest request) {
     AuditLogSearchCriteria criteria = new AuditLogSearchCriteria();
-    criteria.addParam("isLastPage", request.isLastPage());
+    criteria.setLastPage(request.isLastPage());
     return criteria;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/BaseServiceLogRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/BaseServiceLogRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/BaseServiceLogRequestConverter.java
index cfd544c..efc9bc9 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/BaseServiceLogRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/BaseServiceLogRequestConverter.java
@@ -20,13 +20,14 @@ package org.apache.ambari.logsearch.query.converter;
 
 import org.apache.ambari.logsearch.model.request.impl.BaseServiceLogRequest;
 import org.apache.ambari.logsearch.query.model.CommonSearchCriteria;
+import org.apache.ambari.logsearch.query.model.CommonServiceLogSearchCriteria;
 import org.springframework.stereotype.Component;
 
 @Component
-public class BaseServiceLogRequestConverter extends AbstractCommonServiceLogRequestConverter<BaseServiceLogRequest, CommonSearchCriteria> {
+public class BaseServiceLogRequestConverter extends AbstractCommonServiceLogRequestConverter<BaseServiceLogRequest, CommonServiceLogSearchCriteria> {
 
   @Override
-  public CommonSearchCriteria createCriteria(BaseServiceLogRequest request) {
-    return new CommonSearchCriteria();
+  public CommonServiceLogSearchCriteria createCriteria(BaseServiceLogRequest request) {
+    return new CommonServiceLogSearchCriteria();
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldAuditLogRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldAuditLogRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldAuditLogRequestConverter.java
index 089b593..6197d48 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldAuditLogRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldAuditLogRequestConverter.java
@@ -28,7 +28,7 @@ public class FieldAuditLogRequestConverter extends AbstractCommonAuditLogRequest
   @Override
   public FieldAuditLogSearchCriteria createCriteria(FieldAuditLogRequest request) {
     FieldAuditLogSearchCriteria criteria = new FieldAuditLogSearchCriteria();
-    criteria.addParam("field", request.getField());
+    criteria.setField(request.getField());
     return criteria;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldBarGraphRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldBarGraphRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldBarGraphRequestConverter.java
index dd518f8..74b0dac 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldBarGraphRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/FieldBarGraphRequestConverter.java
@@ -28,8 +28,8 @@ public class FieldBarGraphRequestConverter extends AbstractCommonAuditLogRequest
   @Override
   public FieldAuditBarGraphSearchCriteria createCriteria(FieldAuditBarGraphRequest request) {
     FieldAuditBarGraphSearchCriteria criteria = new FieldAuditBarGraphSearchCriteria();
-    criteria.addParam("unit", request.getUnit());
-    criteria.addParam("field", request.getField());
+    criteria.setUnit(request.getUnit());
+    criteria.setField(request.getField());
     return criteria;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileRequestConverter.java
index 7c3038e..62c2fbe 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileRequestConverter.java
@@ -29,9 +29,9 @@ public class LogFileRequestConverter implements Converter<LogFileRequest, LogFil
   @Override
   public LogFileSearchCriteria convert(LogFileRequest request) {
     LogFileSearchCriteria criteria = new LogFileSearchCriteria();
-    criteria.addParam("component", request.getComponent());
-    criteria.addParam("host", request.getHost());
-    criteria.addParam("logType", request.getLogType());
+    criteria.setLogFileComponent(request.getComponent());
+    criteria.setLogFileHost(request.getHost());
+    criteria.setLogType(request.getLogType());
     return criteria;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileTailRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileTailRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileTailRequestConverter.java
index 88b1a34..08c6ecc 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileTailRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/LogFileTailRequestConverter.java
@@ -29,10 +29,10 @@ public class LogFileTailRequestConverter implements Converter<LogFileTailRequest
   @Override
   public LogFileTailSearchCriteria convert(LogFileTailRequest request) {
     LogFileTailSearchCriteria criteria = new LogFileTailSearchCriteria();
-    criteria.addParam("component", request.getComponent());
-    criteria.addParam("host", request.getHost());
-    criteria.addParam("logType", request.getLogType());
-    criteria.addParam("tailSize", request.getTailSize());
+    criteria.setLogFileComponent(request.getComponent());
+    criteria.setLogFileHost(request.getHost());
+    criteria.setLogType(request.getLogType());
+    criteria.setLogTailSize(request.getTailSize());
     return criteria;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceAnyGraphRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceAnyGraphRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceAnyGraphRequestConverter.java
index 7a559de..8f1aaf0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceAnyGraphRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceAnyGraphRequestConverter.java
@@ -28,12 +28,12 @@ public class ServiceAnyGraphRequestConverter extends AbstractCommonServiceLogReq
   @Override
   public ServiceAnyGraphSearchCriteria createCriteria(ServiceAnyGraphRequest anyGraphRequest) {
     ServiceAnyGraphSearchCriteria criteria = new ServiceAnyGraphSearchCriteria();
-    criteria.addParam("xAxis", anyGraphRequest.getxAxis());
-    criteria.addParam("yAxis", anyGraphRequest.getyAxis());
-    criteria.addParam("stackBy", anyGraphRequest.getStackBy());
-    criteria.addParam("unit", anyGraphRequest.getUnit());
-    criteria.addParam("from", anyGraphRequest.getFrom());
-    criteria.addParam("to", anyGraphRequest.getTo());
+    criteria.setxAxis(anyGraphRequest.getxAxis());
+    criteria.setyAxis(anyGraphRequest.getyAxis());
+    criteria.setStackBy(anyGraphRequest.getStackBy());
+    criteria.setUnit(anyGraphRequest.getUnit());
+    criteria.setFrom(anyGraphRequest.getFrom());
+    criteria.setTo(anyGraphRequest.getTo());
     return criteria;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceExtremeDatesRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceExtremeDatesRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceExtremeDatesRequestConverter.java
index fe81468..489e879 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceExtremeDatesRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceExtremeDatesRequestConverter.java
@@ -18,7 +18,6 @@
  */
 package org.apache.ambari.logsearch.query.converter;
 
-import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.model.request.impl.ServiceExtremeDatesRequest;
 import org.apache.ambari.logsearch.query.model.ServiceExtremeDatesCriteria;
 import org.springframework.stereotype.Component;
@@ -29,7 +28,7 @@ public class ServiceExtremeDatesRequestConverter extends AbstractCommonSearchReq
   @Override
   public ServiceExtremeDatesCriteria convertToSearchCriteria(ServiceExtremeDatesRequest request) {
     ServiceExtremeDatesCriteria criteria = new ServiceExtremeDatesCriteria();
-    criteria.addParam(LogSearchConstants.BUNDLE_ID, request.getBundleId());
+    criteria.setBundleId(request.getBundleId());
     return criteria;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceGraphRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceGraphRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceGraphRequestConverter.java
index 19165c0..6c501ae 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceGraphRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceGraphRequestConverter.java
@@ -28,9 +28,9 @@ public class ServiceGraphRequestConverter extends AbstractCommonServiceLogReques
   @Override
   public ServiceGraphSearchCriteria createCriteria(ServiceGraphRequest request) {
     ServiceGraphSearchCriteria criteria = new ServiceGraphSearchCriteria();
-    criteria.addParam("hostLogFile", request.getHostLogFile());
-    criteria.addParam("compLogFile", request.getComponentLogFile());
-    criteria.addParam("unit", request.getUnit());
+    criteria.setLogFileHostName(request.getHostLogFile());
+    criteria.setLogFileComponentName(request.getComponentLogFile());
+    criteria.setUnit(request.getUnit());
     return criteria;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogExportRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogExportRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogExportRequestConverter.java
index 7d83e49..bd06546 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogExportRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogExportRequestConverter.java
@@ -28,11 +28,10 @@ public class ServiceLogExportRequestConverter extends AbstractCommonServiceLogRe
   @Override
   public ServiceLogExportSearchCriteria createCriteria(ServiceLogExportRequest request) {
     ServiceLogExportSearchCriteria criteria = new ServiceLogExportSearchCriteria();
-    criteria.addParam("hostLogFile", request.getHostLogFile());
-    criteria.addParam("compLogFile",
-      request.getComponentLogFile());
-    criteria.addParam("format", request.getFormat());
-    criteria.addParam("utcOffset", request.getUtcOffset());
+    criteria.setLogFileHostName(request.getHostLogFile());
+    criteria.setLogFileComponentName(request.getComponentLogFile());
+    criteria.setFormat(request.getFormat());
+    criteria.setUtcOffset(request.getUtcOffset());
     return criteria;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogFileRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogFileRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogFileRequestConverter.java
index f5148f3..392c9d3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogFileRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogFileRequestConverter.java
@@ -29,8 +29,8 @@ public class ServiceLogFileRequestConverter
   @Override
   public ServiceLogFileSearchCriteria createCriteria(ServiceLogFileRequest request) {
     ServiceLogFileSearchCriteria criteria = new ServiceLogFileSearchCriteria();
-    criteria.addParam("hostLogFile", request.getHostLogFile());
-    criteria.addParam("compLogFile", request.getComponentLogFile());
+    criteria.setLogFileHostName(request.getHostLogFile());
+    criteria.setLogFileComponentName(request.getComponentLogFile());
     return criteria;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogRequestConverter.java
index 6a70d55..aa93c6a 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogRequestConverter.java
@@ -29,13 +29,13 @@ public class ServiceLogRequestConverter extends AbstractCommonServiceLogRequestC
   @Override
   public ServiceLogSearchCriteria createCriteria(ServiceLogRequest request) {
     ServiceLogSearchCriteria criteria = new ServiceLogSearchCriteria();
-    criteria.addParam("hostLogFile", request.getHostLogFile());
-    criteria.addParam("compLogFile", request.getComponentLogFile());
-    criteria.addParam("keyword", StringEscapeUtils.unescapeXml(request.getKeyWord()));
-    criteria.addParam("sourceLogId", request.getSourceLogId());
-    criteria.addParam("keywordType", request.getKeywordType());
-    criteria.addParam("token", request.getToken());
-    criteria.addParam("isLastPage", request.isLastPage());
+    criteria.setLogFileHostName(request.getHostLogFile());
+    criteria.setLogFileComponentName(request.getComponentLogFile());
+    criteria.setKeyword(StringEscapeUtils.unescapeXml(request.getKeyWord()));
+    criteria.setKeywordType(request.getKeywordType());
+    criteria.setSourceLogId(request.getSourceLogId());
+    criteria.setToken(request.getToken());
+    criteria.setLastPage(request.isLastPage());
     return criteria;
   }
 }
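For illustration only (not part of the patch): a minimal sketch of how the typed accessors introduced above would be used in place of the old string-keyed addParam calls. It assumes the ambari-logsearch-portal classes from this diff are on the classpath; the host and component values are made up.

import org.apache.ambari.logsearch.query.model.ServiceLogSearchCriteria;

public class ServiceLogCriteriaSketch {
  public static void main(String[] args) {
    ServiceLogSearchCriteria criteria = new ServiceLogSearchCriteria();
    // Typed setters replace criteria.addParam("hostLogFile", ...) and friends.
    criteria.setLogFileHostName("c6401.ambari.apache.org"); // hypothetical host
    criteria.setLogFileComponentName("hdfs_namenode");      // hypothetical component
    criteria.setKeyword("Exception");
    criteria.setLastPage(false);

    // The getters read back from the same string-keyed param map of SearchCriteria,
    // so code that inspects the underlying param map still sees the values.
    System.out.println(criteria.getKeyword());          // Exception
    System.out.println(criteria.getLogFileHostName());  // c6401.ambari.apache.org
  }
}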

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogTruncatedRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogTruncatedRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogTruncatedRequestConverter.java
index 676f049..1e58b79 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogTruncatedRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/ServiceLogTruncatedRequestConverter.java
@@ -28,11 +28,11 @@ public class ServiceLogTruncatedRequestConverter extends AbstractCommonServiceLo
   @Override
   public ServiceLogTruncatedSearchCriteria createCriteria(ServiceLogTruncatedRequest request) {
     ServiceLogTruncatedSearchCriteria criteria = new ServiceLogTruncatedSearchCriteria();
-    criteria.addParam("hostLogFile", request.getHostLogFile());
-    criteria.addParam("compLogFile", request.getComponentLogFile());
-    criteria.addParam("id", request.getId());
-    criteria.addParam("scrollType", request.getScrollType());
-    criteria.addParam("numberRows", request.getNumberRows());
+    criteria.setLogFileHostName(request.getHostLogFile());
+    criteria.setLogFileComponentName(request.getComponentLogFile());
+    criteria.setId(request.getId());
+    criteria.setScrollType(request.getScrollType());
+    criteria.setNumberRows(request.getNumberRows());
     return criteria;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/SimpleQueryRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/SimpleQueryRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/SimpleQueryRequestConverter.java
index 8c50f66..1f084fd 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/SimpleQueryRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/SimpleQueryRequestConverter.java
@@ -19,16 +19,17 @@
 package org.apache.ambari.logsearch.query.converter;
 
 import org.apache.ambari.logsearch.model.request.impl.SimpleQueryRequest;
-import org.apache.ambari.logsearch.query.model.SearchCriteria;
-import org.springframework.core.convert.converter.Converter;
+import org.apache.ambari.logsearch.query.model.CommonSearchCriteria;
 import org.springframework.stereotype.Component;
 
 @Component
-public class SimpleQueryRequestConverter implements Converter<SimpleQueryRequest, SearchCriteria> {
+public class SimpleQueryRequestConverter extends AbstractConverterAware<SimpleQueryRequest, CommonSearchCriteria> {
+
   @Override
-  public SearchCriteria convert(SimpleQueryRequest simpleQueryRequest) {
-    SearchCriteria searchCriteria = new SearchCriteria();
+  public CommonSearchCriteria convert(SimpleQueryRequest simpleQueryRequest) {
+    CommonSearchCriteria searchCriteria = new CommonSearchCriteria();
     searchCriteria.addParam("q", simpleQueryRequest.getQuery());
     return searchCriteria;
   }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserConfigRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserConfigRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserConfigRequestConverter.java
index 50847c7..8f2aaa0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserConfigRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserConfigRequestConverter.java
@@ -18,21 +18,19 @@
  */
 package org.apache.ambari.logsearch.query.converter;
 
-import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.model.request.impl.UserConfigRequest;
 import org.apache.ambari.logsearch.query.model.UserConfigSearchCriteria;
-import org.springframework.core.convert.converter.Converter;
 import org.springframework.stereotype.Component;
 
 @Component
-public class UserConfigRequestConverter implements Converter<UserConfigRequest, UserConfigSearchCriteria> {
+public class UserConfigRequestConverter extends AbstractConverterAware<UserConfigRequest, UserConfigSearchCriteria> {
 
   @Override
   public UserConfigSearchCriteria convert(UserConfigRequest request) {
     UserConfigSearchCriteria criteria = new UserConfigSearchCriteria();
-    criteria.addParam(LogSearchConstants.USER_NAME, request.getUserId());
-    criteria.addParam(LogSearchConstants.FILTER_NAME, request.getFilterName());
-    criteria.addParam(LogSearchConstants.ROW_TYPE, request.getRowType());
+    criteria.setUserName(request.getUserId());
+    criteria.setFilterName(request.getFilterName());
+    criteria.setRowType(request.getRowType());
     return criteria;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserExportRequestConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserExportRequestConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserExportRequestConverter.java
index a0b5f0f..c7f738e 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserExportRequestConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/converter/UserExportRequestConverter.java
@@ -28,7 +28,8 @@ public class UserExportRequestConverter extends AbstractCommonAuditLogRequestCon
   @Override
   public UserExportSearchCriteria createCriteria(UserExportRequest request) {
     UserExportSearchCriteria criteria = new UserExportSearchCriteria();
-    criteria.addParam("field", request.getField());
+    criteria.setField(request.getField());
+    criteria.setFormat(request.getFormat());
     return criteria;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AnyGraphSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AnyGraphSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AnyGraphSearchCriteria.java
index a11c056..aa61851 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AnyGraphSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AnyGraphSearchCriteria.java
@@ -18,8 +18,60 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
-import org.apache.ambari.logsearch.common.Marker;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_FROM;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_STACK_BY;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_TO;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_UNIT;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_X_AXIS;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_Y_AXIS;
 
-@Marker
 public class AnyGraphSearchCriteria extends CommonSearchCriteria {
+
+  public String getxAxis() {
+    return getParam(PARAM_X_AXIS, String.class);
+  }
+
+  public void setxAxis(String xAxis) {
+    addParam(PARAM_X_AXIS, xAxis);
+  }
+
+  public String getyAxis() {
+    return getParam(PARAM_Y_AXIS, String.class);
+  }
+
+  public void setyAxis(String yAxis) {
+    addParam(PARAM_Y_AXIS, yAxis);
+  }
+
+  public String getStackBy() {
+    return getParam(PARAM_STACK_BY, String.class);
+  }
+
+  public void setStackBy(String stackBy) {
+    addParam(PARAM_STACK_BY, stackBy);
+  }
+
+  public String getUnit() {
+    return getParam(PARAM_UNIT, String.class);
+  }
+
+  public void setUnit(String unit) {
+    addParam(PARAM_UNIT, unit);
+  }
+
+  public String getFrom() {
+    return getParam(PARAM_FROM, String.class);
+  }
+
+  public void setFrom(String from) {
+    addParam(PARAM_FROM, from);
+  }
+
+  public String getTo() {
+    return getParam(PARAM_TO, String.class);
+  }
+
+  public void setTo(String to) {
+    addParam(PARAM_TO, to);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditBarGraphSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditBarGraphSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditBarGraphSearchCriteria.java
index c41ec15..49304c4 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditBarGraphSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditBarGraphSearchCriteria.java
@@ -18,8 +18,16 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
-import org.apache.ambari.logsearch.common.Marker;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_UNIT;
 
-@Marker
 public class AuditBarGraphSearchCriteria extends CommonSearchCriteria {
+
+  public void setUnit(String unit) {
+    addParam(PARAM_UNIT, unit);
+  }
+
+  public String getUnit() {
+    return getParam(PARAM_UNIT, String.class);
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditLogSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditLogSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditLogSearchCriteria.java
index f4fe207..03df3ad 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditLogSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/AuditLogSearchCriteria.java
@@ -18,8 +18,16 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
-import org.apache.ambari.logsearch.common.Marker;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_IS_LAST_PAGE;
 
-@Marker
 public class AuditLogSearchCriteria extends CommonSearchCriteria {
+
+  public void setLastPage(boolean lastPage) {
+    addParam(PARAM_IS_LAST_PAGE, lastPage);
+  }
+
+  public boolean isLastPage() {
+    return getParam(PARAM_IS_LAST_PAGE, Boolean.class);
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/CommonSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/CommonSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/CommonSearchCriteria.java
index 47d12e5..ffdb2c7 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/CommonSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/CommonSearchCriteria.java
@@ -18,83 +18,78 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_END_TIME;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_EXCLUDE_MESSAGE;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_EXCLUDE_QUERY;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_INCLUDE_MESSAGE;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_INCLUDE_QUERY;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_MUST_BE_STRING;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_MUST_NOT_STRING;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_START_TIME;
+
 public class CommonSearchCriteria extends SearchCriteria {
-  private int startIndex = 0;
-  private int maxRows = Integer.MAX_VALUE;
-  private String sortBy = null;
-  private String sortType = null;
-  private int page = 0;
 
-  private String globalStartTime = null;
-  private String globalEndTime = null;
+  public String getIncludeMessage() {
+    return getParam(PARAM_INCLUDE_MESSAGE, String.class);
+  }
+
+  public void setIncludeMessage(String includeMessage) {
+    addParam(PARAM_INCLUDE_MESSAGE, includeMessage);
+  }
 
-  @Override
-  public int getStartIndex() {
-    return startIndex;
+  public String getExcludeMessage() {
+    return getParam(PARAM_EXCLUDE_MESSAGE, String.class);
   }
 
-  @Override
-  public void setStartIndex(int startIndex) {
-    this.startIndex = startIndex;
+  public void setExcludeMessage(String excludeMessage) {
+    addParam(PARAM_EXCLUDE_MESSAGE, excludeMessage);
   }
 
-  @Override
-  public int getMaxRows() {
-    return maxRows;
+  public String getMustBe() {
+    return getParam(PARAM_MUST_BE_STRING, String.class);
   }
 
-  @Override
-  public void setMaxRows(int maxRows) {
-    this.maxRows = maxRows;
+  public void setMustBe(String mustHave) {
+    addParam(PARAM_MUST_BE_STRING, mustHave);
   }
 
-  @Override
-  public String getSortType() {
-    return sortType;
+  public String getMustNot() {
+    return getParam(PARAM_MUST_NOT_STRING, String.class);
   }
 
-  @Override
-  public void setSortType(String sortType) {
-    this.sortType = sortType;
+  public void setMustNot(String mustNot) {
+    addParam(PARAM_MUST_NOT_STRING, mustNot);
   }
 
-  @Override
-  public String getSortBy() {
-    return sortBy;
+  public String getIncludeQuery() {
+    return getParam(PARAM_INCLUDE_QUERY, String.class);
   }
 
-  @Override
-  public void setSortBy(String sortBy) {
-    this.sortBy = sortBy;
+  public void setIncludeQuery(String includeQuery) {
+    addParam(PARAM_INCLUDE_QUERY, includeQuery);
   }
 
-  @Override
-  public int getPage() {
-    return page;
+  public String getExcludeQuery() {
+    return getParam(PARAM_EXCLUDE_QUERY, String.class);
   }
 
-  @Override
-  public void setPage(int page) {
-    this.page = page;
+  public void setExcludeQuery(String excludeQuery) {
+    addParam(PARAM_EXCLUDE_QUERY, excludeQuery);
   }
 
-  @Override
-  public String getGlobalStartTime() {
-    return globalStartTime;
+  public String getStartTime() {
+    return getParam(PARAM_START_TIME, String.class);
   }
 
-  @Override
-  public void setGlobalStartTime(String globalStartTime) {
-    this.globalStartTime = globalStartTime;
+  public void setStartTime(String startTime) {
+    addParam(PARAM_START_TIME, startTime);
   }
 
-  @Override
-  public String getGlobalEndTime() {
-    return globalEndTime;
+  public String getEndTime() {
+    return getParam(PARAM_END_TIME, String.class);
   }
 
-  @Override
-  public void setGlobalEndTime(String globalEndTime) {
-    this.globalEndTime = globalEndTime;
+  public void setEndTime(String endTime) {
+    addParam(PARAM_END_TIME, endTime);
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/CommonServiceLogSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/CommonServiceLogSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/CommonServiceLogSearchCriteria.java
new file mode 100644
index 0000000..4c79c69
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/CommonServiceLogSearchCriteria.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.query.model;
+
+import org.apache.ambari.logsearch.query.SearchCriteriaConstants;
+
+public class CommonServiceLogSearchCriteria extends CommonSearchCriteria {
+
+  public String getTreeParams() {
+    return getParam(SearchCriteriaConstants.PARAM_TREE_PARAMS, String.class);
+  }
+
+  public void setTreeParams(String treeParams) {
+    addParam(SearchCriteriaConstants.PARAM_TREE_PARAMS, treeParams);
+  }
+
+  public String getLevel() {
+    return getParam(SearchCriteriaConstants.PARAM_LEVEL, String.class);
+  }
+
+  public void setLevel(String level) {
+    addParam(SearchCriteriaConstants.PARAM_LEVEL, level);
+  }
+
+  public String getSelectComp() {
+    return getParam(SearchCriteriaConstants.PARAM_SELECT_COMP, String.class);
+  }
+
+  public void setSelectComp(String selectComp) {
+    addParam(SearchCriteriaConstants.PARAM_SELECT_COMP, selectComp);
+  }
+
+  public String getBundleId() {
+    return getParam(SearchCriteriaConstants.PARAM_BUNDLE_ID, String.class);
+  }
+
+  public void setBundleId(String bundleId) {
+    addParam(SearchCriteriaConstants.PARAM_BUNDLE_ID, bundleId);
+  }
+
+  public String getFrom() {
+    return getParam(SearchCriteriaConstants.PARAM_FROM, String.class);
+  }
+
+  public void setFrom(String from) {
+    addParam(SearchCriteriaConstants.PARAM_FROM, from);
+  }
+
+  public String getTo() {
+    return getParam(SearchCriteriaConstants.PARAM_TO, String.class);
+  }
+
+  public void setTo(String to) {
+    addParam(SearchCriteriaConstants.PARAM_TO, to);
+  }
+
+  public String getHostName() {
+    return getParam(SearchCriteriaConstants.PARAM_HOST_NAME, String.class);
+  }
+
+  public void setHostName(String hostName) {
+    addParam(SearchCriteriaConstants.PARAM_HOST_NAME, hostName);
+  }
+
+  public String getComponentName() {
+    return getParam(SearchCriteriaConstants.PARAM_COMPONENT_NAME, String.class);
+  }
+
+  public void setComponentName(String componentName) {
+    addParam(SearchCriteriaConstants.PARAM_COMPONENT_NAME, componentName);
+  }
+
+  public String getFileName() {
+    return getParam(SearchCriteriaConstants.PARAM_FILE_NAME, String.class);
+  }
+
+  public void setFileName(String fileName) {
+    addParam(SearchCriteriaConstants.PARAM_FILE_NAME, fileName);
+  }
+}
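A hypothetical example (not in the patch) of how a further subclass would add one more typed field on top of the shared param map. The class name, key, and field are invented for illustration and assume the classes from this diff are on the classpath.

import org.apache.ambari.logsearch.query.model.CommonServiceLogSearchCriteria;

public class GroupedServiceLogSearchCriteria extends CommonServiceLogSearchCriteria {

  private static final String PARAM_GROUP_BY = "groupBy"; // illustrative key, not in SearchCriteriaConstants

  public String getGroupBy() {
    return getParam(PARAM_GROUP_BY, String.class);
  }

  public void setGroupBy(String groupBy) {
    addParam(PARAM_GROUP_BY, groupBy);
  }
}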

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditBarGraphSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditBarGraphSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditBarGraphSearchCriteria.java
index f931f5d..381b3c2 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditBarGraphSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditBarGraphSearchCriteria.java
@@ -18,8 +18,15 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
-import org.apache.ambari.logsearch.common.Marker;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_FIELD;
 
-@Marker
 public class FieldAuditBarGraphSearchCriteria extends AuditBarGraphSearchCriteria {
+
+  public String getField() {
+    return getParam(PARAM_FIELD, String.class);
+  }
+
+  public void setField(String field) {
+    addParam(PARAM_FIELD, field);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditLogSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditLogSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditLogSearchCriteria.java
index 8dd5854..9d5d225 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditLogSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/FieldAuditLogSearchCriteria.java
@@ -18,8 +18,15 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
-import org.apache.ambari.logsearch.common.Marker;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_FIELD;
 
-@Marker
 public class FieldAuditLogSearchCriteria extends CommonSearchCriteria {
+
+  public String getField() {
+    return getParam(PARAM_FIELD, String.class);
+  }
+
+  public void setField(String field) {
+    addParam(PARAM_FIELD, field);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileSearchCriteria.java
index e4e2a14..e33e5eb 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileSearchCriteria.java
@@ -20,6 +20,33 @@ package org.apache.ambari.logsearch.query.model;
 
 import org.apache.ambari.logsearch.common.Marker;
 
-@Marker
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_LOG_FILE_COMPONENT;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_LOG_FILE_HOST;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_LOG_FILE_TYPE;
+
 public class LogFileSearchCriteria extends SearchCriteria {
+
+  public String getLogFileComponent() {
+    return getParam(PARAM_LOG_FILE_COMPONENT, String.class);
+  }
+
+  public void setLogFileComponent(String logFileComponent) {
+    addParam(PARAM_LOG_FILE_COMPONENT, logFileComponent);
+  }
+
+  public String getLogFileHost() {
+    return getParam(PARAM_LOG_FILE_HOST, String.class);
+  }
+
+  public void setLogFileHost(String logFileHost) {
+    addParam(PARAM_LOG_FILE_HOST, logFileHost);
+  }
+
+  public String getLogType() {
+    return getParam(PARAM_LOG_FILE_TYPE, String.class);
+  }
+
+  public void setLogType(String logType) {
+    addParam(PARAM_LOG_FILE_TYPE, logType);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileTailSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileTailSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileTailSearchCriteria.java
index fecb396..ccea4ab 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileTailSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/LogFileTailSearchCriteria.java
@@ -18,8 +18,15 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
-import org.apache.ambari.logsearch.common.Marker;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_LOG_TAIL_SIZE;
 
-@Marker
-public class LogFileTailSearchCriteria extends SearchCriteria {
+public class LogFileTailSearchCriteria extends LogFileSearchCriteria {
+
+  public String getLogTailSize() {
+    return getParam(PARAM_LOG_TAIL_SIZE, String.class);
+  }
+
+  public void setLogTailSize(String logTailSize) {
+    addParam(PARAM_LOG_TAIL_SIZE, logTailSize);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/SearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/SearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/SearchCriteria.java
index 091194e..661337f 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/SearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/SearchCriteria.java
@@ -25,6 +25,7 @@ import java.util.Map;
 import org.apache.ambari.logsearch.common.PropertiesHelper;
 
 public class SearchCriteria {
+
   private int startIndex = 0;
   private int maxRows = Integer.MAX_VALUE;
   private String sortBy = null;
@@ -42,6 +43,30 @@ public class SearchCriteria {
     // Auto-generated constructor stub
   }
 
+
+  public void addParam(String name, Object value) {
+    paramList.put(name, value);
+  }
+
+  public Object getParamValue(String name) {
+    return paramList.get(name);
+  }
+
+  public <T> T getParam(String key, Class<T> type) {
+    if (getParamValue(key) != null) {
+      return (T) getParamValue(key);
+    }
+    return null;
+  }
+
+  public Map<String, Object> getUrlParamMap() {
+    return urlParamMap;
+  }
+
+  public void setUrlParamMap(Map<String, Object> urlParamMap) {
+    this.urlParamMap = urlParamMap;
+  }
+
   public int getStartIndex() {
     return startIndex;
   }
@@ -62,33 +87,6 @@ public class SearchCriteria {
     return sortType;
   }
 
-
-  public void addParam(String name, Object value) {
-    String solrValue = PropertiesHelper.getProperty(name);
-    if (solrValue == null || solrValue.isEmpty()) {
-      paramList.put(name, value);
-    } else {
-      try {
-        String propertyFieldMappings[] = solrValue.split(",");
-        HashMap<String, String> propertyFieldValue = new HashMap<String, String>();
-        for (String temp : propertyFieldMappings) {
-          String arrayValue[] = temp.split(":");
-          propertyFieldValue.put(arrayValue[0].toLowerCase(Locale.ENGLISH), arrayValue[1].toLowerCase(Locale.ENGLISH));
-        }
-        String originalValue = propertyFieldValue.get(value.toString().toLowerCase(Locale.ENGLISH));
-        if (originalValue != null && !originalValue.isEmpty())
-          paramList.put(name, originalValue);
-
-      } catch (Exception e) {
-        //do nothing
-      }
-    }
-  }
-
-  public Object getParamValue(String name) {
-    return paramList.get(name);
-  }
-
   public String getSortBy() {
     return sortBy;
   }
@@ -125,12 +123,4 @@ public class SearchCriteria {
     this.globalEndTime = globalEndTime;
   }
 
-  public Map<String, Object> getUrlParamMap() {
-    return urlParamMap;
-  }
-
-  public void setUrlParamMap(Map<String, Object> urlParamMap) {
-    this.urlParamMap = urlParamMap;
-  }
-
 }
\ No newline at end of file
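A small self-contained mirror of the param-map pattern above (illustration only, not the actual SearchCriteria class). It also shows how the Class<T> token could be used with type.cast(...) to fail fast, whereas the patch relies on an unchecked cast.

import java.util.HashMap;
import java.util.Map;

class ParamMapSketch {

  private final Map<String, Object> params = new HashMap<>();

  void addParam(String name, Object value) {
    params.put(name, value);
  }

  <T> T getParam(String key, Class<T> type) {
    Object value = params.get(key);
    return value == null ? null : type.cast(value); // fails fast on a type mismatch
  }

  public static void main(String[] args) {
    ParamMapSketch sketch = new ParamMapSketch();
    sketch.addParam("unit", "+1HOUR");
    System.out.println(sketch.getParam("unit", String.class)); // +1HOUR
  }
}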

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceAnyGraphSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceAnyGraphSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceAnyGraphSearchCriteria.java
index 0ef5bdf..cd852b5 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceAnyGraphSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceAnyGraphSearchCriteria.java
@@ -20,6 +20,41 @@ package org.apache.ambari.logsearch.query.model;
 
 import org.apache.ambari.logsearch.common.Marker;
 
-@Marker
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_STACK_BY;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_UNIT;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_X_AXIS;
+import static org.apache.ambari.logsearch.query.SearchCriteriaConstants.PARAM_Y_AXIS;
+
 public class ServiceAnyGraphSearchCriteria extends ServiceLogFileSearchCriteria {
+  public String getxAxis() {
+    return getParam(PARAM_X_AXIS, String.class);
+  }
+
+  public void setxAxis(String xAxis) {
+    addParam(PARAM_X_AXIS, xAxis);
+  }
+
+  public String getyAxis() {
+    return getParam(PARAM_Y_AXIS, String.class);
+  }
+
+  public void setyAxis(String yAxis) {
+    addParam(PARAM_Y_AXIS, yAxis);
+  }
+
+  public String getStackBy() {
+    return getParam(PARAM_STACK_BY, String.class);
+  }
+
+  public void setStackBy(String stackBy) {
+    addParam(PARAM_STACK_BY, stackBy);
+  }
+
+  public String getUnit() {
+    return getParam(PARAM_UNIT, String.class);
+  }
+
+  public void setUnit(String unit) {
+    addParam(PARAM_UNIT, unit);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceExtremeDatesCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceExtremeDatesCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceExtremeDatesCriteria.java
index 3fc6ff8..d89ab3b 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceExtremeDatesCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceExtremeDatesCriteria.java
@@ -18,8 +18,15 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
-import org.apache.ambari.logsearch.common.Marker;
+import org.apache.ambari.logsearch.query.SearchCriteriaConstants;
 
-@Marker
 public class ServiceExtremeDatesCriteria extends CommonSearchCriteria {
+
+  public String getBundleId() {
+    return getParam(SearchCriteriaConstants.PARAM_BUNDLE_ID, String.class);
+  }
+
+  public void setBundleId(String bundleId) {
+    addParam(SearchCriteriaConstants.PARAM_BUNDLE_ID, bundleId);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceGraphSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceGraphSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceGraphSearchCriteria.java
index 31a57a4..8988af1 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceGraphSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceGraphSearchCriteria.java
@@ -18,8 +18,15 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
-import org.apache.ambari.logsearch.common.Marker;
+import org.apache.ambari.logsearch.query.SearchCriteriaConstants;
 
-@Marker
 public class ServiceGraphSearchCriteria extends ServiceLogFileSearchCriteria {
+
+  public String getUnit() {
+    return getParam(SearchCriteriaConstants.PARAM_UNIT, String.class);
+  }
+
+  public void setUnit(String unit) {
+    addParam(SearchCriteriaConstants.PARAM_UNIT, unit);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogExportSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogExportSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogExportSearchCriteria.java
index 8bab7f0..c7b7a39 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogExportSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogExportSearchCriteria.java
@@ -18,8 +18,23 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
-import org.apache.ambari.logsearch.common.Marker;
+import org.apache.ambari.logsearch.query.SearchCriteriaConstants;
 
-@Marker
 public class ServiceLogExportSearchCriteria extends ServiceLogFileSearchCriteria {
+
+  public void setFormat(String format) {
+    addParam(SearchCriteriaConstants.PARAM_FORMAT, format);
+  }
+
+  public String getFormat() {
+    return getParam(SearchCriteriaConstants.PARAM_FORMAT, String.class);
+  }
+
+  public void setUtcOffset(String utcOffset) {
+    addParam(SearchCriteriaConstants.PARAM_UTC_OFFSET, utcOffset);
+  }
+
+  public String getUtcOffset() {
+    return getParam(SearchCriteriaConstants.PARAM_UTC_OFFSET, String.class);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogFileSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogFileSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogFileSearchCriteria.java
index a9f5926..a80a3ce 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogFileSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogFileSearchCriteria.java
@@ -18,8 +18,23 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
-import org.apache.ambari.logsearch.common.Marker;
+import org.apache.ambari.logsearch.query.SearchCriteriaConstants;
 
-@Marker
-public class ServiceLogFileSearchCriteria extends CommonSearchCriteria {
+public class ServiceLogFileSearchCriteria extends CommonServiceLogSearchCriteria {
+
+  public void setLogFileHostName(String logFileHostName) {
+    addParam(SearchCriteriaConstants.PARAM_HOST_LOG_FILE, logFileHostName);
+  }
+
+  public String getLogFileHostName() {
+    return getParam(SearchCriteriaConstants.PARAM_HOST_LOG_FILE, String.class);
+  }
+
+  public void setLogFileComponentName(String logFileComponentName) {
+    addParam(SearchCriteriaConstants.PARAM_COMPONENT_LOG_FILE, logFileComponentName);
+  }
+
+  public String getLogFileComponentName() {
+    return getParam(SearchCriteriaConstants.PARAM_COMPONENT_LOG_FILE, String.class);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogSearchCriteria.java
index d41c589..b3a6bf7 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogSearchCriteria.java
@@ -18,8 +18,48 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
-import org.apache.ambari.logsearch.common.Marker;
+import org.apache.ambari.logsearch.query.SearchCriteriaConstants;
 
-@Marker
 public class ServiceLogSearchCriteria extends ServiceLogFileSearchCriteria {
+
+  public void setKeyword(String keyword) {
+    addParam(SearchCriteriaConstants.PARAM_KEYWORD, keyword);
+  }
+
+  public String getKeyword() {
+    return getParam(SearchCriteriaConstants.PARAM_KEYWORD, String.class);
+  }
+
+  public void setKeywordType(String keywordType) {
+    addParam(SearchCriteriaConstants.PARAM_KEYWORD_TYPE, keywordType);
+  }
+
+  public String getKeywordType() {
+    return getParam(SearchCriteriaConstants.PARAM_KEYWORD_TYPE, String.class);
+  }
+
+  public void setSourceLogId(String sourceLogId) {
+    addParam(SearchCriteriaConstants.PARAM_SOURCE_LOG_ID, sourceLogId);
+  }
+
+  public String getSourceLogId() {
+    return getParam(SearchCriteriaConstants.PARAM_SOURCE_LOG_ID, String.class);
+  }
+
+  public void setToken(String token) {
+    addParam(SearchCriteriaConstants.PARAM_TOKEN, token);
+  }
+
+  public String getToken() {
+    return getParam(SearchCriteriaConstants.PARAM_TOKEN, String.class);
+  }
+
+  public void setLastPage(boolean lastPage) {
+    addParam(SearchCriteriaConstants.PARAM_IS_LAST_PAGE, lastPage);
+  }
+
+  public boolean isLastPage() {
+    return getParam(SearchCriteriaConstants.PARAM_IS_LAST_PAGE, Boolean.class);
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogTruncatedSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogTruncatedSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogTruncatedSearchCriteria.java
index 24dc9a8..bcdac5b 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogTruncatedSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/ServiceLogTruncatedSearchCriteria.java
@@ -18,8 +18,31 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
-import org.apache.ambari.logsearch.common.Marker;
+import org.apache.ambari.logsearch.query.SearchCriteriaConstants;
 
-@Marker
 public class ServiceLogTruncatedSearchCriteria extends ServiceLogFileSearchCriteria {
+
+  public String getId() {
+    return getParam(SearchCriteriaConstants.PARAM_ID, String.class);
+  }
+
+  public void setId(String id) {
+    addParam(SearchCriteriaConstants.PARAM_ID, id);
+  }
+
+  public String getScrollType() {
+    return getParam(SearchCriteriaConstants.PARAM_SCROLL_TYPE, String.class);
+  }
+
+  public void setScrollType(String scrollType) {
+    addParam(SearchCriteriaConstants.PARAM_SCROLL_TYPE, scrollType);
+  }
+
+  public String getNumberRows() {
+    return getParam(SearchCriteriaConstants.PARAM_NUMBER_ROWS, String.class);
+  }
+
+  public void setNumberRows(String numberRows) {
+    addParam(SearchCriteriaConstants.PARAM_NUMBER_ROWS, numberRows);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserConfigSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserConfigSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserConfigSearchCriteria.java
index 8798cd6..eafff69 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserConfigSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserConfigSearchCriteria.java
@@ -18,8 +18,31 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
-import org.apache.ambari.logsearch.common.Marker;
+import org.apache.ambari.logsearch.query.SearchCriteriaConstants;
 
-@Marker
 public class UserConfigSearchCriteria extends CommonSearchCriteria {
+
+  public String getUserName() {
+    return getParam(SearchCriteriaConstants.PARAM_USER_NAME, String.class);
+  }
+
+  public void setUserName(String userName) {
+    addParam(SearchCriteriaConstants.PARAM_USER_NAME, userName);
+  }
+
+  public String getFilterName() {
+    return getParam(SearchCriteriaConstants.PARAM_FILTER_NAME, String.class);
+  }
+
+  public void setFilterName(String filterName) {
+    addParam(SearchCriteriaConstants.PARAM_FILTER_NAME, filterName);
+  }
+
+  public String getRowType() {
+    return getParam(SearchCriteriaConstants.PARAM_ROW_TYPE, String.class);
+  }
+
+  public void setRowType(String rowType) {
+    addParam(SearchCriteriaConstants.PARAM_ROW_TYPE, rowType);
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserExportSearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserExportSearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserExportSearchCriteria.java
index 755c673..46d13cc 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserExportSearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/model/UserExportSearchCriteria.java
@@ -18,8 +18,14 @@
  */
 package org.apache.ambari.logsearch.query.model;
 
-import org.apache.ambari.logsearch.common.Marker;
+import org.apache.ambari.logsearch.query.SearchCriteriaConstants;
 
-@Marker
 public class UserExportSearchCriteria extends FieldAuditLogSearchCriteria {
+  public void setFormat(String format) {
+    addParam(SearchCriteriaConstants.PARAM_FORMAT, format);
+  }
+
+  public String getFormat() {
+    return getParam(SearchCriteriaConstants.PARAM_FORMAT, String.class);
+  }
 }
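
The three diffs above replace the @Marker annotation with typed getters and setters layered over the shared parameter map of the criteria base classes. A minimal usage sketch (not part of the patch) of that pattern, assuming only the accessors and SearchCriteriaConstants keys shown in the diffs; the literal values are made up for illustration:

  import org.apache.ambari.logsearch.query.model.UserConfigSearchCriteria;

  public class CriteriaUsageExample {
    public static void main(String[] args) {
      UserConfigSearchCriteria criteria = new UserConfigSearchCriteria();
      criteria.setUserName("admin");           // stored under PARAM_USER_NAME (illustrative value)
      criteria.setFilterName("hdfs-filters");  // stored under PARAM_FILTER_NAME (illustrative value)
      criteria.setRowType("history");          // stored under PARAM_ROW_TYPE (illustrative value)

      // Typed reads instead of raw, untyped map lookups on the base class.
      System.out.println(criteria.getUserName() + " owns filter " + criteria.getFilterName());
    }
  }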

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsResource.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsResource.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsResource.java
index 82e21e8..ff185f8 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsResource.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditLogsResource.java
@@ -87,7 +87,7 @@ public class AuditLogsResource {
   @Produces({"application/json"})
   @ApiOperation(GET_AUDIT_COMPONENTS_OD)
   public GroupListResponse getAuditComponents(@BeanParam SimpleQueryRequest request) {
-    return auditLogsManager.getAuditComponents(conversionService.convert(request, SearchCriteria.class));
+    return auditLogsManager.getAuditComponents(conversionService.convert(request, CommonSearchCriteria.class));
   }
 
   @GET

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsResource.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsResource.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsResource.java
index 5400825..6cc69d4 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsResource.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsResource.java
@@ -45,6 +45,7 @@ import org.apache.ambari.logsearch.model.response.NameValueDataListResponse;
 import org.apache.ambari.logsearch.model.response.NodeListResponse;
 import org.apache.ambari.logsearch.model.response.ServiceLogResponse;
 import org.apache.ambari.logsearch.query.model.CommonSearchCriteria;
+import org.apache.ambari.logsearch.query.model.CommonServiceLogSearchCriteria;
 import org.apache.ambari.logsearch.query.model.SearchCriteria;
 import org.apache.ambari.logsearch.manager.ServiceLogsManager;
 import org.apache.ambari.logsearch.query.model.ServiceAnyGraphSearchCriteria;
@@ -100,7 +101,7 @@ public class ServiceLogsResource {
   @Produces({"application/json"})
   @ApiOperation(GET_AGGREGATED_INFO_OD)
   public GraphDataListResponse getAggregatedInfo(@BeanParam BaseServiceLogRequest request) {
-    return serviceLogsManager.getAggregatedInfo(conversionService.convert(request, CommonSearchCriteria.class));
+    return serviceLogsManager.getAggregatedInfo(conversionService.convert(request, CommonServiceLogSearchCriteria.class));
   }
 
   @GET
@@ -132,7 +133,7 @@ public class ServiceLogsResource {
   @Produces({"application/json"})
   @ApiOperation(GET_TREE_EXTENSION_OD)
   public NodeListResponse getTreeExtension(@QueryParam("hostName") @ApiParam String hostName, @BeanParam ServiceLogFileRequest request) {
-    SearchCriteria searchCriteria = conversionService.convert(request, ServiceLogFileSearchCriteria.class);
+    ServiceLogFileSearchCriteria searchCriteria = conversionService.convert(request, ServiceLogFileSearchCriteria.class);
     searchCriteria.addParam("hostName", hostName); // TODO: use host_name instead - needs UI change
     return serviceLogsManager.getTreeExtension(searchCriteria);
   }
@@ -175,7 +176,7 @@ public class ServiceLogsResource {
   @Produces({"application/json"})
   @ApiOperation(GET_HOST_LIST_BY_COMPONENT_OD)
   public NodeListResponse getHostListByComponent(@BeanParam ServiceLogFileRequest request, @QueryParam("componentName") @ApiParam String componentName) {
-    SearchCriteria searchCriteria = conversionService.convert(request, ServiceLogFileSearchCriteria.class);
+    ServiceLogFileSearchCriteria searchCriteria = conversionService.convert(request, ServiceLogFileSearchCriteria.class);
     searchCriteria.addParam("componentName", componentName); // TODO: use component_name instead - needs UI change
     return serviceLogsManager.getHostListByComponent(searchCriteria);
   }
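
The resource methods above now ask the Spring ConversionService for the concrete criteria type, so the extra query parameters can still be attached through addParam() on the returned object. The converters themselves are registered elsewhere in this patch; the following is only a hedged sketch of what one such converter could look like, where the request getters and the import path of ServiceLogFileRequest are assumptions rather than code taken from this diff:

  import org.apache.ambari.logsearch.model.request.impl.ServiceLogFileRequest; // import path assumed
  import org.apache.ambari.logsearch.query.model.ServiceLogFileSearchCriteria;
  import org.springframework.core.convert.converter.Converter;

  public class ServiceLogFileRequestConverter
      implements Converter<ServiceLogFileRequest, ServiceLogFileSearchCriteria> {
    @Override
    public ServiceLogFileSearchCriteria convert(ServiceLogFileRequest request) {
      ServiceLogFileSearchCriteria criteria = new ServiceLogFileSearchCriteria();
      // Copy whatever fields the request carries into the shared parameter map.
      // These getters are assumed for illustration; they are not shown in this diff.
      criteria.addParam("hostName", request.getHostName());
      criteria.addParam("componentName", request.getComponentName());
      return criteria;
    }
  }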

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrAmsClient.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrAmsClient.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrAmsClient.java
deleted file mode 100644
index 85ea69d..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrAmsClient.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.solr.metrics;
-
-import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-
-// TODO: Refactor for failover
-public class SolrAmsClient extends AbstractTimelineMetricsSink {
-  private final String collectorHost;
-
-  public SolrAmsClient(String collectorHost) {
-    this.collectorHost = collectorHost;
-  }
-
-  @Override
-  public String getCollectorUri(String host) {
-    return collectorHost;
-  }
-
-  @Override
-  protected int getTimeoutSeconds() {
-    return 10;
-  }
-
-  @Override
-  protected String getZookeeperQuorum() {
-    return null;
-  }
-
-  @Override
-  protected String getConfiguredCollectors() {
-    return null;
-  }
-
-  @Override
-  protected String getHostname() {
-    return null;
-  }
-
-  @Override
-  protected boolean emitMetrics(TimelineMetrics metrics) {
-    return super.emitMetrics(metrics);
-  }
-
-  @Override
-  protected String getCollectorProtocol() {
-    return null;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrJmxAdapter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrJmxAdapter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrJmxAdapter.java
deleted file mode 100644
index fc58661..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrJmxAdapter.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.solr.metrics;
-
-import java.io.IOException;
-import java.lang.management.MemoryMXBean;
-import java.net.MalformedURLException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-
-import javax.management.JMX;
-import javax.management.MBeanServerConnection;
-import javax.management.MalformedObjectNameException;
-import javax.management.ObjectName;
-import javax.management.remote.JMXConnector;
-import javax.management.remote.JMXConnectorFactory;
-import javax.management.remote.JMXServiceURL;
-
-import com.sun.management.OperatingSystemMXBean;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-@SuppressWarnings("restriction")
-public class SolrJmxAdapter {
-  private static final Logger LOG = LoggerFactory.getLogger(SolrJmxAdapter.class);
-
-  private static final String JMX_SERVICE_URL = "service:jmx:rmi:///jndi/rmi://%s:%d/jmxrmi";
-
-  private final JMXServiceURL jmxServiceUrl;
-
-  private JMXConnector jmxConnector;
-  private MBeanServerConnection conn;
-
-  public SolrJmxAdapter(String host, int port) throws MalformedURLException {
-    String url = String.format(JMX_SERVICE_URL, host, port);
-    jmxServiceUrl = new JMXServiceURL(url);
-  }
-
-  public double getProcessCpuLoad() throws MalformedObjectNameException {
-    ObjectName objectName = new ObjectName("java.lang:type=OperatingSystem");
-    OperatingSystemMXBean mxBean = JMX.newMXBeanProxy(conn, objectName, OperatingSystemMXBean.class);
-    return mxBean.getProcessCpuLoad();
-  }
-
-  public Map<String, Long> getMemoryData() throws MalformedObjectNameException {
-    Map<String, Long> memoryData = new HashMap<>();
-    
-    ObjectName objectName = new ObjectName("java.lang:type=Memory");
-    MemoryMXBean mxBean = JMX.newMXBeanProxy(conn, objectName, MemoryMXBean.class);
-    
-    memoryData.put("heapMemoryUsed", mxBean.getHeapMemoryUsage().getUsed());
-    memoryData.put("heapMemoryCommitted", mxBean.getHeapMemoryUsage().getCommitted());
-    memoryData.put("heapMemoryMax", mxBean.getHeapMemoryUsage().getMax());
-    memoryData.put("nonHeapMemoryUsed", mxBean.getNonHeapMemoryUsage().getUsed());
-    memoryData.put("nonHeapMemoryCommitted", mxBean.getNonHeapMemoryUsage().getCommitted());
-    memoryData.put("nonHeapMemoryMax", mxBean.getNonHeapMemoryUsage().getMax());
-    
-    return memoryData;
-  }
-
-  public long getIndexSize() throws Exception {
-    long indexSize = 0;
-
-    ObjectName objectNamePattern = new ObjectName(
-        "solr/*shard*replica*:type=/replication,id=org.apache.solr.handler.ReplicationHandler");
-    Set<ObjectName> objectNames = conn.queryNames(objectNamePattern, null);
-    for (ObjectName objectName : objectNames) {
-      String indexSizeString = (String) conn.getAttribute(objectName, "indexSize");
-      indexSize += getIndexSizeInBytes(indexSizeString);
-    }
-
-    return indexSize;
-  }
-
-  private long getIndexSizeInBytes(String indexSizeString) {
-    String[] tokens = indexSizeString.split(" ");
-    double number = Double.parseDouble(tokens[0]);
-
-    long multiplier = 0;
-    switch (tokens[1]) {
-      case "bytes":
-        multiplier = 1;
-        break;
-      case "KB":
-        multiplier = 1024;
-        break;
-      case "MB":
-        multiplier = 1024 * 1024;
-        break;
-      case "GB":
-        multiplier = 1024 * 1024 * 1024;
-        break;
-      default:
-        throw new IllegalArgumentException("Unknown unit: " + tokens[1]);
-    }
-    
-    return (long)(number * multiplier);
-  }
-
-  public void reConnect() throws IOException {
-    if (jmxConnector != null) {
-      try {
-        jmxConnector.close();
-      } catch (IOException e) {
-        LOG.info("Could not close jmxConnector", e);
-      }
-    }
-
-    connect();
-  }
-
-  public void connect() throws IOException {
-    jmxConnector = JMXConnectorFactory.connect(jmxServiceUrl);
-    conn = jmxConnector.getMBeanServerConnection();
-  }
-}


[35/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
new file mode 100644
index 0000000..13df470
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
@@ -0,0 +1,221 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.manager;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+import java.util.Scanner;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.common.MessageEnums;
+import org.apache.ambari.logsearch.model.response.LogData;
+import org.apache.ambari.logsearch.model.response.LogSearchResponse;
+import org.apache.ambari.logsearch.query.model.SearchCriteria;
+import org.apache.ambari.logsearch.dao.SolrDaoBase;
+import org.apache.ambari.logsearch.query.QueryGeneration;
+import org.apache.ambari.logsearch.util.DateUtil;
+import org.apache.ambari.logsearch.util.JSONUtil;
+import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.ambari.logsearch.util.SolrUtil;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocumentList;
+import org.apache.solr.common.SolrException;
+
+import javax.inject.Inject;
+
+public abstract class ManagerBase<LOG_DATA_TYPE extends LogData, SEARCH_RESPONSE extends LogSearchResponse> extends JsonManagerBase {
+  private static final Logger logger = Logger.getLogger(ManagerBase.class);
+
+  @Inject
+  protected QueryGeneration queryGenerator;
+
+  public enum LogType {
+    SERVICE("Service"),
+    AUDIT("Audit");
+    
+    private String label;
+    
+    private LogType(String label) {
+      this.label = label;
+    }
+    
+    public String getLabel() {
+      return label;
+    }
+  }
+
+  public ManagerBase() {
+    super();
+  }
+
+  public String getHadoopServiceConfigJSON() {
+    StringBuilder result = new StringBuilder("");
+
+    // Get file from resources folder
+    ClassLoader classLoader = getClass().getClassLoader();
+    File file = new File(classLoader.getResource("HadoopServiceConfig.json").getFile());
+
+    try (Scanner scanner = new Scanner(file)) {
+
+      while (scanner.hasNextLine()) {
+        String line = scanner.nextLine();
+        result.append(line).append("\n");
+      }
+
+      scanner.close();
+
+    } catch (IOException e) {
+      logger.error("Unable to read HadoopServiceConfig.json", e);
+      throw RESTErrorUtil.createRESTException(e.getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+
+    String hadoopServiceConfig = result.toString();
+    if (JSONUtil.isJSONValid(hadoopServiceConfig)) {
+      return hadoopServiceConfig;
+    }
+    throw RESTErrorUtil.createRESTException("Improper JSON", MessageEnums.ERROR_SYSTEM);
+
+  }
+  
+  protected SEARCH_RESPONSE getLastPage(SearchCriteria searchCriteria, String logTimeField, SolrDaoBase solrDoaBase,
+                                    SolrQuery lastPageQuery) {
+    
+    Integer maxRows = searchCriteria.getMaxRows();
+    String givenSortType = searchCriteria.getSortType();
+    searchCriteria = new SearchCriteria();
+    searchCriteria.setSortBy(logTimeField);
+    if (givenSortType == null || givenSortType.equals(LogSearchConstants.DESCENDING_ORDER)) {
+      lastPageQuery.removeSort(LogSearchConstants.LOGTIME);
+      searchCriteria.setSortType(LogSearchConstants.ASCENDING_ORDER);
+    } else {
+      searchCriteria.setSortType(LogSearchConstants.DESCENDING_ORDER);
+    }
+    queryGenerator.setSingleSortOrder(lastPageQuery, searchCriteria);
+
+
+    Long totalLogs = 0l;
+    int startIndex = 0;
+    int numberOfLogsOnLastPage = 0;
+    SEARCH_RESPONSE logResponse = null;
+    try {
+      SolrUtil.setStart(lastPageQuery, 0);
+      SolrUtil.setRowCount(lastPageQuery, maxRows);
+      logResponse = getLogAsPaginationProvided(lastPageQuery, solrDoaBase);
+      totalLogs = countQuery(lastPageQuery,solrDoaBase);
+      startIndex = Integer.parseInt("" + ((totalLogs / maxRows) * maxRows));
+      numberOfLogsOnLastPage = Integer.parseInt("" + (totalLogs - startIndex));
+      logResponse.setStartIndex(startIndex);
+      logResponse.setTotalCount(totalLogs);
+      logResponse.setPageSize(maxRows);
+      List<LOG_DATA_TYPE> docList = logResponse.getLogList();
+      List<LOG_DATA_TYPE> lastPageDocList = new ArrayList<>();
+      logResponse.setLogList(lastPageDocList);
+      int cnt = 0;
+      for(LOG_DATA_TYPE doc:docList){
+        if(cnt<numberOfLogsOnLastPage){
+          lastPageDocList.add(doc);
+        }
+        cnt++;
+      }
+      Collections.reverse(lastPageDocList);
+
+    } catch (SolrException | SolrServerException | IOException | NumberFormatException e) {
+      logger.error("Count Query was not executed successfully",e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+    return logResponse;
+  }
+
+  protected SEARCH_RESPONSE getLogAsPaginationProvided(SolrQuery solrQuery, SolrDaoBase solrDaoBase) {
+    try {
+      QueryResponse response = solrDaoBase.process(solrQuery);
+      SEARCH_RESPONSE logResponse = createLogSearchResponse();
+      SolrDocumentList docList = response.getResults();
+      List<LOG_DATA_TYPE> serviceLogDataList = convertToSolrBeans(response);
+      if (docList != null && !docList.isEmpty()) {
+        logResponse.setLogList(serviceLogDataList);
+        logResponse.setStartIndex((int) docList.getStart());
+        logResponse.setTotalCount(docList.getNumFound());
+        Integer rowNumber = solrQuery.getRows();
+        if (rowNumber == null) {
+          logger.error("No RowNumber was set in solrQuery");
+          return createLogSearchResponse();
+        }
+        logResponse.setPageSize(rowNumber);
+      }
+      return logResponse;
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error("Error during solrQuery=" + solrQuery, e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+  }
+  
+  protected Long countQuery(SolrQuery query,SolrDaoBase solrDaoBase) throws SolrException, SolrServerException, IOException {
+    query.setRows(0);
+    QueryResponse response = solrDaoBase.process(query);
+    if (response == null) {
+      return 0l;
+    }
+    SolrDocumentList docList = response.getResults();
+    if (docList == null) {
+      return 0l;
+    }
+    return docList.getNumFound();
+  }
+
+  protected String getUnit(String unit) {
+    if (StringUtils.isBlank(unit)) {
+      unit = "+1HOUR";
+    }
+    return unit;
+  }
+
+  protected String getFrom(String from) {
+    if (StringUtils.isBlank(from)) {
+      Date date = DateUtil.getTodayFromDate();
+      try {
+        from = DateUtil.convertGivenDateFormatToSolrDateFormat(date);
+      } catch (ParseException e) {
+        from = "NOW";
+      }
+    }
+    return from;
+  }
+
+  protected String getTo(String to) {
+    if (StringUtils.isBlank(to)) {
+      to = "NOW";
+    }
+    return to;
+  }
+
+  protected abstract List<LOG_DATA_TYPE> convertToSolrBeans(QueryResponse response);
+
+  protected abstract SEARCH_RESPONSE createLogSearchResponse();
+}
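
For readers following the generic rewrite of getLastPage() above, the last-page pagination reduces to integer arithmetic over the total hit count and the page size. A standalone sketch (not part of the patch) that uses a plain cast instead of the Integer.parseInt("" + ...) round-trip kept in the committed code:

  public class LastPageMathExample {
    public static void main(String[] args) {
      long totalLogs = 1234L;  // illustrative total hit count
      int maxRows = 100;       // illustrative page size
      // Integer division truncates, so startIndex is the offset of the last full page boundary.
      int startIndex = (int) ((totalLogs / maxRows) * maxRows);     // 1200
      int numberOfLogsOnLastPage = (int) (totalLogs - startIndex);  // 34
      System.out.println(startIndex + " " + numberOfLogsOnLastPage);
    }
  }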

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
deleted file mode 100644
index c0be79d..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.manager;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.ParseException;
-import java.util.Collections;
-import java.util.Date;
-import java.util.Scanner;
-
-import org.apache.ambari.logsearch.common.LogSearchConstants;
-import org.apache.ambari.logsearch.common.MessageEnums;
-import org.apache.ambari.logsearch.common.SearchCriteria;
-import org.apache.ambari.logsearch.dao.SolrDaoBase;
-import org.apache.ambari.logsearch.query.QueryGeneration;
-import org.apache.ambari.logsearch.util.DateUtil;
-import org.apache.ambari.logsearch.util.JSONUtil;
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
-import org.apache.ambari.logsearch.util.SolrUtil;
-import org.apache.ambari.logsearch.view.VSolrLogList;
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Logger;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.SolrDocument;
-import org.apache.solr.common.SolrDocumentList;
-import org.apache.solr.common.SolrException;
-import org.springframework.beans.factory.annotation.Autowired;
-
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.google.gson.JsonDeserializationContext;
-import com.google.gson.JsonDeserializer;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonParseException;
-import com.google.gson.JsonPrimitive;
-import com.google.gson.JsonSerializationContext;
-import com.google.gson.JsonSerializer;
-
-public class MgrBase {
-  private static final Logger logger = Logger.getLogger(MgrBase.class);
-
-  @Autowired
-  protected QueryGeneration queryGenerator;
-
-  private JsonSerializer<Date> jsonDateSerialiazer = null;
-  private JsonDeserializer<Date> jsonDateDeserialiazer = null;
-
-  public enum LogType {
-    SERVICE("Service"),
-    AUDIT("Audit");
-    
-    private String label;
-    
-    private LogType(String label) {
-      this.label = label;
-    }
-    
-    public String getLabel() {
-      return label;
-    }
-  }
-
-  public MgrBase() {
-    jsonDateSerialiazer = new JsonSerializer<Date>() {
-
-      @Override
-      public JsonElement serialize(Date paramT, java.lang.reflect.Type paramType, JsonSerializationContext paramJsonSerializationContext) {
-        return paramT == null ? null : new JsonPrimitive(paramT.getTime());
-      }
-    };
-
-    jsonDateDeserialiazer = new JsonDeserializer<Date>() {
-
-      @Override
-      public Date deserialize(JsonElement json, java.lang.reflect.Type typeOfT, JsonDeserializationContext context)
-          throws JsonParseException {
-        return json == null ? null : new Date(json.getAsLong());
-      }
-
-    };
-  }
-
-  protected String convertObjToString(Object obj) {
-    if (obj == null) {
-      return "";
-    }
-
-    Gson gson = new GsonBuilder()
-        .registerTypeAdapter(Date.class, jsonDateSerialiazer)
-        .registerTypeAdapter(Date.class, jsonDateDeserialiazer).create();
-
-    return gson.toJson(obj);
-  }
-
-  public String getHadoopServiceConfigJSON() {
-    StringBuilder result = new StringBuilder("");
-
-    // Get file from resources folder
-    ClassLoader classLoader = getClass().getClassLoader();
-    File file = new File(classLoader.getResource("HadoopServiceConfig.json").getFile());
-
-    try (Scanner scanner = new Scanner(file)) {
-
-      while (scanner.hasNextLine()) {
-        String line = scanner.nextLine();
-        result.append(line).append("\n");
-      }
-
-      scanner.close();
-
-    } catch (IOException e) {
-      logger.error("Unable to read HadoopServiceConfig.json", e);
-      throw RESTErrorUtil.createRESTException(e.getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-
-    String hadoopServiceConfig = result.toString();
-    if (JSONUtil.isJSONValid(hadoopServiceConfig)) {
-      return hadoopServiceConfig;
-    }
-    throw RESTErrorUtil.createRESTException("Improper JSON", MessageEnums.ERROR_SYSTEM);
-
-  }
-  
-  protected VSolrLogList getLastPage(SearchCriteria searchCriteria, String logTimeField, SolrDaoBase solrDoaBase,
-      SolrQuery lastPageQuery) {
-    
-    Integer maxRows = searchCriteria.getMaxRows();
-    String givenSortType = searchCriteria.getSortType();
-    searchCriteria = new SearchCriteria();
-    searchCriteria.setSortBy(logTimeField);
-    if (givenSortType == null || givenSortType.equals(LogSearchConstants.DESCENDING_ORDER)) {
-      lastPageQuery.removeSort(LogSearchConstants.LOGTIME);
-      searchCriteria.setSortType(LogSearchConstants.ASCENDING_ORDER);
-    } else {
-      searchCriteria.setSortType(LogSearchConstants.DESCENDING_ORDER);
-    }
-    queryGenerator.setSingleSortOrder(lastPageQuery, searchCriteria);
-
-
-    Long totalLogs = 0l;
-    int startIndex = 0;
-    int numberOfLogsOnLastPage = 0;
-    VSolrLogList collection = null;
-    try {
-      SolrUtil.setStart(lastPageQuery, 0);
-      SolrUtil.setRowCount(lastPageQuery, maxRows);
-      collection = getLogAsPaginationProvided(lastPageQuery, solrDoaBase);
-      totalLogs = countQuery(lastPageQuery,solrDoaBase);
-      if(maxRows != null){
-        startIndex = Integer.parseInt("" + ((totalLogs/maxRows) * maxRows));
-        numberOfLogsOnLastPage = Integer.parseInt("" + (totalLogs-startIndex));
-      }
-      collection.setStartIndex(startIndex);
-      collection.setTotalCount(totalLogs);
-      collection.setPageSize(maxRows);
-      SolrDocumentList docList = collection.getList();
-      SolrDocumentList lastPageDocList = new SolrDocumentList();
-      collection.setSolrDocuments(lastPageDocList);
-      int cnt = 0;
-      for(SolrDocument doc:docList){
-        if(cnt<numberOfLogsOnLastPage){
-          lastPageDocList.add(doc);
-        }
-        cnt++;
-      }
-      Collections.reverse(lastPageDocList);
-
-    } catch (SolrException | SolrServerException | IOException | NumberFormatException e) {
-      logger.error("Count Query was not executed successfully",e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-    return collection;
-  }
-
-  protected VSolrLogList getLogAsPaginationProvided(SolrQuery solrQuery, SolrDaoBase solrDaoBase) {
-    try {
-      QueryResponse response = solrDaoBase.process(solrQuery);
-      VSolrLogList collection = new VSolrLogList();
-      SolrDocumentList docList = response.getResults();
-      if (docList != null && !docList.isEmpty()) {
-        collection.setSolrDocuments(docList);
-        collection.setStartIndex((int) docList.getStart());
-        collection.setTotalCount(docList.getNumFound());
-        Integer rowNumber = solrQuery.getRows();
-        if (rowNumber == null) {
-          logger.error("No RowNumber was set in solrQuery");
-          return new VSolrLogList();
-        }
-        collection.setPageSize(rowNumber);
-      }
-      return collection;
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-  }
-  
-  protected Long countQuery(SolrQuery query,SolrDaoBase solrDaoBase) throws SolrException, SolrServerException, IOException {
-    query.setRows(0);
-    QueryResponse response = solrDaoBase.process(query);
-    if (response == null) {
-      return 0l;
-    }
-    SolrDocumentList docList = response.getResults();
-    if (docList == null) {
-      return 0l;
-    }
-    return docList.getNumFound();
-  }
-
-  protected String getUnit(String unit) {
-    if (StringUtils.isBlank(unit)) {
-      unit = "+1HOUR";
-    }
-    return unit;
-  }
-
-  protected String getFrom(String from) {
-    if (StringUtils.isBlank(from)) {
-      Date date = DateUtil.getTodayFromDate();
-      try {
-        from = DateUtil.convertGivenDateFormatToSolrDateFormat(date);
-      } catch (ParseException e) {
-        from = "NOW";
-      }
-    }
-    return from;
-  }
-
-  protected String getTo(String to) {
-    if (StringUtils.isBlank(to)) {
-      to = "NOW";
-    }
-    return to;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicManager.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicManager.java
new file mode 100644
index 0000000..23f62aa
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicManager.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.manager;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.ambari.logsearch.model.response.NameValueData;
+import org.apache.ambari.logsearch.model.response.NameValueDataListResponse;
+import org.apache.ambari.logsearch.web.security.LogsearchSimpleAuthenticationProvider;
+import org.springframework.stereotype.Component;
+
+import javax.inject.Inject;
+
+@Component
+public class PublicManager extends JsonManagerBase {
+
+  @Inject
+  private LogsearchSimpleAuthenticationProvider simpleAuthenticationProvider;
+
+  public String getGeneralConfig() {
+    NameValueDataListResponse nameValueList = new NameValueDataListResponse();
+    List<NameValueData> nameValues = new ArrayList<>();
+    NameValueData nameValue = new NameValueData();
+    nameValue.setName("simpleAuth");
+    nameValue.setValue("" + simpleAuthenticationProvider.isEnable());
+    nameValues.add(nameValue);
+    nameValueList.setvNameValues(nameValues);
+    return convertObjToString(nameValueList);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicMgr.java
deleted file mode 100644
index 398d270..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicMgr.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.manager;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.ambari.logsearch.view.VNameValue;
-import org.apache.ambari.logsearch.view.VNameValueList;
-import org.apache.ambari.logsearch.web.security.LogsearchSimpleAuthenticationProvider;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
-
-@Component
-public class PublicMgr extends MgrBase {
-  @Autowired
-  private LogsearchSimpleAuthenticationProvider simpleAuthenticationProvider;
-
-  public String getGeneralConfig() {
-    VNameValueList nameValueList = new VNameValueList();
-    List<VNameValue> nameValues = new ArrayList<VNameValue>();
-    VNameValue nameValue = new VNameValue();
-    nameValue.setName("simpleAuth");
-    nameValue.setValue("" + simpleAuthenticationProvider.isEnable());
-    nameValues.add(nameValue);
-    nameValueList.setVNameValues(nameValues);
-    return convertObjToString(nameValueList);
-  }
-}


[45/50] [abbrv] ambari git commit: AMBARI-18246. Clean up Log Feeder (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputManagerTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputManagerTest.java
new file mode 100644
index 0000000..368a930
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputManagerTest.java
@@ -0,0 +1,241 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.logfeeder.input;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.junit.Test;
+
+public class InputManagerTest {
+
+  @Test
+  public void testInputManager_addAndRemoveInputs() {
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    Input input4 = strictMock(Input.class);
+    
+    expect(input3.getShortDescription()).andReturn("").times(2);
+    expect(input4.getShortDescription()).andReturn("").once();
+    
+    replay(input1, input2, input3, input4);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.removeInput(input3);
+    manager.removeInput(input4);
+    
+    verify(input1, input2, input3, input4);
+    
+    List<Input> inputList = manager.getInputList();
+    assertEquals(inputList.size(), 2);
+    assertEquals(inputList.get(0), input1);
+    assertEquals(inputList.get(1), input2);
+  }
+
+  @Test
+  public void testInputManager_init() throws Exception {
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    
+    input1.init(); expectLastCall();
+    input2.init(); expectLastCall();
+    input3.init(); expectLastCall();
+    
+    expect(input1.isTail()).andReturn(false);
+    expect(input2.isTail()).andReturn(false);
+    expect(input3.isTail()).andReturn(false);
+    
+    replay(input1, input2, input3);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.init();
+    
+    verify(input1, input2, input3);
+  }
+
+  @Test
+  public void testInputManager_monitor() throws Exception {
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    
+    expect(input1.isReady()).andReturn(true);
+    expect(input2.isReady()).andReturn(true);
+    expect(input3.isReady()).andReturn(false);
+    
+    expect(input1.monitor()).andReturn(false);
+    expect(input2.monitor()).andReturn(false);
+    expect(input3.isTail()).andReturn(false);
+    expect(input3.getShortDescription()).andReturn("").once();
+    
+    replay(input1, input2, input3);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.monitor();
+    
+    verify(input1, input2, input3);
+  }
+  
+
+  @Test
+  public void testInputManager_addMetricsContainers() throws Exception {
+    List<MetricData> metrics = new ArrayList<MetricData>();
+    
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    
+    input1.addMetricsContainers(metrics); expectLastCall();
+    input2.addMetricsContainers(metrics); expectLastCall();
+    input3.addMetricsContainers(metrics); expectLastCall();
+    
+    expect(input1.isReady()).andReturn(true);
+    expect(input2.isReady()).andReturn(true);
+    expect(input3.isReady()).andReturn(false);
+    
+    replay(input1, input2, input3);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.addMetricsContainers(metrics);
+    
+    verify(input1, input2, input3);
+  }
+
+  @Test
+  public void testInputManager_logStat() throws Exception {
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    
+    input1.logStat(); expectLastCall();
+    input2.logStat(); expectLastCall();
+    input3.logStat(); expectLastCall();
+    
+    expect(input1.isReady()).andReturn(true);
+    expect(input2.isReady()).andReturn(true);
+    expect(input3.isReady()).andReturn(false);
+    
+    replay(input1, input2, input3);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.logStats();
+    
+    verify(input1, input2, input3);
+  }
+
+  @Test
+  public void testInputManagr_waitOnAllInputs() throws Exception {
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    
+    Thread mockThread = strictMock(Thread.class);
+    
+    expect(input1.getThread()).andReturn(null);
+    expect(input2.getThread()).andReturn(null);
+    expect(input3.getThread()).andReturn(mockThread);
+    
+    mockThread.join(); expectLastCall();
+    
+    replay(input1, input2, input3);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.waitOnAllInputs();
+    
+    verify(input1, input2, input3);
+  }
+
+  @Test
+  public void testInputManager_checkInAll() throws Exception {
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    
+    input1.lastCheckIn(); expectLastCall();
+    input2.lastCheckIn(); expectLastCall();
+    input3.lastCheckIn(); expectLastCall();
+    
+    replay(input1, input2, input3);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.checkInAll();
+    
+    verify(input1, input2, input3);
+  }
+
+  @Test
+  public void testInputManager_close() throws Exception {
+    Input input1 = strictMock(Input.class);
+    Input input2 = strictMock(Input.class);
+    Input input3 = strictMock(Input.class);
+    
+    input1.setDrain(true); expectLastCall();
+    input2.setDrain(true); expectLastCall();
+    input3.setDrain(true); expectLastCall();
+    
+    expect(input1.isClosed()).andReturn(true);
+    expect(input2.isClosed()).andReturn(true);
+    expect(input3.isClosed()).andReturn(true);
+    
+    replay(input1, input2, input3);
+    
+    InputManager manager = new InputManager();
+    manager.add(input1);
+    manager.add(input2);
+    manager.add(input3);
+    
+    manager.close();
+    
+    verify(input1, input2, input3);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandlerTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandlerTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandlerTest.java
new file mode 100644
index 0000000..02ffd47
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandlerTest.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.logfeeder.logconfig;
+
+import java.lang.reflect.Field;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class LogConfigHandlerTest {
+  
+  private static LogConfigFetcher mockFetcher;
+  
+  private static final Map<String, Object> CONFIG_MAP = new HashMap<>();
+  static {
+    CONFIG_MAP.put("jsons",
+        "{'filter':{" +
+          "'configured_log_file':{" +
+            "'label':'configured_log_file'," +
+            "'hosts':[]," +
+            "'defaultLevels':['FATAL','ERROR','WARN','INFO']," +
+            "'overrideLevels':[]}," +
+          "'configured_log_file2':{" +
+            "'label':'configured_log_file2'," +
+            "'hosts':['host1']," +
+            "'defaultLevels':['FATAL','ERROR','WARN','INFO']," +
+            "'overrideLevels':['FATAL','ERROR','WARN','INFO','DEBUG','TRACE']," +
+            "'expiryTime':'3000-01-01T00:00:00.000Z'}," +
+          "'configured_log_file3':{" +
+            "'label':'configured_log_file3'," +
+            "'hosts':['host1']," +
+            "'defaultLevels':['FATAL','ERROR','WARN','INFO']," +
+            "'overrideLevels':['FATAL','ERROR','WARN','INFO','DEBUG','TRACE']," +
+            "'expiryTime':'1000-01-01T00:00:00.000Z'}" +
+          "}}");
+  }
+  
+  @BeforeClass
+  public static void init() throws Exception {
+    mockFetcher = strictMock(LogConfigFetcher.class);
+    Field f = LogConfigFetcher.class.getDeclaredField("instance");
+    f.setAccessible(true);
+    f.set(null, mockFetcher);
+    expect(mockFetcher.getConfigDoc()).andReturn(CONFIG_MAP).anyTimes();
+    replay(mockFetcher);
+    
+    LogFeederUtil.loadProperties("logfeeder.properties", null);
+    LogConfigHandler.handleConfig();
+    Thread.sleep(1000);
+  }
+  
+  @Test
+  public void testLogConfigHandler_emptyDataAllowed() throws Exception {
+    assertTrue(FilterLogData.INSTANCE.isAllowed((String)null));
+    assertTrue(FilterLogData.INSTANCE.isAllowed(""));
+    assertTrue(FilterLogData.INSTANCE.isAllowed(Collections.<String, Object> emptyMap()));
+  }
+  
+  @Test
+  public void testLogConfigHandler_notConfiguredLogAllowed() throws Exception {
+    assertTrue(FilterLogData.INSTANCE.isAllowed("{'host':'host1', 'type':'not_configured_log_file', 'level':'INFO'}"));
+  }
+  
+  @Test
+  public void testLogConfigHandler_configuredDataAllow() throws Exception {
+    assertTrue(FilterLogData.INSTANCE.isAllowed("{'host':'host1', 'type':'configured_log_file', 'level':'INFO'}"));
+  }
+  
+  @Test
+  public void testLogConfigHandler_configuredDataDontAllow() throws Exception {
+    assertFalse(FilterLogData.INSTANCE.isAllowed("{'host':'host1', 'type':'configured_log_file', 'level':'DEBUG'}"));
+  }
+  
+  @Test
+  public void testLogConfigHandler_overridenConfiguredData() throws Exception {
+    assertTrue(FilterLogData.INSTANCE.isAllowed("{'host':'host1', 'type':'configured_log_file2', 'level':'DEBUG'}"));
+  }
+  
+  @Test
+  public void testLogConfigHandler_overridenConfiguredDataDifferentHost() throws Exception {
+    assertFalse(FilterLogData.INSTANCE.isAllowed("{'host':'host2', 'type':'configured_log_file2', 'level':'DEBUG'}"));
+  }
+  
+  @Test
+  public void testLogConfigHandler_overridenConfiguredDataExpired() throws Exception {
+    assertFalse(FilterLogData.INSTANCE.isAllowed("{'host':'host1', 'type':'configured_log_file3', 'level':'DEBUG'}"));
+  }
+  
+  @AfterClass
+  public static void finish() {
+    verify(mockFetcher);
+  }
+}
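
The fixture above encodes, per log file, a default level list plus an optional per-host override with an expiry time, and the test cases assert which host/level combinations pass the filter. A standalone approximation of that decision, written only from the fixture and the assertions; the production FilterLogData/LogConfigHandler code may differ in detail:

  import java.time.Instant;
  import java.util.Arrays;
  import java.util.List;

  public class LevelFilterSketch {
    // Approximation of the decision the assertions above exercise; field names mirror
    // the JSON fixture, not the production classes.
    static boolean isAllowed(String host, String level, List<String> filterHosts,
        List<String> defaultLevels, List<String> overrideLevels, Instant expiry, Instant now) {
      boolean overrideActive = filterHosts.contains(host)
          && !overrideLevels.isEmpty()
          && expiry != null
          && now.isBefore(expiry);
      List<String> effective = overrideActive ? overrideLevels : defaultLevels;
      return effective.contains(level);
    }

    public static void main(String[] args) {
      List<String> defaults = Arrays.asList("FATAL", "ERROR", "WARN", "INFO");
      List<String> override = Arrays.asList("FATAL", "ERROR", "WARN", "INFO", "DEBUG", "TRACE");
      Instant farFuture = Instant.parse("3000-01-01T00:00:00.000Z");

      // host1 has an unexpired override, so DEBUG passes; host2 falls back to the defaults.
      System.out.println(isAllowed("host1", "DEBUG", Arrays.asList("host1"),
          defaults, override, farFuture, Instant.now()));   // true
      System.out.println(isAllowed("host2", "DEBUG", Arrays.asList("host1"),
          defaults, override, farFuture, Instant.now()));   // false
    }
  }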

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
index 301dea9..667c9ff 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -24,7 +24,6 @@ import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.commons.lang3.time.DateUtils;
 import org.apache.log4j.Logger;
 import org.junit.Test;
@@ -61,7 +60,7 @@ public class MapperDateTest {
     LOG.info("testMapperDate_pattern()");
 
     Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", LogFeederUtil.DATE_FORMAT);
+    mapConfigs.put("target_date_pattern", "yyyy-MM-dd HH:mm:ss.SSS");
 
     MapperDate mapperDate = new MapperDate();
     assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapConfigs));
@@ -70,7 +69,7 @@ public class MapperDateTest {
     String dateString = "2016-04-08 15:55:23.548";
     Object mappedValue = mapperDate.apply(jsonObj, dateString);
 
-    Date d = new SimpleDateFormat(LogFeederUtil.DATE_FORMAT).parse(dateString);
+    Date d = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").parse(dateString);
 
     assertEquals("Value wasn't matched properly", d, mappedValue);
     assertEquals("Value wasn't put into jsonObj", d, jsonObj.remove("someField"));
@@ -130,7 +129,7 @@ public class MapperDateTest {
     LOG.info("testMapperDate_invalidDateStringValue()");
 
     Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", LogFeederUtil.DATE_FORMAT);
+    mapConfigs.put("target_date_pattern", "yyyy-MM-dd HH:mm:ss.SSS");
 
     MapperDate mapperDate = new MapperDate();
     assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapConfigs));
@@ -149,7 +148,7 @@ public class MapperDateTest {
     String fieldName = "logtime";
     Calendar currentCalendar = Calendar.getInstance();
     Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", LogFeederUtil.DATE_FORMAT);
+    mapConfigs.put("target_date_pattern", "yyyy-MM-dd HH:mm:ss.SSS");
     String srcDatePattern ="MMM dd HH:mm:ss";
     mapConfigs.put("src_date_pattern", srcDatePattern);
     MapperDate mapperDate = new MapperDate();
@@ -160,7 +159,7 @@ public class MapperDateTest {
     nextMonthCalendar.set(Calendar.MONTH, currentCalendar.get(Calendar.MONTH)+1 );
     String inputDateStr = new SimpleDateFormat("MMM").format(nextMonthCalendar.getTime()) + " 01 12:01:45";
     Object mappedValue = mapperDate.apply(jsonObj, inputDateStr);
-    Date mappedDateValue = new SimpleDateFormat(LogFeederUtil.DATE_FORMAT).parse(mappedValue.toString());
+    Date mappedDateValue = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").parse(mappedValue.toString());
     String mappedDateValueStr = new SimpleDateFormat(srcDatePattern).format(mappedDateValue);
     assertEquals(Date.class, mappedDateValue.getClass());
     
@@ -179,7 +178,7 @@ public class MapperDateTest {
     String fieldName = "logtime";
     Calendar currentCalendar = Calendar.getInstance();
     Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", LogFeederUtil.DATE_FORMAT);
+    mapConfigs.put("target_date_pattern", "yyyy-MM-dd HH:mm:ss.SSS");
     String srcDatePattern ="MMM dd HH:mm:ss";
     mapConfigs.put("src_date_pattern", srcDatePattern);
     MapperDate mapperDate = new MapperDate();
@@ -187,7 +186,7 @@ public class MapperDateTest {
     Map<String, Object> jsonObj = new HashMap<>();
     String inputDateStr = new SimpleDateFormat("MMM").format(currentCalendar.getTime()) + " 01 12:01:45";
     Object mappedValue = mapperDate.apply(jsonObj, inputDateStr);
-    Date mappedDateValue = new SimpleDateFormat(LogFeederUtil.DATE_FORMAT).parse(mappedValue.toString());
+    Date mappedDateValue = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").parse(mappedValue.toString());
     String mappedDateValueStr = new SimpleDateFormat(srcDatePattern).format(mappedDateValue);
     assertEquals(Date.class, mappedDateValue.getClass());
     int expectedLogYear = currentCalendar.get(Calendar.YEAR);

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java
index 6edf766..8ecaad1 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java
index df84b8e..fce4308 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/metrics/MetrcisManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/metrics/MetrcisManagerTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/metrics/MetrcisManagerTest.java
new file mode 100644
index 0000000..8ee6d00
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/metrics/MetrcisManagerTest.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.logfeeder.metrics;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+import org.easymock.Capture;
+import org.easymock.CaptureType;
+import org.easymock.EasyMock;
+
+import java.lang.reflect.Field;
+import java.util.Arrays;
+import java.util.List;
+import java.util.TreeMap;
+
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class MetrcisManagerTest {
+
+  private MetricsManager manager;
+  private LogFeederAMSClient mockClient;
+  private Capture<TimelineMetrics> capture;
+  
+  @BeforeClass
+  public static void loadProperties() throws Exception {
+    LogFeederUtil.loadProperties("logfeeder.properties", null);
+  }
+  
+  @Before
+  public void init() throws Exception {
+    manager = new MetricsManager();
+    manager.init();
+    
+    mockClient = strictMock(LogFeederAMSClient.class);
+    Field f = MetricsManager.class.getDeclaredField("amsClient");
+    f.setAccessible(true);
+    f.set(manager, mockClient);
+    
+    capture = EasyMock.newCapture(CaptureType.FIRST);
+    mockClient.emitMetrics(EasyMock.capture(capture));
+    EasyMock.expectLastCall().andReturn(true).once();
+    
+    replay(mockClient);
+  }
+  
+  @Test
+  public void testMetricManager_pointInTime() throws Exception {
+    MetricData metricCount1 = new MetricData("metric1", true);
+    metricCount1.value = 123;
+    metricCount1.prevPublishValue = 0;
+    metricCount1.publishCount = 0;
+    
+    manager.useMetrics(Arrays.asList(metricCount1));
+    
+    verify(mockClient);
+    
+    TimelineMetrics metrics = capture.getValue();
+    List<TimelineMetric> metricList = metrics.getMetrics();
+    assertEquals(metricList.size(), 1);
+    
+    TimelineMetric metric = metricList.get(0);
+    assertEquals(metric.getHostName(), "test_host_name");
+    assertEquals(metric.getAppId(), "logfeeder");
+    assertEquals(metric.getMetricName(), "metric1");
+    assertEquals(metric.getType(), "Long");
+    
+    TreeMap<Long, Double> values = metric.getMetricValues();
+    assertEquals(values.size(), 1);
+    assertEquals(values.firstEntry().getValue(), Double.valueOf(123.0));
+  }
+  
+  @Test
+  public void testMetricManager_notPointInTime() throws Exception {
+    MetricData metricCount1 = new MetricData("metric1", false);
+    metricCount1.value = 123;
+    metricCount1.prevPublishValue = 0;
+    metricCount1.publishCount = 0;
+    
+    MetricData metricCount2 = new MetricData("metric1", false);
+    metricCount2.value = 123;
+    metricCount2.prevPublishValue = 100;
+    metricCount2.publishCount = 0;
+    
+    MetricData metricCount3 = new MetricData("metric1", false); // not included due to decrease of count
+    metricCount3.value = 99;
+    metricCount3.prevPublishValue = 100;
+    metricCount3.publishCount = 1;
+    
+    manager.useMetrics(Arrays.asList(metricCount1, metricCount2, metricCount3));
+    
+    verify(mockClient);
+    
+    TimelineMetrics metrics = capture.getValue();
+    List<TimelineMetric> metricList = metrics.getMetrics();
+    assertEquals(metricList.size(), 1);
+    
+    TimelineMetric metric = metricList.get(0);
+    assertEquals(metric.getHostName(), "test_host_name");
+    assertEquals(metric.getAppId(), "logfeeder");
+    assertEquals(metric.getMetricName(), "metric1");
+    assertEquals(metric.getType(), "Long");
+    
+    TreeMap<Long, Double> values = metric.getMetricValues();
+    assertEquals(values.size(), 1);
+    assertEquals(values.firstEntry().getValue(), Double.valueOf(146.0));
+  }
+}
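
The 146.0 expected in testMetricManager_notPointInTime follows from the delta-style aggregation these assertions imply: each non point-in-time metric contributes value minus prevPublishValue, and a metric whose value dropped below the previously published value is skipped, so the sum is (123 - 0) + (123 - 100) = 146 with the third metric excluded. A minimal self-contained sketch of that rule (an illustration of the arithmetic only, not the actual MetricsManager code):

    // Each row is { value, prevPublishValue } for one non point-in-time metric.
    final class DeltaSketch {
      static long aggregate(long[][] metrics) {
        long sum = 0;
        for (long[] m : metrics) {
          long delta = m[0] - m[1];
          if (delta < 0) {
            continue; // the count went down since the last publish, so it is dropped
          }
          sum += delta;
        }
        return sum;
      }

      public static void main(String[] args) {
        // (123 - 0) + (123 - 100) = 146; the third entry (99 - 100 < 0) is excluded.
        System.out.println(aggregate(new long[][] {{123, 0}, {123, 100}, {99, 100}}));
      }
    }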

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputKafkaTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputKafkaTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputKafkaTest.java
index a7db3f8..38d4b8b 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputKafkaTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputKafkaTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -85,8 +85,7 @@ public class OutputKafkaTest {
     EasyMock.replay(mockKafkaProducer);
 
     for (int i = 0; i < 10; i++) {
-      InputMarker inputMarker = new InputMarker();
-      inputMarker.input = EasyMock.mock(Input.class);
+      InputMarker inputMarker = new InputMarker(EasyMock.mock(Input.class), null, 0);
       outputKafka.write("value" + i, inputMarker);
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java
new file mode 100644
index 0000000..e103346
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java
@@ -0,0 +1,256 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.logfeeder.output;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.junit.Test;
+
+public class OutputManagerTest {
+
+  @Test
+  public void testOutputManager_addAndRemoveOutputs() {
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    Output output4 = strictMock(Output.class);
+    
+    replay(output1, output2, output3, output4);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.retainUsedOutputs(Arrays.asList(output1, output2, output4));
+    
+    verify(output1, output2, output3, output4);
+    
+    List<Output> outputs = manager.getOutputs();
+    assertEquals(outputs.size(), 2);
+    assertEquals(outputs.get(0), output1);
+    assertEquals(outputs.get(1), output2);
+  }
+
+  @Test
+  public void testOutputManager_init() throws Exception {
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    
+    output1.init(); expectLastCall();
+    output2.init(); expectLastCall();
+    output3.init(); expectLastCall();
+    
+    replay(output1, output2, output3);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.init();
+    
+    verify(output1, output2, output3);
+  }
+
+  @Test
+  public void testOutputManager_write() throws Exception {
+    Map<String, Object> jsonObj = new HashMap<>();
+    jsonObj.put("type", "testType");
+    jsonObj.put("path", "testPath");
+    jsonObj.put("host", "testHost");
+    jsonObj.put("ip", "testIp");
+    jsonObj.put("level", "testLevel");
+    jsonObj.put("id", "testId");
+    
+    Input mockInput = strictMock(Input.class);
+    InputMarker inputMarker = new InputMarker(mockInput, null, 0);
+    
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    
+    expect(mockInput.getContextFields()).andReturn(Collections.<String, String> emptyMap());
+    expect(mockInput.isUseEventMD5()).andReturn(false);
+    expect(mockInput.isGenEventMD5()).andReturn(false);
+    expect(mockInput.getOutputList()).andReturn(Arrays.asList(output1, output2, output3));
+    
+    output1.write(jsonObj, inputMarker); expectLastCall();
+    output2.write(jsonObj, inputMarker); expectLastCall();
+    output3.write(jsonObj, inputMarker); expectLastCall();
+    
+    replay(output1, output2, output3, mockInput);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.write(jsonObj, inputMarker);
+    
+    verify(output1, output2, output3, mockInput);
+  }
+
+  @Test
+  public void testOutputManager_write2() throws Exception {
+    String jsonString = "{}";
+    
+    Input mockInput = strictMock(Input.class);
+    InputMarker inputMarker = new InputMarker(mockInput, null, 0);
+    
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    
+    expect(mockInput.getOutputList()).andReturn(Arrays.asList(output1, output2, output3));
+    
+    output1.write(jsonString, inputMarker); expectLastCall();
+    output2.write(jsonString, inputMarker); expectLastCall();
+    output3.write(jsonString, inputMarker); expectLastCall();
+    
+    replay(output1, output2, output3, mockInput);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.write(jsonString, inputMarker);
+    
+    verify(output1, output2, output3, mockInput);
+  }
+
+  @Test
+  public void testOutputManager_addMetricsContainers() throws Exception {
+    List<MetricData> metrics = new ArrayList<MetricData>();
+    
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    
+    output1.addMetricsContainers(metrics); expectLastCall();
+    output2.addMetricsContainers(metrics); expectLastCall();
+    output3.addMetricsContainers(metrics); expectLastCall();
+    
+    replay(output1, output2, output3);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.addMetricsContainers(metrics);
+    
+    verify(output1, output2, output3);
+  }
+
+  @Test
+  public void testOutputManager_logStat() throws Exception {
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    
+    output1.logStat(); expectLastCall();
+    output2.logStat(); expectLastCall();
+    output3.logStat(); expectLastCall();
+    
+    replay(output1, output2, output3);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.logStats();
+    
+    verify(output1, output2, output3);
+  }
+
+  @Test
+  public void testOutputManager_copyFile() throws Exception {
+    File f = new File("");
+    
+    Input mockInput = strictMock(Input.class);
+    InputMarker inputMarker = new InputMarker(mockInput, null, 0);
+    
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    
+    expect(mockInput.getOutputList()).andReturn(Arrays.asList(output1, output2, output3));
+    
+    output1.copyFile(f, inputMarker); expectLastCall();
+    output2.copyFile(f, inputMarker); expectLastCall();
+    output3.copyFile(f, inputMarker); expectLastCall();
+    
+    replay(output1, output2, output3, mockInput);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.copyFile(f, inputMarker);
+    
+    verify(output1, output2, output3, mockInput);
+  }
+
+  @Test
+  public void testOutputManager_close() throws Exception {
+    Output output1 = strictMock(Output.class);
+    Output output2 = strictMock(Output.class);
+    Output output3 = strictMock(Output.class);
+    
+    output1.setDrain(true); expectLastCall();
+    output2.setDrain(true); expectLastCall();
+    output3.setDrain(true); expectLastCall();
+    
+    output1.close(); expectLastCall();
+    output2.close(); expectLastCall();
+    output3.close(); expectLastCall();
+    
+    expect(output1.isClosed()).andReturn(true);
+    expect(output2.isClosed()).andReturn(true);
+    expect(output3.isClosed()).andReturn(true);
+    
+    replay(output1, output2, output3);
+    
+    OutputManager manager = new OutputManager();
+    manager.add(output1);
+    manager.add(output2);
+    manager.add(output3);
+    
+    manager.close();
+    
+    verify(output1, output2, output3);
+  }
+}
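
A change that repeats across these test diffs is replacing the old two-step construction (new InputMarker() followed by assigning the input field) with a single call such as new InputMarker(mockInput, null, 0), which suggests InputMarker now takes its collaborators in the constructor. Because the marker is then a plain object rather than a mock, it also drops out of the replay(...) calls in the OutputS3FileTest hunks below. A rough sketch of the shape this implies, with the second and third parameters assumed (not confirmed by the diff) to be a checkpoint file key and a line number:

    // Illustrative only; field names are assumptions made for the sketch.
    final class MarkerSketch {
      private final Object input;          // stands in for the Input the marker points back to
      private final String base64FileKey;  // assumed meaning of the tests' null argument
      private final int lineNumber;        // assumed meaning of the tests' 0 argument

      MarkerSketch(Object input, String base64FileKey, int lineNumber) {
        this.input = input;
        this.base64FileKey = base64FileKey;
        this.lineNumber = lineNumber;
      }

      public static void main(String[] args) {
        MarkerSketch marker = new MarkerSketch(new Object(), null, 0); // mirrors the calls in the tests
        System.out.println("marker for line " + marker.lineNumber);
      }
    }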

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java
index 20a4f1f..1872135 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -69,15 +69,14 @@ public class OutputS3FileTest {
   @Test
   public void shouldSpoolLogEventToNewSpooler() throws Exception {
 
-    InputMarker inputMarker = mock(InputMarker.class);
     Input input = mock(Input.class);
-    inputMarker.input = input;
+    InputMarker inputMarker = new InputMarker(input, null, 0);
     expect(input.getFilePath()).andReturn("/var/log/hdfs-namenode.log");
     expect(input.getStringValue(OutputS3File.INPUT_ATTRIBUTE_TYPE)).andReturn("hdfs-namenode");
     final LogSpooler spooler = mock(LogSpooler.class);
     spooler.add("log event block");
     final S3Uploader s3Uploader = mock(S3Uploader.class);
-    replay(input, inputMarker, spooler, s3Uploader);
+    replay(input, spooler, s3Uploader);
 
     OutputS3File outputS3File = new OutputS3File() {
       @Override
@@ -98,16 +97,15 @@ public class OutputS3FileTest {
 
   @Test
   public void shouldReuseSpoolerForSamePath() throws Exception {
-    InputMarker inputMarker = mock(InputMarker.class);
     Input input = mock(Input.class);
-    inputMarker.input = input;
+    InputMarker inputMarker = new InputMarker(input, null, 0);
     expect(input.getFilePath()).andReturn("/var/log/hdfs-namenode.log");
     expect(input.getStringValue(OutputS3File.INPUT_ATTRIBUTE_TYPE)).andReturn("hdfs-namenode");
     final LogSpooler spooler = mock(LogSpooler.class);
     spooler.add("log event block1");
     spooler.add("log event block2");
     final S3Uploader s3Uploader = mock(S3Uploader.class);
-    replay(input, inputMarker, spooler, s3Uploader);
+    replay(input, spooler, s3Uploader);
 
     OutputS3File outputS3File = new OutputS3File() {
       private boolean firstCallComplete;
@@ -169,16 +167,15 @@ public class OutputS3FileTest {
 
   @Test
   public void shouldUploadFileOnRollover() throws Exception {
-    InputMarker inputMarker = mock(InputMarker.class);
     Input input = mock(Input.class);
-    inputMarker.input = input;
+    InputMarker inputMarker = new InputMarker(input, null, 0);
     expect(input.getFilePath()).andReturn("/var/log/hdfs-namenode.log");
     expect(input.getStringValue(OutputS3File.INPUT_ATTRIBUTE_TYPE)).andReturn("hdfs-namenode");
     final LogSpooler spooler = mock(LogSpooler.class);
     spooler.add("log event block1");
     final S3Uploader s3Uploader = mock(S3Uploader.class);
     s3Uploader.addFileForUpload("/var/ambari-logsearch/logfeeder/hdfs-namenode.log.gz");
-    replay(input, inputMarker, spooler, s3Uploader);
+    replay(input, spooler, s3Uploader);
 
     OutputS3File outputS3File = new OutputS3File() {
       @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputSolrTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputSolrTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputSolrTest.java
index 33bb33f..8985110 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputSolrTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputSolrTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -97,8 +97,7 @@ public class OutputSolrTest {
         jsonObj.put("name" + ++count, "value" + ++count);
       jsonObj.put("id", ++count);
 
-      InputMarker inputMarker = new InputMarker();
-      inputMarker.input = EasyMock.mock(Input.class);
+      InputMarker inputMarker = new InputMarker(EasyMock.mock(Input.class), null, 0);
       outputSolr.write(jsonObj, inputMarker);
 
       SolrInputDocument doc = new SolrInputDocument();

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java
index cc6da56..d1376c4 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3LogPathResolverTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java
index c64e0c5..5477f5c 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/S3UploaderTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,7 +18,6 @@
 
 package org.apache.ambari.logfeeder.output;
 
-import org.apache.ambari.logfeeder.util.S3Util;
 import org.junit.Test;
 
 import java.io.File;
@@ -46,22 +45,20 @@ public class S3UploaderTest {
     Map<String, Object> configs = setupS3Configs();
 
     S3OutputConfiguration s3OutputConfiguration = new S3OutputConfiguration(configs);
-    S3Util s3Util = mock(S3Util.class);
-    String s3Key = String.format("%s/%s/%s.%s", TEST_PATH, LOG_TYPE, fileName, GZ);
-    s3Util.uploadFileTos3(TEST_BUCKET, s3Key, compressedFile, ACCESS_KEY_VALUE, SECRET_KEY_VALUE);
     expect(compressedFile.delete()).andReturn(true);
     expect(fileToUpload.delete()).andReturn(true);
-    replay(fileToUpload, compressedFile, s3Util);
+    replay(fileToUpload, compressedFile);
 
-    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, s3Util, true, LOG_TYPE) {
+    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, true, LOG_TYPE) {
       @Override
       protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) {
         return compressedFile;
       }
+      protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) {
+      }
     };
     String resolvedPath = s3Uploader.uploadFile(fileToUpload, LOG_TYPE);
 
-    verify(s3Util);
     assertEquals("test_path/hdfs_namenode/hdfs_namenode.log.123343493473948.gz", resolvedPath);
   }
 
@@ -74,18 +71,17 @@ public class S3UploaderTest {
     Map<String, Object> configs = setupS3Configs();
 
     S3OutputConfiguration s3OutputConfiguration = new S3OutputConfiguration(configs);
-    S3Util s3Util = mock(S3Util.class);
-    String s3Key = String.format("%s/%s/%s.%s", TEST_PATH, LOG_TYPE, fileName, GZ);
-    s3Util.uploadFileTos3(TEST_BUCKET, s3Key, compressedFile, ACCESS_KEY_VALUE, SECRET_KEY_VALUE);
     expect(compressedFile.delete()).andReturn(true);
     expect(fileToUpload.delete()).andReturn(true);
-    replay(fileToUpload, compressedFile, s3Util);
+    replay(fileToUpload, compressedFile);
 
-    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, s3Util, true, LOG_TYPE) {
+    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, true, LOG_TYPE) {
       @Override
       protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) {
         return compressedFile;
       }
+      protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) {
+      }
     };
     s3Uploader.uploadFile(fileToUpload, LOG_TYPE);
 
@@ -102,17 +98,16 @@ public class S3UploaderTest {
     Map<String, Object> configs = setupS3Configs();
 
     S3OutputConfiguration s3OutputConfiguration = new S3OutputConfiguration(configs);
-    S3Util s3Util = mock(S3Util.class);
-    String s3Key = String.format("%s/%s/%s.%s", TEST_PATH, LOG_TYPE, fileName, GZ);
-    s3Util.uploadFileTos3(TEST_BUCKET, s3Key, compressedFile, ACCESS_KEY_VALUE, SECRET_KEY_VALUE);
     expect(compressedFile.delete()).andReturn(true);
-    replay(fileToUpload, compressedFile, s3Util);
+    replay(fileToUpload, compressedFile);
 
-    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, s3Util, false, LOG_TYPE) {
+    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, false, LOG_TYPE) {
       @Override
       protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) {
         return compressedFile;
       }
+      protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) {
+      }
     };
     s3Uploader.uploadFile(fileToUpload, LOG_TYPE);
 
@@ -131,22 +126,19 @@ public class S3UploaderTest {
 
 
     S3OutputConfiguration s3OutputConfiguration = new S3OutputConfiguration(configs);
-    S3Util s3Util = mock(S3Util.class);
-    String s3Key = String.format("%s/%s/%s/%s.%s", "cl1", TEST_PATH, LOG_TYPE, fileName, GZ);
-    s3Util.uploadFileTos3(TEST_BUCKET, s3Key, compressedFile, ACCESS_KEY_VALUE, SECRET_KEY_VALUE);
     expect(compressedFile.delete()).andReturn(true);
     expect(fileToUpload.delete()).andReturn(true);
-    replay(fileToUpload, compressedFile, s3Util);
+    replay(fileToUpload, compressedFile);
 
-    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, s3Util, true, LOG_TYPE) {
+    S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, true, LOG_TYPE) {
       @Override
       protected File createCompressedFileForUpload(File fileToUpload, String compressionAlgo) {
         return compressedFile;
       }
+      protected void uploadFileToS3(String bucketName, String s3Key, File localFile, String accessKey, String secretKey) {
+      }
     };
     s3Uploader.uploadFile(fileToUpload, LOG_TYPE);
-
-    verify(s3Util);
   }
 
   private Map<String, Object> setupS3Configs() {

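Instead of mocking an injected S3Util, the updated S3UploaderTest builds an anonymous S3Uploader subclass that overrides the protected createCompressedFileForUpload and uploadFileToS3 hooks, so no real compression or S3 traffic happens in the tests. A stand-alone sketch of that subclass-and-override test seam, using placeholder names rather than the actual Ambari API:

    // Production code calls the protected hooks; a test overrides them with no-ops or canned results.
    class UploaderSketch {
      String upload(java.io.File file, String logType) {
        java.io.File compressed = compress(file, "gz");
        String key = logType + "/" + compressed.getName();
        send("placeholder-bucket", key, compressed);
        return key;
      }

      protected java.io.File compress(java.io.File file, String algo) {
        return new java.io.File(file.getName() + "." + algo);
      }

      protected void send(String bucket, String key, java.io.File local) {
        // production code would push the file to S3 here; the tests above override this hook
      }

      public static void main(String[] args) {
        UploaderSketch testUploader = new UploaderSketch() {
          @Override
          protected void send(String bucket, String key, java.io.File local) {
            System.out.println("skipped real upload of " + key + " to " + bucket);
          }
        };
        System.out.println(testUploader.upload(new java.io.File("hdfs_namenode.log.123343493473948"), "hdfs_namenode"));
      }
    }
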
http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerTest.java
index 7a47039..2cfe9ff 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/spool/LogSpoolerTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/AWSUtilTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/AWSUtilTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/AWSUtilTest.java
deleted file mode 100644
index 6df2283..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/AWSUtilTest.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder.util;
-
-import org.apache.ambari.logfeeder.util.AWSUtil;
-
-public class AWSUtilTest {
-  public void testAWSUtil_getAwsUserName() throws Exception {
-    String S3_ACCESS_KEY = "S3_ACCESS_KEY";
-    String S3_SECRET_KEY = "S3_SECRET_KEY";
-    AWSUtil.INSTANCE.getAwsUserName(S3_ACCESS_KEY, S3_SECRET_KEY);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/PlaceholderUtilTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/PlaceholderUtilTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/PlaceholderUtilTest.java
index 9789a14..43e03c7 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/PlaceholderUtilTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/PlaceholderUtilTest.java
@@ -1,6 +1,4 @@
-package org.apache.ambari.logfeeder.util;
-
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -17,6 +15,9 @@ package org.apache.ambari.logfeeder.util;
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+package org.apache.ambari.logfeeder.util;
+
 import java.util.HashMap;
 
 import org.junit.Test;
@@ -26,19 +27,18 @@ import static org.junit.Assert.assertEquals;
 public class PlaceholderUtilTest {
   @Test
   public void testPlaceholderUtil_replaceVariables() {
-    HashMap<String, String> contextParam = new HashMap<String, String>();
     String hostName = "host1";
     String ip = "127.0.0.1";
     String clusterName = "test-cluster";
+    
+    HashMap<String, String> contextParam = new HashMap<String, String>();
     contextParam.put("host", hostName);
     contextParam.put("ip", ip);
     contextParam.put("cluster", clusterName);
-    String inputStr = "$CLUSTER/logfeeder/$HOST-$IP/logs";
-    String resultStr = PlaceholderUtil.replaceVariables(inputStr, contextParam);
+    
+    String resultStr = PlaceholderUtil.replaceVariables("$CLUSTER/logfeeder/$HOST-$IP/logs", contextParam);
     String expectedStr = clusterName + "/logfeeder/" + hostName + "-" + ip + "/logs";
-    assertEquals("Result string :" + resultStr
-        + " is not equal to exptected string :" + expectedStr, resultStr,
-        expectedStr);
+    
+    assertEquals("Result string :" + resultStr + " is not equal to exptected string :" + expectedStr, resultStr, expectedStr);
   }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/S3UtilTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/S3UtilTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/S3UtilTest.java
index 84554b0..02918be 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/S3UtilTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/util/S3UtilTest.java
@@ -26,14 +26,14 @@ public class S3UtilTest {
   public void testS3Util_pathToBucketName() throws Exception {
     String s3Path = "s3://bucket_name/path/file.txt";
     String expectedBucketName = "bucket_name";
-    String actualBucketName = S3Util.INSTANCE.getBucketName(s3Path);
+    String actualBucketName = S3Util.getBucketName(s3Path);
     assertEquals(expectedBucketName, actualBucketName);
   }
 
   public void testS3Util_pathToS3Key() throws Exception {
     String s3Path = "s3://bucket_name/path/file.txt";
     String expectedS3key = "path/file.txt";
-    String actualS3key = S3Util.INSTANCE.getS3Key(s3Path);
+    String actualS3key = S3Util.getS3Key(s3Path);
     assertEquals(expectedS3key, actualS3key);
   }
 

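The S3UtilTest change above only swaps the INSTANCE singleton for static calls, but the expected values also pin down the behaviour: getBucketName and getS3Key appear to split an s3://bucket/key path at the first slash after the scheme. The following sketch reproduces that assumed parsing for illustration; it is an inference from the test data, not the Ambari S3Util implementation:

    // Assumed behaviour inferred from the test expectations above.
    final class S3PathSketch {
      static String bucketName(String s3Path) {
        String rest = s3Path.substring("s3://".length());
        return rest.substring(0, rest.indexOf('/'));
      }

      static String s3Key(String s3Path) {
        String rest = s3Path.substring("s3://".length());
        return rest.substring(rest.indexOf('/') + 1);
      }

      public static void main(String[] args) {
        System.out.println(bucketName("s3://bucket_name/path/file.txt")); // bucket_name
        System.out.println(s3Key("s3://bucket_name/path/file.txt"));      // path/file.txt
      }
    }
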
http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/logfeeder.properties
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/logfeeder.properties b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/logfeeder.properties
new file mode 100644
index 0000000..59020cc
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/resources/logfeeder.properties
@@ -0,0 +1,20 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+logfeeder.log.filter.enable=true
+logfeeder.solr.config.interval=5
+logfeeder.solr.zk_connect_string=some_connect_string
+logfeeder.metrics.collector.hosts=some_collector_host
+node.hostname=test_host_name
\ No newline at end of file


[22/50] [abbrv] ambari git commit: AMBARI-18253. Fix LogSearch utility classes (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
index e3c2063..505b74d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
@@ -33,18 +33,15 @@ import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.view.VHost;
 import org.apache.ambari.logsearch.view.VSummary;
 import org.apache.log4j.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
 
-@Component
 public class FileUtil {
-
   private static final Logger logger = Logger.getLogger(FileUtil.class);
 
-  @Autowired
-  private RESTErrorUtil restErrorUtil;
-
-  public Response saveToFile(String text, String fileName, VSummary vsummary) {
+  private FileUtil() {
+    throw new UnsupportedOperationException();
+  }
+  
+  public static Response saveToFile(String text, String fileName, VSummary vsummary) {
     String mainExportedFile = "";
     FileOutputStream fis = null;
     try {
@@ -107,7 +104,7 @@ public class FileUtil {
         .build();
     } catch (Exception e) {
       logger.error(e.getMessage());
-      throw restErrorUtil.createRESTException(e.getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(e.getMessage(), MessageEnums.ERROR_SYSTEM);
     } finally {
       if (fis != null) {
         try {
@@ -118,7 +115,7 @@ public class FileUtil {
     }
   }
 
-  public File getFileFromClasspath(String filename) {
+  public static File getFileFromClasspath(String filename) {
     URL fileCompleteUrl = Thread.currentThread().getContextClassLoader().getResource(filename);
     logger.debug("File Complete URI :" + fileCompleteUrl);
     File file = null;

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java
index 5332d18..d08c0f4 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java
@@ -37,25 +37,22 @@ import org.codehaus.jackson.type.TypeReference;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
 
-@Component
 public class JSONUtil {
-
   private static final Logger logger = Logger.getLogger(JSONUtil.class);
 
-  @Autowired
-  private RESTErrorUtil restErrorUtil;
-
   private static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";
-  private Gson gson = new GsonBuilder().setDateFormat(DATE_FORMAT).create();
+  private static final Gson gson = new GsonBuilder().setDateFormat(DATE_FORMAT).create();
 
+  private JSONUtil() {
+    throw new UnsupportedOperationException();
+  }
+  
   @SuppressWarnings("unchecked")
-  public HashMap<String, Object> jsonToMapObject(String jsonStr) {
+  public static HashMap<String, Object> jsonToMapObject(String jsonStr) {
     if (StringUtils.isBlank(jsonStr)) {
       logger.info("jsonString is empty, cannot conver to map");
       return null;
@@ -66,17 +63,17 @@ public class JSONUtil {
       return (HashMap<String, Object>) tempObject;
 
     } catch (JsonParseException e) {
-      throw restErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
+      throw RESTErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
     } catch (JsonMappingException e) {
-      throw restErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
+      throw RESTErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
     } catch (IOException e) {
-      throw restErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
+      throw RESTErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
     }
 
   }
 
   @SuppressWarnings("unchecked")
-  public List<HashMap<String, Object>> jsonToMapObjectList(String jsonStr) {
+  public static List<HashMap<String, Object>> jsonToMapObjectList(String jsonStr) {
     if (StringUtils.isBlank(jsonStr)) {
       return null;
     }
@@ -86,16 +83,16 @@ public class JSONUtil {
       return (List<HashMap<String, Object>>) tempObject;
 
     } catch (JsonParseException e) {
-      throw restErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
+      throw RESTErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
     } catch (JsonMappingException e) {
-      throw restErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
+      throw RESTErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
     } catch (IOException e) {
-      throw restErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
+      throw RESTErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
     }
 
   }
 
-  public boolean isJSONValid(String jsonString) {
+  public static boolean isJSONValid(String jsonString) {
     try {
       new JSONObject(jsonString);
     } catch (JSONException ex) {
@@ -108,7 +105,7 @@ public class JSONUtil {
     return true;
   }
 
-  public HashMap<String, Object> readJsonFromFile(File jsonFile) {
+  public static HashMap<String, Object> readJsonFromFile(File jsonFile) {
     ObjectMapper mapper = new ObjectMapper();
     try {
       HashMap<String, Object> jsonmap = mapper.readValue(jsonFile, new TypeReference<HashMap<String, Object>>() {});
@@ -119,7 +116,7 @@ public class JSONUtil {
     return new HashMap<String, Object>();
   }
 
-  public String mapToJSON(Map<String, Object> map) {
+  public static String mapToJSON(Map<String, Object> map) {
     ObjectMapper om = new ObjectMapper();
     try {
       String json = om.writeValueAsString(map);
@@ -134,7 +131,7 @@ public class JSONUtil {
   /**
    * WRITE JOSN IN FILE ( Delete existing file and create new file)
    */
-  public synchronized void writeJSONInFile(String jsonStr, File outputFile, boolean beautify) {
+  public static synchronized void writeJSONInFile(String jsonStr, File outputFile, boolean beautify) {
     FileWriter fileWriter = null;
     if (outputFile == null) {
       logger.error("user_pass json file can't be null.");
@@ -170,11 +167,11 @@ public class JSONUtil {
     }
   }
 
-  public String objToJson(Object obj) {
+  public static String objToJson(Object obj) {
     return gson.toJson(obj);
   }
 
-  public Object jsonToObj(String json, Class<?> klass) {
+  public static Object jsonToObj(String json, Class<?> klass) {
     return gson.fromJson(json, klass);
   }
 

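The FileUtil and JSONUtil diffs above, and the RESTErrorUtil and SolrUtil hunks below, all apply the same refactoring: drop the Spring @Component/@Autowired wiring, make the methods static, and block instantiation with a private constructor that throws UnsupportedOperationException, so call sites move from injected fields (restErrorUtil.createRESTException(...)) to static calls (RESTErrorUtil.createRESTException(...)). A generic sketch of that non-instantiable utility shape, with illustrative names only:

    // ExampleUtil and trimToEmpty are illustrative names, not part of Log Search.
    final class ExampleUtil {
      private ExampleUtil() {
        throw new UnsupportedOperationException(); // same guard the diffs above add
      }

      static String trimToEmpty(String value) {
        return value == null ? "" : value.trim();
      }

      public static void main(String[] args) {
        System.out.println(ExampleUtil.trimToEmpty("  static call, no Spring bean needed  "));
      }
    }
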
http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/PropertiesUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/PropertiesUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/PropertiesUtil.java
deleted file mode 100644
index 59f0296..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/PropertiesUtil.java
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.util;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-
-import org.apache.log4j.Logger;
-import org.springframework.beans.BeansException;
-import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
-import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
-
-public class PropertiesUtil extends PropertyPlaceholderConfigurer {
-  private static final Logger logger = Logger.getLogger(PropertiesUtil.class);
-  
-  private static final String LOGSEARCH_PROP_FILE="logsearch.properties";
-  
-  private static Map<String, String> propertiesMap;
-
-  private PropertiesUtil() {
-
-  }
-  
- static {
-    propertiesMap = new HashMap<String, String>();
-    Properties properties = new Properties();
-    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader().getResource(LOGSEARCH_PROP_FILE);
-    FileInputStream fileInputStream = null;
-    try {
-      File file = new File(fileCompleteUrl.toURI());
-      fileInputStream = new FileInputStream(file.getAbsoluteFile());
-      properties.load(fileInputStream);
-    } catch (IOException | URISyntaxException e) {
-      logger.error("error loading prop for protocol config",e);
-    } finally {
-      if (fileInputStream != null) {
-        try {
-          fileInputStream.close();
-        } catch (IOException e) {
-        }
-      }
-    }
-    for (String key : properties.stringPropertyNames()) {
-      String value = properties.getProperty(key);
-      propertiesMap.put(key, value);
-    }
-  }
-
-  @Override
-  protected void processProperties(ConfigurableListableBeanFactory beanFactory, Properties props) throws BeansException {
-    super.processProperties(beanFactory, props);
-
-    propertiesMap = new HashMap<String, String>();
-
-    // First add the system properties
-    Set<Object> keySet = System.getProperties().keySet();
-    for (Object key : keySet) {
-      String keyStr = key.toString();
-      propertiesMap.put(keyStr, System.getProperties().getProperty(keyStr).trim());
-    }
-
-    // add our properties now
-    keySet = props.keySet();
-    for (Object key : keySet) {
-      String keyStr = key.toString();
-      propertiesMap.put(keyStr, props.getProperty(keyStr).trim());
-    }
-  }
-
-  public static String getProperty(String key, String defaultValue) {
-    if (key == null) {
-      return null;
-    }
-    String rtrnVal = propertiesMap.get(key);
-    if (rtrnVal == null) {
-      rtrnVal = defaultValue;
-    }
-    return rtrnVal;
-  }
-
-  public static String getProperty(String key) {
-    if (key == null) {
-      return null;
-    }
-    return propertiesMap.get(key);
-  }
-
-  public static String[] getPropertyStringList(String key) {
-    if (key == null) {
-      return null;
-    }
-    String value = propertiesMap.get(key);
-    if (value == null || value.trim().equals("")) {
-      return new String[0];
-    } else {
-      String[] splitValues = value.split(",");
-      String[] returnValues = new String[splitValues.length];
-      for (int i = 0; i < splitValues.length; i++) {
-        returnValues[i] = splitValues[i].trim();
-      }
-      return returnValues;
-    }
-  }
-
-  public static Integer getIntProperty(String key, int defaultValue) {
-    if (key == null) {
-      return null;
-    }
-    String rtrnVal = propertiesMap.get(key);
-    if (rtrnVal == null) {
-      return defaultValue;
-    }
-    return Integer.valueOf(rtrnVal);
-  }
-
-  public static Integer getIntProperty(String key) {
-    if (key == null) {
-      return null;
-    }
-    String rtrnVal = propertiesMap.get(key);
-    if (rtrnVal == null) {
-      return null;
-    }
-    return Integer.valueOf(rtrnVal);
-  }
-
-  public static Long getLongProperty(String key, long defaultValue) {
-    if (key == null) {
-      return null;
-    }
-    String rtrnVal = propertiesMap.get(key);
-    if (rtrnVal == null) {
-      return defaultValue;
-    }
-    return Long.valueOf(rtrnVal);
-  }
-
-  public static Long getLongProperty(String key) {
-    if (key == null) {
-      return null;
-    }
-    String rtrnVal = propertiesMap.get(key);
-    if (rtrnVal == null) {
-      return null;
-    }
-    return Long.valueOf(rtrnVal);
-  }
-
-  public static boolean getBooleanProperty(String key, boolean defaultValue) {
-    if (key == null) {
-      return defaultValue;
-    }
-    String value = getProperty(key);
-    if (value == null) {
-      return defaultValue;
-    }
-    return Boolean.parseBoolean(value);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/QueryBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/QueryBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/QueryBase.java
deleted file mode 100644
index 5961cff..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/QueryBase.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.util;
-
-import org.apache.ambari.logsearch.common.LogSearchConstants;
-import org.apache.commons.lang.StringUtils;
-import org.apache.solr.client.solrj.SolrQuery;
-
-public class QueryBase {
-
-  //Solr Facet Methods
-  public void setFacetField(SolrQuery solrQuery, String facetField) {
-    solrQuery.setFacet(true);
-    setRowCount(solrQuery, 0);
-    solrQuery.set(LogSearchConstants.FACET_FIELD, facetField);
-    setFacetLimit(solrQuery, -1);
-  }
-
-  public void setJSONFacet(SolrQuery solrQuery, String jsonQuery) {
-    solrQuery.setFacet(true);
-    setRowCount(solrQuery, 0);
-    solrQuery.set(LogSearchConstants.FACET_JSON_FIELD, jsonQuery);
-    setFacetLimit(solrQuery, -1);
-  }
-
-  public void setFacetSort(SolrQuery solrQuery, String sortType) {
-    solrQuery.setFacet(true);
-    solrQuery.setFacetSort(sortType);
-  }
-
-  public void setFacetPivot(SolrQuery solrQuery, int mincount, String... hirarchy) {
-    solrQuery.setFacet(true);
-    setRowCount(solrQuery, 0);
-    solrQuery.set(LogSearchConstants.FACET_PIVOT, hirarchy);
-    solrQuery.set(LogSearchConstants.FACET_PIVOT_MINCOUNT, mincount);
-    setFacetLimit(solrQuery, -1);
-  }
-
-  public void setFacetDate(SolrQuery solrQuery, String facetField, String from, String to, String unit) {
-    solrQuery.setFacet(true);
-    setRowCount(solrQuery, 0);
-    solrQuery.set(LogSearchConstants.FACET_DATE, facetField);
-    solrQuery.set(LogSearchConstants.FACET_DATE_START, from);
-    solrQuery.set(LogSearchConstants.FACET_DATE_END, to);
-    solrQuery.set(LogSearchConstants.FACET_DATE_GAP, unit);
-    solrQuery.set(LogSearchConstants.FACET_MINCOUNT, 0);
-    setFacetLimit(solrQuery, -1);
-  }
-
-  public void setFacetRange(SolrQuery solrQuery, String facetField, String from, String to, String unit) {
-    solrQuery.setFacet(true);
-    setRowCount(solrQuery, 0);
-    solrQuery.set(LogSearchConstants.FACET_RANGE, facetField);
-    solrQuery.set(LogSearchConstants.FACET_RANGE_START, from);
-    solrQuery.set(LogSearchConstants.FACET_RANGE_END, to);
-    solrQuery.set(LogSearchConstants.FACET_RANGE_GAP, unit);
-    solrQuery.set(LogSearchConstants.FACET_MINCOUNT, 0);
-    setFacetLimit(solrQuery, -1);
-  }
-
-  public void setFacetLimit(SolrQuery solrQuery, int limit) {
-    solrQuery.set("facet.limit", limit);
-  }
-
-  //Solr Group Mehtods
-  public void setGroupField(SolrQuery solrQuery, String groupField, int rows) {
-    solrQuery.set(LogSearchConstants.FACET_GROUP, true);
-    solrQuery.set(LogSearchConstants.FACET_GROUP_FIELD, groupField);
-    solrQuery.set(LogSearchConstants.FACET_GROUP_MAIN, true);
-    setRowCount(solrQuery, rows);
-  }
-
-  //Main Query
-  public void setMainQuery(SolrQuery solrQuery, String query) {
-    String defalultQuery = "*:*";
-    if (StringUtils.isBlank(query)){
-      solrQuery.setQuery(defalultQuery);
-    }else{
-      solrQuery.setQuery(query);
-    }
-  }
-
-  public void setStart(SolrQuery solrQuery, int start) {
-    int defaultStart = 0;
-    if (start > defaultStart) {
-      solrQuery.setStart(start);
-    } else {
-      solrQuery.setStart(defaultStart);
-    }
-  }
-
-  //Set Number of Rows
-  public void setRowCount(SolrQuery solrQuery, int rows) {
-    if (rows > 0) {
-      solrQuery.setRows(rows);
-    } else {
-      solrQuery.setRows(0);
-      solrQuery.remove(LogSearchConstants.SORT);
-    }
-  }
-
-  //Solr Facet Methods
-  public void setFacetFieldWithMincount(SolrQuery solrQuery, String facetField, int minCount) {
-    solrQuery.setFacet(true);
-    setRowCount(solrQuery, 0);
-    solrQuery.set(LogSearchConstants.FACET_FIELD, facetField);
-    solrQuery.set(LogSearchConstants.FACET_MINCOUNT, minCount);
-    setFacetLimit(solrQuery, -1);
-  }
-  
-  public void setFl(SolrQuery solrQuery,String field){
-    solrQuery.set(LogSearchConstants.FL, field);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java
index 6df5b05..88fb0d5 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java
@@ -29,19 +29,19 @@ import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.view.VMessage;
 import org.apache.ambari.logsearch.view.VResponse;
 import org.apache.log4j.Logger;
-import org.springframework.stereotype.Component;
 
-@Component
 public class RESTErrorUtil {
+  private static final Logger logger = Logger.getLogger(RESTErrorUtil.class);
 
-  private static Logger logger = Logger.getLogger(RESTErrorUtil.class);
-
-  public WebApplicationException createRESTException(VResponse response) {
+  private RESTErrorUtil() {
+    throw new UnsupportedOperationException();
+  }
+  
+  public static WebApplicationException createRESTException(VResponse response) {
     return createRESTException(response, HttpServletResponse.SC_BAD_REQUEST);
   }
 
-  public WebApplicationException createRESTException(String errorMessage,
-                                                     MessageEnums messageEnum) {
+  public static WebApplicationException createRESTException(String errorMessage, MessageEnums messageEnum) {
     List<VMessage> messageList = new ArrayList<VMessage>();
     messageList.add(messageEnum.getMessage());
 
@@ -54,7 +54,7 @@ public class RESTErrorUtil {
     return webAppEx;
   }
 
-  private WebApplicationException createRESTException(VResponse response, int sc) {
+  private static WebApplicationException createRESTException(VResponse response, int sc) {
     Response errorResponse = Response.status(sc).entity(response).build();
     WebApplicationException restException = new WebApplicationException(errorResponse);
     restException.fillInStackTrace();
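
With RESTErrorUtil turned into a non-instantiable utility class, callers invoke the factory methods statically instead of autowiring a bean. A minimal sketch, not part of the patch; the MessageEnums.DATA_NOT_FOUND constant is an assumed example value:

import org.apache.ambari.logsearch.common.MessageEnums;
import org.apache.ambari.logsearch.util.RESTErrorUtil;

public class ErrorRaisingSketch {
  public Object requireLog(Object log, String id) {
    if (log == null) {
      // Static call replaces the former "@Autowired RESTErrorUtil restErrorUtil" field.
      throw RESTErrorUtil.createRESTException("log entry not found: " + id, MessageEnums.DATA_NOT_FOUND);
    }
    return log;
  }
}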

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
index bcf9605..33262f3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
@@ -25,30 +25,28 @@ import java.util.Locale;
 
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
+import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.schema.TrieDoubleField;
 import org.apache.solr.schema.TrieFloatField;
 import org.apache.solr.schema.TrieIntField;
 import org.apache.solr.schema.TrieLongField;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.StringUtils;
-import org.springframework.stereotype.Component;
 import org.springframework.util.CollectionUtils;
 
-@Component
 public class SolrUtil {
-
-  @Autowired
-  private JSONUtil jsonUtil;
-
-  public String setField(String fieldName, String value) {
+  private SolrUtil() {
+    throw new UnsupportedOperationException();
+  }
+  
+  public static String setField(String fieldName, String value) {
     if (value == null || value.trim().length() == 0) {
       return "";
     }
     return fieldName + ":" + value.trim().toLowerCase(Locale.ENGLISH);
   }
 
-  public String inList(String fieldName, int[] values) {
+  public static String inList(String fieldName, int[] values) {
     if (ArrayUtils.isEmpty(values)) {
       return "";
     }
@@ -64,7 +62,7 @@ public class SolrUtil {
     }
   }
 
-  public String inList(Collection<Long> values) {
+  public static String inList(Collection<Long> values) {
     if (CollectionUtils.isEmpty(values)) {
       return "";
     }
@@ -81,7 +79,7 @@ public class SolrUtil {
 
   }
 
-  public String orList(String fieldName, String[] valueList, String wildCard) {
+  public static String orList(String fieldName, String[] valueList, String wildCard) {
     if (ArrayUtils.isEmpty(valueList)) {
       return "";
     }
@@ -109,7 +107,7 @@ public class SolrUtil {
 
   }
 
-  public String andList(String fieldName, String[] valueList, String wildCard) {
+  public static String andList(String fieldName, String[] valueList, String wildCard) {
     if (ArrayUtils.isEmpty(valueList)) {
       return "";
     }
@@ -140,7 +138,7 @@ public class SolrUtil {
   /**
    * Copied from Solr ClientUtils.escapeQueryChars and removed escaping *
    */
-  public String escapeQueryChars(String s) {
+  public static String escapeQueryChars(String s) {
     StringBuilder sb = new StringBuilder();
     int prev = 0;
     if (s != null) {
@@ -169,7 +167,7 @@ public class SolrUtil {
     return sb.toString();
   }
 
-  private String escapeForWhiteSpaceTokenizer(String search) {
+  private static String escapeForWhiteSpaceTokenizer(String search) {
     if (search == null) {
       return null;
     }
@@ -188,7 +186,7 @@ public class SolrUtil {
     return newSearch;
   }
 
-  public String escapeForStandardTokenizer(String search) {
+  public static String escapeForStandardTokenizer(String search) {
     if (search == null) {
       return null;
     }
@@ -207,7 +205,7 @@ public class SolrUtil {
     return newSearch;
   }
 
-  private String escapeForKeyTokenizer(String search) {
+  private static String escapeForKeyTokenizer(String search) {
     if (search.startsWith("*") && search.endsWith("*") && !StringUtils.isBlank(search)) {
       // Remove the * from both the sides
       if (search.length() > 1) {
@@ -226,7 +224,7 @@ public class SolrUtil {
    * This is a special case scenario to handle log_message for wild card
    * scenarios
    */
-  public String escapeForLogMessage(String field, String search) {
+  public static String escapeForLogMessage(String field, String search) {
     if (search.startsWith("*") && search.endsWith("*")) {
       field = LogSearchConstants.SOLR_KEY_LOG_MESSAGE;
       search = escapeForKeyTokenizer(search);
@@ -238,7 +236,7 @@ public class SolrUtil {
     return field + ":" + search;
   }
 
-  public String makeSolrSearchString(String search) {
+  public static String makeSolrSearchString(String search) {
     String newString = search.trim();
     String newSearch = newString.replaceAll("(?=[]\\[+&|!(){},:\"^~/=$@%?:.\\\\])", "\\\\");
     newSearch = newSearch.replace("\n", "*");
@@ -249,7 +247,7 @@ public class SolrUtil {
     return "*" + newSearch + "*";
   }
 
-  public String makeSolrSearchStringWithoutAsterisk(String search) {
+  public static String makeSolrSearchStringWithoutAsterisk(String search) {
     String newString = search.trim();
     String newSearch = newString.replaceAll("(?=[]\\[+&|!(){}^\"~=/$@%?:.\\\\])", "\\\\");
     newSearch = newSearch.replace("\n", "*");
@@ -261,7 +259,7 @@ public class SolrUtil {
     return newSearch;
   }
 
-  public String makeSearcableString(String search) {
+  public static String makeSearcableString(String search) {
     if (StringUtils.isBlank(search)) {
       return "";
     }
@@ -272,7 +270,7 @@ public class SolrUtil {
   }
   
 
-  public boolean isSolrFieldNumber(String fieldType,SolrDaoBase solrDaoBase) {
+  public static boolean isSolrFieldNumber(String fieldType,SolrDaoBase solrDaoBase) {
     if (StringUtils.isBlank(fieldType)) {
       return false;
     } else {
@@ -281,30 +279,25 @@ public class SolrUtil {
         return false;
       }
       String fieldTypeClassName = (String) typeInfoMap.get("class");
-      if (fieldTypeClassName.equalsIgnoreCase(TrieIntField.class
-          .getSimpleName())) {
+      if (fieldTypeClassName.equalsIgnoreCase(TrieIntField.class.getSimpleName())) {
         return true;
       }
-      if (fieldTypeClassName.equalsIgnoreCase(TrieDoubleField.class
-          .getSimpleName())) {
+      if (fieldTypeClassName.equalsIgnoreCase(TrieDoubleField.class.getSimpleName())) {
         return true;
       }
-      if (fieldTypeClassName.equalsIgnoreCase(TrieFloatField.class
-          .getSimpleName())) {
+      if (fieldTypeClassName.equalsIgnoreCase(TrieFloatField.class.getSimpleName())) {
         return true;
       }
-      if (fieldTypeClassName.equalsIgnoreCase(TrieLongField.class
-          .getSimpleName())) {
+      if (fieldTypeClassName.equalsIgnoreCase(TrieLongField.class.getSimpleName())) {
         return true;
       }
       return false;
     }
   }
   
-  public HashMap<String, Object> getFieldTypeInfoMap(String fieldType,SolrDaoBase solrDaoBase) {
+  public static HashMap<String, Object> getFieldTypeInfoMap(String fieldType,SolrDaoBase solrDaoBase) {
     String fieldTypeMetaData = solrDaoBase.schemaFieldTypeMap.get(fieldType);
-    HashMap<String, Object> fieldTypeMap = jsonUtil
-        .jsonToMapObject(fieldTypeMetaData);
+    HashMap<String, Object> fieldTypeMap = JSONUtil.jsonToMapObject(fieldTypeMetaData);
     if (fieldTypeMap == null) {
       return new HashMap<String, Object>();
     }
@@ -315,4 +308,111 @@ public class SolrUtil {
     }
     return fieldTypeMap;
   }
+  
+  //=============================================================================================================
+  
+  //Solr Facet Methods
+  public static void setFacetField(SolrQuery solrQuery, String facetField) {
+    solrQuery.setFacet(true);
+    setRowCount(solrQuery, 0);
+    solrQuery.set(LogSearchConstants.FACET_FIELD, facetField);
+    setFacetLimit(solrQuery, -1);
+  }
+
+  public static void setJSONFacet(SolrQuery solrQuery, String jsonQuery) {
+    solrQuery.setFacet(true);
+    setRowCount(solrQuery, 0);
+    solrQuery.set(LogSearchConstants.FACET_JSON_FIELD, jsonQuery);
+    setFacetLimit(solrQuery, -1);
+  }
+
+  public static void setFacetSort(SolrQuery solrQuery, String sortType) {
+    solrQuery.setFacet(true);
+    solrQuery.setFacetSort(sortType);
+  }
+
+  public static void setFacetPivot(SolrQuery solrQuery, int mincount, String... hirarchy) {
+    solrQuery.setFacet(true);
+    setRowCount(solrQuery, 0);
+    solrQuery.set(LogSearchConstants.FACET_PIVOT, hirarchy);
+    solrQuery.set(LogSearchConstants.FACET_PIVOT_MINCOUNT, mincount);
+    setFacetLimit(solrQuery, -1);
+  }
+
+  public static void setFacetDate(SolrQuery solrQuery, String facetField, String from, String to, String unit) {
+    solrQuery.setFacet(true);
+    setRowCount(solrQuery, 0);
+    solrQuery.set(LogSearchConstants.FACET_DATE, facetField);
+    solrQuery.set(LogSearchConstants.FACET_DATE_START, from);
+    solrQuery.set(LogSearchConstants.FACET_DATE_END, to);
+    solrQuery.set(LogSearchConstants.FACET_DATE_GAP, unit);
+    solrQuery.set(LogSearchConstants.FACET_MINCOUNT, 0);
+    setFacetLimit(solrQuery, -1);
+  }
+
+  public static void setFacetRange(SolrQuery solrQuery, String facetField, String from, String to, String unit) {
+    solrQuery.setFacet(true);
+    setRowCount(solrQuery, 0);
+    solrQuery.set(LogSearchConstants.FACET_RANGE, facetField);
+    solrQuery.set(LogSearchConstants.FACET_RANGE_START, from);
+    solrQuery.set(LogSearchConstants.FACET_RANGE_END, to);
+    solrQuery.set(LogSearchConstants.FACET_RANGE_GAP, unit);
+    solrQuery.set(LogSearchConstants.FACET_MINCOUNT, 0);
+    setFacetLimit(solrQuery, -1);
+  }
+
+  public static void setFacetLimit(SolrQuery solrQuery, int limit) {
+    solrQuery.set("facet.limit", limit);
+  }
+
+  //Solr Group Methods
+  public static void setGroupField(SolrQuery solrQuery, String groupField, int rows) {
+    solrQuery.set(LogSearchConstants.FACET_GROUP, true);
+    solrQuery.set(LogSearchConstants.FACET_GROUP_FIELD, groupField);
+    solrQuery.set(LogSearchConstants.FACET_GROUP_MAIN, true);
+    setRowCount(solrQuery, rows);
+  }
+
+  //Main Query
+  public static void setMainQuery(SolrQuery solrQuery, String query) {
+    String defalultQuery = "*:*";
+    if (StringUtils.isBlank(query)){
+      solrQuery.setQuery(defalultQuery);
+    }else{
+      solrQuery.setQuery(query);
+    }
+  }
+
+  public static void setStart(SolrQuery solrQuery, int start) {
+    int defaultStart = 0;
+    if (start > defaultStart) {
+      solrQuery.setStart(start);
+    } else {
+      solrQuery.setStart(defaultStart);
+    }
+  }
+
+  //Set Number of Rows
+  public static void setRowCount(SolrQuery solrQuery, int rows) {
+    if (rows > 0) {
+      solrQuery.setRows(rows);
+    } else {
+      solrQuery.setRows(0);
+      solrQuery.remove(LogSearchConstants.SORT);
+    }
+  }
+
+  //Solr Facet Methods
+  public static void setFacetFieldWithMincount(SolrQuery solrQuery, String facetField, int minCount) {
+    solrQuery.setFacet(true);
+    setRowCount(solrQuery, 0);
+    solrQuery.set(LogSearchConstants.FACET_FIELD, facetField);
+    solrQuery.set(LogSearchConstants.FACET_MINCOUNT, minCount);
+    setFacetLimit(solrQuery, -1);
+  }
+  
+  public static void setFl(SolrQuery solrQuery,String field){
+    solrQuery.set(LogSearchConstants.FL, field);
+  }
+  
 }
\ No newline at end of file
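
Since the query helpers are now static, a SolrQuery can be assembled without injecting SolrUtil. A minimal sketch, not part of the patch; the "logtime" field name and the date bounds are assumptions:

import org.apache.ambari.logsearch.util.SolrUtil;
import org.apache.solr.client.solrj.SolrQuery;

public class FacetQuerySketch {
  public static void main(String[] args) {
    SolrQuery query = new SolrQuery();
    SolrUtil.setMainQuery(query, null);   // blank or null input falls back to "*:*"
    // Daily histogram over an assumed "logtime" date field for one week.
    SolrUtil.setFacetRange(query, "logtime",
        "2016-09-01T00:00:00Z", "2016-09-08T00:00:00Z", "+1DAY");
    System.out.println(query);            // inspect the generated Solr parameters
  }
}

Note that setFacetRange, as written above, also forces rows=0 and facet.limit=-1, so such a query returns facet counts only.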

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/XMLPropertiesUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/XMLPropertiesUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/XMLPropertiesUtil.java
deleted file mode 100644
index ff80e73..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/XMLPropertiesUtil.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.util;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Properties;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-
-import org.apache.log4j.Logger;
-import org.springframework.util.DefaultPropertiesPersister;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-
-public class XMLPropertiesUtil extends DefaultPropertiesPersister {
-  private static Logger logger = Logger.getLogger(XMLPropertiesUtil.class);
-
-  public XMLPropertiesUtil() {
-  }
-
-  @Override
-  public void loadFromXml(Properties properties, InputStream inputStream)
-      throws IOException {
-    try {
-      DocumentBuilderFactory xmlDocumentBuilderFactory = DocumentBuilderFactory
-          .newInstance();
-      xmlDocumentBuilderFactory.setIgnoringComments(true);
-      xmlDocumentBuilderFactory.setNamespaceAware(true);
-      DocumentBuilder xmlDocumentBuilder = xmlDocumentBuilderFactory
-          .newDocumentBuilder();
-      Document xmlDocument = xmlDocumentBuilder.parse(inputStream);
-      if (xmlDocument != null) {
-        xmlDocument.getDocumentElement().normalize();
-        NodeList nList = xmlDocument.getElementsByTagName("property");
-        if (nList != null) {
-          for (int temp = 0; temp < nList.getLength(); temp++) {
-            Node nNode = nList.item(temp);
-            if (nNode.getNodeType() == Node.ELEMENT_NODE) {
-              Element eElement = (Element) nNode;
-              String propertyName = "";
-              String propertyValue = "";
-              if (eElement.getElementsByTagName("name") != null
-                  && eElement.getElementsByTagName("name").item(0) != null) {
-                propertyName = eElement.getElementsByTagName("name").item(0)
-                    .getTextContent().trim();
-              }
-              if (eElement.getElementsByTagName("value") != null
-                  && eElement.getElementsByTagName("value").item(0) != null) {
-                propertyValue = eElement.getElementsByTagName("value").item(0)
-                    .getTextContent().trim();
-              }
-              if (propertyName != null && !propertyName.isEmpty()) {
-                properties.put(propertyName, propertyValue);
-              }
-            }
-          }
-        }
-      }
-    } catch (Exception e) {
-      logger.error("Error loading xml properties ", e);
-    }
-  }
-
-}
\ No newline at end of file
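
XMLPropertiesUtil is deleted here; the LdapUtil hunk further down shows its replacement, org.apache.ambari.logsearch.common.XMLPropertiesHelper, used with the same loadFromXml(Properties, InputStream) signature. A minimal sketch, assuming the relocated class keeps the parsing behaviour shown above:

import java.util.Properties;

import org.apache.ambari.logsearch.common.XMLPropertiesHelper;
import org.springframework.core.io.ClassPathResource;

public class AdminSiteSketch {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    // Reads Ambari-style <property><name>...</name><value>...</value></property> entries.
    new XMLPropertiesHelper().loadFromXml(props,
        new ClassPathResource("logsearch-admin-site.xml").getInputStream());
    props.list(System.out);
  }
}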

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchAuthFailureHandler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchAuthFailureHandler.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchAuthFailureHandler.java
index d706beb..fdec8d3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchAuthFailureHandler.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchAuthFailureHandler.java
@@ -24,27 +24,20 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.log4j.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.core.AuthenticationException;
 import org.springframework.security.web.authentication.ExceptionMappingAuthenticationFailureHandler;
 
 public class LogsearchAuthFailureHandler extends ExceptionMappingAuthenticationFailureHandler {
-
   private static final Logger logger = Logger.getLogger(LogsearchAuthFailureHandler.class);
 
-  @Autowired
-  RESTErrorUtil restErrorUtil;
-
-  public void onAuthenticationFailure(HttpServletRequest request, HttpServletResponse response,
-                                      AuthenticationException exception) throws IOException, ServletException {
+  public void onAuthenticationFailure(HttpServletRequest request, HttpServletResponse response, AuthenticationException exception)
+      throws IOException, ServletException {
     logger.debug(" AuthFailureHandler + onAuthenticationFailure");
     // TODO UI side handle status and redirect to login page with proper
     response.setContentType("application/json");
     response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
     response.getOutputStream().println("{ \"error\": \"" + "login failed !!" + "\" }");
-
   }
 
 }
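
The failure handler no longer needs a RESTErrorUtil field; on a failed login it simply answers HTTP 401 with a small JSON body. The project's real Spring Security filter chain is not part of this hunk, so the wiring below is only an assumed sketch using the standard form-login filter setter:

import org.apache.ambari.logsearch.web.authenticate.LogsearchAuthFailureHandler;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;

public class SecurityWiringSketch {
  public UsernamePasswordAuthenticationFilter loginFilter() {
    UsernamePasswordAuthenticationFilter filter = new UsernamePasswordAuthenticationFilter();
    // Failed logins now answer 401 with { "error": "login failed !!" } instead of raising a REST exception.
    filter.setAuthenticationFailureHandler(new LogsearchAuthFailureHandler());
    return filter;
  }
}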

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java
index 62f762c..1b24c06 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java
@@ -24,23 +24,17 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.log4j.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.core.AuthenticationException;
 import org.springframework.security.web.authentication.LoginUrlAuthenticationEntryPoint;
 
 class LogsearchAuthenticationEntryPoint extends LoginUrlAuthenticationEntryPoint {
-
-  @Autowired
-  RESTErrorUtil restErrorUtil;
+  private static final Logger logger = Logger.getLogger(LogsearchAuthenticationEntryPoint.class);
 
   public LogsearchAuthenticationEntryPoint(String loginFormUrl) {
     super(loginFormUrl);
   }
 
-  private static final Logger logger = Logger.getLogger(LogsearchAuthenticationEntryPoint.class);
-
   @Override
   public void commence(HttpServletRequest request, HttpServletResponse response, AuthenticationException authException)
     throws IOException, ServletException {

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
index 6443d62..29fd5b2 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
@@ -40,7 +40,6 @@ import javax.servlet.http.Cookie;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.security.authentication.AbstractAuthenticationToken;
@@ -54,6 +53,7 @@ import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.security.core.context.SecurityContextImpl;
 import org.springframework.security.core.userdetails.User;
 import org.springframework.security.core.userdetails.UserDetails;
+import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.commons.collections.iterators.IteratorEnumeration;
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
@@ -102,18 +102,18 @@ public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter {
   @Override
   public void init(FilterConfig conf) throws ServletException {
     final FilterConfig globalConf = conf;
-    String hostName = PropertiesUtil.getProperty(HOST_NAME, "localhost");
+    String hostName = PropertiesHelper.getProperty(HOST_NAME, "localhost");
     final Map<String, String> params = new HashMap<String, String>();
     if (spnegoEnable) {
       authType = KerberosAuthenticationHandler.TYPE;
     }
     params.put(AUTH_TYPE,authType);
-    params.put(NAME_RULES_PARAM,PropertiesUtil.getProperty(NAME_RULES, "DEFAULT"));
-    params.put(TOKEN_VALID_PARAM, PropertiesUtil.getProperty(TOKEN_VALID, "30"));
-    params.put(COOKIE_DOMAIN_PARAM, PropertiesUtil.getProperty(COOKIE_DOMAIN, hostName));
-    params.put(COOKIE_PATH_PARAM, PropertiesUtil.getProperty(COOKIE_PATH, "/"));
-    params.put(PRINCIPAL_PARAM,PropertiesUtil.getProperty(PRINCIPAL,""));
-    params.put(KEYTAB_PARAM,PropertiesUtil.getProperty(KEYTAB,""));
+    params.put(NAME_RULES_PARAM,PropertiesHelper.getProperty(NAME_RULES, "DEFAULT"));
+    params.put(TOKEN_VALID_PARAM, PropertiesHelper.getProperty(TOKEN_VALID, "30"));
+    params.put(COOKIE_DOMAIN_PARAM, PropertiesHelper.getProperty(COOKIE_DOMAIN, hostName));
+    params.put(COOKIE_PATH_PARAM, PropertiesHelper.getProperty(COOKIE_PATH, "/"));
+    params.put(PRINCIPAL_PARAM,PropertiesHelper.getProperty(PRINCIPAL,""));
+    params.put(KEYTAB_PARAM,PropertiesHelper.getProperty(KEYTAB,""));
     FilterConfig myConf = new FilterConfig() {
       @Override
       public ServletContext getServletContext() {
@@ -196,7 +196,7 @@ public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter {
     }
     if (!isLoginRequest(httpRequest) && spnegoEnable
         && (existingAuth == null || !existingAuth.isAuthenticated())) {
-      KerberosName.setRules(PropertiesUtil.getProperty(NAME_RULES, "DEFAULT"));
+      KerberosName.setRules(PropertiesHelper.getProperty(NAME_RULES, "DEFAULT"));
       String userName = getUsernameFromRequest(httpRequest);
       if ((existingAuth == null || !existingAuth.isAuthenticated())
           && (!StringUtils.isEmpty(userName))) {
@@ -230,12 +230,12 @@ public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter {
   }
 
   private void isSpnegoEnable() {
-    spnegoEnable = PropertiesUtil.getBooleanProperty(KERBEROS_ENABLE, false);
+    spnegoEnable = PropertiesHelper.getBooleanProperty(KERBEROS_ENABLE, false);
     if (spnegoEnable) {
       spnegoEnable = false;
-      String keytab = PropertiesUtil.getProperty(KEYTAB);
-      String principal = PropertiesUtil.getProperty(PRINCIPAL);
-      String hostname = PropertiesUtil.getProperty(HOST_NAME);
+      String keytab = PropertiesHelper.getProperty(KEYTAB);
+      String principal = PropertiesHelper.getProperty(PRINCIPAL);
+      String hostname = PropertiesHelper.getProperty(HOST_NAME);
       if (!StringUtils.isEmpty(keytab) && !StringUtils.isEmpty(principal)
           && !StringUtils.isEmpty(hostname)) {
         spnegoEnable = true;
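
PropertiesUtil lookups become PropertiesHelper lookups throughout the filter. A minimal sketch of the getter-with-default pattern; the property keys below are placeholders, since the values of the constants used in the hunk (KERBEROS_ENABLE, HOST_NAME, and so on) are not shown:

import org.apache.ambari.logsearch.common.PropertiesHelper;

public class SpnegoToggleSketch {
  public static void main(String[] args) {
    // Placeholder keys; the filter reads its real keys through constants such as KERBEROS_ENABLE.
    boolean spnegoEnabled = PropertiesHelper.getBooleanProperty("logsearch.spnego.kerberos.enabled", false);
    String hostName = PropertiesHelper.getProperty("logsearch.spnego.kerberos.host", "localhost");
    System.out.println("spnego=" + spnegoEnabled + " host=" + hostName);
  }
}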

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LdapUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LdapUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LdapUtil.java
index 99940df..6248e74 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LdapUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LdapUtil.java
@@ -21,8 +21,8 @@ package org.apache.ambari.logsearch.web.security;
 import java.io.IOException;
 import java.util.Properties;
 
-import org.apache.ambari.logsearch.util.PropertiesUtil;
-import org.apache.ambari.logsearch.util.XMLPropertiesUtil;
+import org.apache.ambari.logsearch.common.PropertiesHelper;
+import org.apache.ambari.logsearch.common.XMLPropertiesHelper;
 import org.apache.log4j.Logger;
 import org.springframework.core.io.ClassPathResource;
 
@@ -94,13 +94,13 @@ public class LdapUtil {
    */
   public static LdapProperties loadLdapProperties() {
     LdapProperties ldapServerProperties = null;
-    String ldapConfigFileName = PropertiesUtil.getProperty("logsearch.login.ldap.config", "logsearch-admin-site.xml");
+    String ldapConfigFileName = PropertiesHelper.getProperty("logsearch.login.ldap.config", "logsearch-admin-site.xml");
     Properties props = null;
     ClassPathResource resource = new ClassPathResource(ldapConfigFileName);
     if (resource != null) {
       try {
         props = new Properties();
-        new XMLPropertiesUtil().loadFromXml(props, resource.getInputStream());
+        new XMLPropertiesHelper().loadFromXml(props, resource.getInputStream());
         ldapServerProperties = getLdapServerProperties(props);
       } catch (IOException e) {
         logger.error("Ldap configudation file loading failed : " + e.getMessage());

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java
index d82bf8e..0c102c3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java
@@ -21,7 +21,7 @@ package org.apache.ambari.logsearch.web.security;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.ambari.logsearch.util.PropertiesUtil;
+import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.springframework.security.authentication.AuthenticationProvider;
 import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
 import org.springframework.security.core.GrantedAuthority;
@@ -61,7 +61,7 @@ public abstract class LogsearchAbstractAuthenticationProvider implements Authent
   public boolean isEnable(AUTH_METHOD method) {
     String methodName = method.name().toLowerCase();
     String property = AUTH_METHOD_PROP_START_WITH + methodName + ".enable";
-    boolean isEnable = PropertiesUtil.getBooleanProperty(property, false);
+    boolean isEnable = PropertiesHelper.getBooleanProperty(property, false);
     return isEnable;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
index 8c69152..0f7377d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
@@ -54,12 +54,6 @@ public class LogsearchAuthenticationProvider extends
   @Autowired
   LogsearchExternalServerAuthenticationProvider externalServerAuthenticationProvider;
 
-  @Autowired
-  JSONUtil jsonUtil;
-
-  @Autowired
-  private UserDetailsService userService;
-
   @Override
   public Authentication authenticate(Authentication authentication)
     throws AuthenticationException {
@@ -117,7 +111,7 @@ public class LogsearchAuthenticationProvider extends
       }
       return authentication;
     } finally {
-      String jsonStr = jsonUtil.mapToJSON(auditRecord);
+      String jsonStr = JSONUtil.mapToJSON(auditRecord);
       if (isSuccess) {
         auditLogger.info(jsonStr);
       } else {
@@ -126,11 +120,6 @@ public class LogsearchAuthenticationProvider extends
     }
   }
 
-  /**
-   * @param authentication
-   * @param authMethod
-   * @return
-   */
   public Authentication doAuth(Authentication authentication, AUTH_METHOD authMethod) {
     if (authMethod.equals(AUTH_METHOD.LDAP)) {
       authentication = ldapAuthenticationProvider.authenticate(authentication);
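
With the injected JSONUtil and UserDetailsService fields removed, the audit record is serialized through a static call. A minimal sketch; the record fields are illustrative only:

import java.util.HashMap;

import org.apache.ambari.logsearch.util.JSONUtil;

public class AuditRecordSketch {
  public static void main(String[] args) {
    HashMap<String, Object> auditRecord = new HashMap<String, Object>();
    auditRecord.put("user", "admin");        // illustrative fields only
    auditRecord.put("auth_method", "ldap");
    String jsonStr = JSONUtil.mapToJSON(auditRecord);  // static call, no injected JSONUtil needed
    System.out.println(jsonStr);
  }
}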

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
index 72ee60f..4eea3e1 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
@@ -23,9 +23,9 @@ import java.util.List;
 
 import javax.annotation.PostConstruct;
 
-import org.apache.ambari.logsearch.util.ExternalServerClient;
+import org.apache.ambari.logsearch.common.ExternalServerClient;
+import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.util.JSONUtil;
-import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
@@ -98,14 +98,11 @@ public class LogsearchExternalServerAuthenticationProvider extends
   @Autowired
   ExternalServerClient externalServerClient;
 
-  @Autowired
-  JSONUtil jsonUtil;
-
   private String loginAPIURL = "/api/v1/users/$USERNAME/privileges?fields=*";// default
 
   @PostConstruct
   public void initialization() {
-    loginAPIURL = PropertiesUtil.getProperty(AUTH_METHOD_PROP_START_WITH
+    loginAPIURL = PropertiesHelper.getProperty(AUTH_METHOD_PROP_START_WITH
         + "external_auth.login_url", loginAPIURL);
   }
 
@@ -160,11 +157,11 @@ public class LogsearchExternalServerAuthenticationProvider extends
    */
   @SuppressWarnings("static-access")
   private boolean isAllowedRole(String responseJson) {
-    String allowedRoleList[] = PropertiesUtil
+    String allowedRoleList[] = PropertiesHelper
         .getPropertyStringList(ALLOWED_ROLE_PROP);
 
     List<String> values = new ArrayList<String>();
-    jsonUtil.getValuesOfKey(responseJson,
+    JSONUtil.getValuesOfKey(responseJson,
         PRIVILEGE_INFO.PERMISSION_NAME.toString(), values);
     if (values.isEmpty())
       return true;
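
JSONUtil.getValuesOfKey is likewise a static call now. A minimal sketch; the response body shape and the "permission_name" literal (standing in for PRIVILEGE_INFO.PERMISSION_NAME.toString()) are assumptions:

import java.util.ArrayList;
import java.util.List;

import org.apache.ambari.logsearch.util.JSONUtil;

public class PrivilegeParseSketch {
  public static void main(String[] args) {
    // Assumed response shape; only the permission_name values are collected.
    String responseJson = "{\"items\":[{\"PrivilegeInfo\":{\"permission_name\":\"AMBARI.ADMINISTRATOR\"}}]}";
    List<String> permissions = new ArrayList<String>();
    JSONUtil.getValuesOfKey(responseJson, "permission_name", permissions);
    System.out.println(permissions);
  }
}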

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/applicationContext.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/applicationContext.xml b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/applicationContext.xml
index f334e67..b457a1d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/applicationContext.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/applicationContext.xml
@@ -37,9 +37,9 @@ http://www.springframework.org/schema/util/spring-util.xsd">
 
 	<context:component-scan base-package="org.apache.ambari.logsearch" />
 	<task:annotation-driven />
-	<bean id="xmlPropertyConfigurer" class="org.apache.ambari.logsearch.util.XMLPropertiesUtil" />
+	<bean id="xmlPropertyConfigurer" class="org.apache.ambari.logsearch.common.XMLPropertiesHelper" />
 	
-	<bean id="propertyConfigurer" class="org.apache.ambari.logsearch.util.PropertiesUtil">
+	<bean id="propertyConfigurer" class="org.apache.ambari.logsearch.common.PropertiesHelper">
 		<property name="locations">
 			<list>
 				<value>classpath:default.properties</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/SolrDaoBaseTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/SolrDaoBaseTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/SolrDaoBaseTest.java
index 0ded95d..ba5b074 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/SolrDaoBaseTest.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/SolrDaoBaseTest.java
@@ -232,7 +232,6 @@ public class SolrDaoBaseTest {
     expectedException.expect(WebApplicationException.class);
     
     SolrDaoBase dao = new SolrDaoBase(LogType.SERVICE) {};
-    dao.restErrorUtil = new RESTErrorUtil();
     dao.process(new SolrQuery());
   }
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserConfigSolrDaoTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserConfigSolrDaoTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserConfigSolrDaoTest.java
index 5ef286f..dddbf31 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserConfigSolrDaoTest.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserConfigSolrDaoTest.java
@@ -33,20 +33,11 @@ import org.easymock.Capture;
 import org.easymock.CaptureType;
 import org.easymock.EasyMock;
 import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.test.context.ContextConfiguration;
-import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
 
 import junit.framework.Assert;
 
-@RunWith(SpringJUnit4ClassRunner.class)
-@ContextConfiguration(locations = { "/applicationContext.xml" })
 public class UserConfigSolrDaoTest {
   
-  @Autowired
-  private UserConfigSolrDao dao;
-  
   @Test
   public void testUserConfigDaoPostConstructor() throws Exception {
     SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
@@ -78,6 +69,8 @@ public class UserConfigSolrDaoTest {
     EasyMock.expect(mockSolrClient.commit()).andReturn(updateResponse);
     EasyMock.replay(mockSolrClient);
     
+    UserConfigSolrDao dao = new UserConfigSolrDao();
+    dao.postConstructor();
     dao.solrClient = mockSolrClient;
     dao.isZkConnectString = true;
     
@@ -119,6 +112,8 @@ public class UserConfigSolrDaoTest {
     EasyMock.expect(mockSolrClient.commit()).andReturn(updateResponse);
     EasyMock.replay(mockSolrClient);
     
+    UserConfigSolrDao dao = new UserConfigSolrDao();
+    dao.postConstructor();
     dao.solrClient = mockSolrClient;
     dao.isZkConnectString = true;
     

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext.xml b/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext.xml
index 5e24d88..38437a4 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext.xml
@@ -37,9 +37,9 @@ http://www.springframework.org/schema/util/spring-util.xsd">
 
 	<context:component-scan base-package="org.apache.ambari.logsearch" />
 	<task:annotation-driven />
-	<bean id="xmlPropertyConfigurer" class="org.apache.ambari.logsearch.util.XMLPropertiesUtil" />
+	<bean id="xmlPropertyConfigurer" class="org.apache.ambari.logsearch.common.XMLPropertiesHelper" />
 	
-	<bean id="propertyConfigurer" class="org.apache.ambari.logsearch.util.PropertiesUtil">
+	<bean id="propertyConfigurer" class="org.apache.ambari.logsearch.common.PropertiesHelper">
 		<property name="locations">
 			<list>
 				<value>classpath:default.properties</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext_testManagers.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext_testManagers.xml b/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext_testManagers.xml
deleted file mode 100644
index f1d1dbe..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext_testManagers.xml
+++ /dev/null
@@ -1,53 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<beans xmlns="http://www.springframework.org/schema/beans"
-xmlns:aop="http://www.springframework.org/schema/aop" xmlns:jee="http://www.springframework.org/schema/jee"
-xmlns:tx="http://www.springframework.org/schema/tx" xmlns:context="http://www.springframework.org/schema/context"
-xmlns:task="http://www.springframework.org/schema/task" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xmlns:util="http://www.springframework.org/schema/util"
-xsi:schemaLocation="http://www.springframework.org/schema/aop
-http://www.springframework.org/schema/aop/spring-aop-4.2.xsd
-http://www.springframework.org/schema/beans
-http://www.springframework.org/schema/beans/spring-beans-4.2.xsd
-http://www.springframework.org/schema/context
-http://www.springframework.org/schema/context/spring-context-4.2.xsd
-http://www.springframework.org/schema/jee
-http://www.springframework.org/schema/jee/spring-jee-4.2.xsd
-http://www.springframework.org/schema/tx
-http://www.springframework.org/schema/tx/spring-tx-4.2.xsd
-http://www.springframework.org/schema/task
-http://www.springframework.org/schema/task/spring-task-4.2.xsd
-http://www.springframework.org/schema/util
-http://www.springframework.org/schema/util/spring-util.xsd">
-
-	<context:component-scan base-package="org.apache.ambari.logsearch.manager.dao, org.apache.ambari.logsearch.util" />
-	<task:annotation-driven />
-	<bean id="xmlPropertyConfigurer" class="org.apache.ambari.logsearch.util.XMLPropertiesUtil" />
-	
-	<bean id="propertyConfigurer" class="org.apache.ambari.logsearch.util.PropertiesUtil">
-		<property name="locations">
-			<list>
-				<value>classpath:default.properties</value>
-				<value>classpath:logsearch.properties</value>
-				<value>classpath:logsearch-admin-site.xml</value>
-			</list>
-		</property>
-		<property name="propertiesPersister" ref="xmlPropertyConfigurer" />
-	</bean>
-	
-</beans>


[27/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserDaoTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserDaoTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserDaoTest.java
deleted file mode 100644
index 703d877..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserDaoTest.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.dao;
-
-import java.util.Collection;
-
-import org.apache.ambari.logsearch.web.model.Role;
-import org.apache.ambari.logsearch.web.model.User;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.security.core.GrantedAuthority;
-import org.springframework.test.context.ContextConfiguration;
-import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
-
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertTrue;
-
-@RunWith(SpringJUnit4ClassRunner.class)
-@ContextConfiguration(locations = { "/applicationContext.xml" })
-public class UserDaoTest {
-
-  @Autowired
-  private UserDao dao;
-  
-  @Test
-  public void testUserDaoInitAndFindUser() throws Exception {
-    User user = dao.loadUserByUsername("testUserName");
-    assertEquals(user.getUsername(), "testUserName");
-    assertEquals(user.getFirstName(), "Test User Name");
-    assertEquals(user.getLastName(), "Test User Name");
-    
-    Collection<? extends GrantedAuthority> authorities = user.getAuthorities();
-    assertTrue(authorities.size() == 1);
-    
-    Role authority = (Role)authorities.iterator().next();
-    assertEquals(authority.getName(), "ROLE_USER");
-    assertTrue(authority.getPrivileges().size() == 1);
-    assertEquals(authority.getPrivileges().get(0).getName(), "READ_PRIVILEGE");
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext.xml b/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext.xml
deleted file mode 100644
index 38437a4..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/test/resources/applicationContext.xml
+++ /dev/null
@@ -1,53 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<beans xmlns="http://www.springframework.org/schema/beans"
-xmlns:aop="http://www.springframework.org/schema/aop" xmlns:jee="http://www.springframework.org/schema/jee"
-xmlns:tx="http://www.springframework.org/schema/tx" xmlns:context="http://www.springframework.org/schema/context"
-xmlns:task="http://www.springframework.org/schema/task" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xmlns:util="http://www.springframework.org/schema/util"
-xsi:schemaLocation="http://www.springframework.org/schema/aop
-http://www.springframework.org/schema/aop/spring-aop-4.2.xsd
-http://www.springframework.org/schema/beans
-http://www.springframework.org/schema/beans/spring-beans-4.2.xsd
-http://www.springframework.org/schema/context
-http://www.springframework.org/schema/context/spring-context-4.2.xsd
-http://www.springframework.org/schema/jee
-http://www.springframework.org/schema/jee/spring-jee-4.2.xsd
-http://www.springframework.org/schema/tx
-http://www.springframework.org/schema/tx/spring-tx-4.2.xsd
-http://www.springframework.org/schema/task
-http://www.springframework.org/schema/task/spring-task-4.2.xsd
-http://www.springframework.org/schema/util
-http://www.springframework.org/schema/util/spring-util.xsd">
-
-	<context:component-scan base-package="org.apache.ambari.logsearch" />
-	<task:annotation-driven />
-	<bean id="xmlPropertyConfigurer" class="org.apache.ambari.logsearch.common.XMLPropertiesHelper" />
-	
-	<bean id="propertyConfigurer" class="org.apache.ambari.logsearch.common.PropertiesHelper">
-		<property name="locations">
-			<list>
-				<value>classpath:default.properties</value>
-				<value>classpath:logsearch.properties</value>
-				<value>classpath:logsearch-admin-site.xml</value>
-			</list>
-		</property>
-		<property name="propertiesPersister" ref="xmlPropertyConfigurer" />
-	</bean>
-	
-</beans>


[32/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseServiceLogRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseServiceLogRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseServiceLogRequest.java
new file mode 100644
index 0000000..ba4f4a8
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/BaseServiceLogRequest.java
@@ -0,0 +1,186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.BundleIdParamDefinition;
+import org.apache.ambari.logsearch.model.request.DateRangeParamDefinition;
+import org.apache.ambari.logsearch.model.request.ServiceLogParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class BaseServiceLogRequest extends BaseLogRequest
+  implements ServiceLogParamDefinition, BundleIdParamDefinition, DateRangeParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_LEVEL)
+  private String level;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_ADVANCED_SEARCH)
+  private String advancedSearch;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_TREE_PARAMS)
+  private String treeParams;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_E_MESSAGE)
+  private String eMessage;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_G_MUST_NOT)
+  private String gMustNot;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_HOST_NAME)
+  private String hostName;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_COMPONENT_NAME)
+  private String componentName;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_FILE_NAME)
+  private String fileName;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_BUNDLE_ID)
+  private String bundleId;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_FROM)
+  private String from;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_TO)
+  private String to;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_DATE_RANGE_LABEL)
+  private String dateRangeLabel;
+
+  @Override
+  public String getLevel() {
+    return level;
+  }
+
+  @Override
+  public void setLevel(String level) {
+    this.level = level;
+  }
+
+  @Override
+  public String getAdvancedSearch() {
+    return advancedSearch;
+  }
+
+  @Override
+  public void setAdvancedSearch(String advancedSearch) {
+    this.advancedSearch = advancedSearch;
+  }
+
+  @Override
+  public String getTreeParams() {
+    return treeParams;
+  }
+
+  @Override
+  public void setTreeParams(String treeParams) {
+    this.treeParams = treeParams;
+  }
+
+  @Override
+  public String geteMessage() {
+    return eMessage;
+  }
+
+  @Override
+  public void seteMessage(String eMessage) {
+    this.eMessage = eMessage;
+  }
+
+  @Override
+  public String getgMustNot() {
+    return gMustNot;
+  }
+
+  @Override
+  public void setgMustNot(String gMustNot) {
+    this.gMustNot = gMustNot;
+  }
+
+  @Override
+  public String getHostName() {
+    return hostName;
+  }
+
+  @Override
+  public void setHostName(String hostName) {
+    this.hostName = hostName;
+  }
+
+  @Override
+  public String getComponentName() {
+    return componentName;
+  }
+
+  @Override
+  public void setComponentName(String componentName) {
+    this.componentName = componentName;
+  }
+
+  @Override
+  public String getFileName() {
+    return fileName;
+  }
+
+  @Override
+  public void setFileName(String fileName) {
+    this.fileName = fileName;
+  }
+
+  @Override
+  public String getBundleId() {
+    return bundleId;
+  }
+
+  @Override
+  public void setBundleId(String bundleId) {
+    this.bundleId = bundleId;
+  }
+
+  @Override
+  public String getFrom() {
+    return from;
+  }
+
+  @Override
+  public void setFrom(String from) {
+    this.from = from;
+  }
+
+  @Override
+  public String getTo() {
+    return to;
+  }
+
+  @Override
+  public void setTo(String to) {
+    this.to = to;
+  }
+
+  @Override
+  public String getDateRangeLabel() {
+    return dateRangeLabel;
+  }
+
+  @Override
+  public void setDateRangeLabel(String dateRangeLabel) {
+    this.dateRangeLabel = dateRangeLabel;
+  }
+}
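
BaseServiceLogRequest is a plain bean whose fields carry JAX-RS @QueryParam annotations, so an endpoint would typically receive it via @BeanParam. The resource below is an assumed sketch (the real Log Search resources are not part of this hunk), and the concrete query-parameter names come from LogSearchConstants, which is not shown here:

import javax.ws.rs.BeanParam;
import javax.ws.rs.GET;
import javax.ws.rs.Path;

import org.apache.ambari.logsearch.model.request.impl.BaseServiceLogRequest;

@Path("/service/logs")   // assumed path
public class ServiceLogResourceSketch {
  @GET
  public String list(@BeanParam BaseServiceLogRequest request) {
    // JAX-RS fills the fields from the query string, using the names defined in LogSearchConstants.
    return "level=" + request.getLevel() + ", host=" + request.getHostName();
  }
}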

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/CommonSearchRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/CommonSearchRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/CommonSearchRequest.java
new file mode 100644
index 0000000..7001cf3
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/CommonSearchRequest.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.CommonSearchParamDefinition;
+import org.apache.ambari.logsearch.model.request.SearchRequest;
+
+import javax.ws.rs.QueryParam;
+
+public class CommonSearchRequest implements SearchRequest, CommonSearchParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_START_INDEX)
+  private String startIndex;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_PAGE)
+  private String page;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_PAGE_SIZE)
+  private String pageSize;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_SORT_BY)
+  private String sortBy;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_SORT_TYPE)
+  private String sortType;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_START_TIME)
+  private String startTime;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_END_TIME)
+  private String endTime;
+
+  @Override
+  public String getStartIndex() {
+    return startIndex;
+  }
+
+  @Override
+  public void setStartIndex(String startIndex) {
+    this.startIndex = startIndex;
+  }
+
+  @Override
+  public String getPage() {
+    return page;
+  }
+
+  @Override
+  public void setPage(String page) {
+    this.page = page;
+  }
+
+  @Override
+  public String getPageSize() {
+    return pageSize;
+  }
+
+  @Override
+  public void setPageSize(String pageSize) {
+    this.pageSize = pageSize;
+  }
+
+  @Override
+  public String getSortBy() {
+    return sortBy;
+  }
+
+  @Override
+  public void setSortBy(String sortBy) {
+    this.sortBy = sortBy;
+  }
+
+  @Override
+  public String getSortType() {
+    return sortType;
+  }
+
+  @Override
+  public void setSortType(String sortType) {
+    this.sortType = sortType;
+  }
+
+  @Override
+  public String getStartTime() {
+    return startTime;
+  }
+
+  @Override
+  public void setStartTime(String startTime) {
+    this.startTime = startTime;
+  }
+
+  @Override
+  public String getEndTime() {
+    return endTime;
+  }
+
+  @Override
+  public void setEndTime(String endTime) {
+    this.endTime = endTime;
+  }
+}
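
A note on how these request beans are consumed: CommonSearchRequest and its subclasses carry only field-level @QueryParam annotations, so a JAX-RS 2.x resource would normally aggregate them with @BeanParam. The resource below is only an illustrative sketch; its class name, path and return value are assumptions and are not part of this patch.

import javax.ws.rs.BeanParam;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

import org.apache.ambari.logsearch.model.request.impl.CommonSearchRequest;

// Hypothetical resource, shown only to illustrate @BeanParam binding of the
// field-annotated request beans introduced in this patch.
@Path("/example/search")
public class ExampleSearchResource {

  @GET
  @Produces(MediaType.APPLICATION_JSON)
  public String search(@BeanParam CommonSearchRequest request) {
    // JAX-RS fills every @QueryParam-annotated field (startIndex, page,
    // pageSize, sortBy, sortType, startTime, endTime) from the query string
    // before this method is invoked.
    return "page=" + request.getPage() + ", pageSize=" + request.getPageSize();
  }
}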

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/FieldAuditBarGraphRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/FieldAuditBarGraphRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/FieldAuditBarGraphRequest.java
new file mode 100644
index 0000000..aa08aaf
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/FieldAuditBarGraphRequest.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.FieldParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class FieldAuditBarGraphRequest extends AuditBarGraphRequest implements FieldParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_FIELD)
+  private String field;
+
+  @Override
+  public String getField() {
+    return field;
+  }
+
+  @Override
+  public void setField(String field) {
+    this.field = field;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/FieldAuditLogRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/FieldAuditLogRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/FieldAuditLogRequest.java
new file mode 100644
index 0000000..67502fa
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/FieldAuditLogRequest.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.FieldParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class FieldAuditLogRequest extends BaseAuditLogRequest implements FieldParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_FIELD)
+  private String field;
+
+  @Override
+  public String getField() {
+    return field;
+  }
+
+  @Override
+  public void setField(String field) {
+    this.field = field;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/LogFileRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/LogFileRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/LogFileRequest.java
new file mode 100644
index 0000000..78c5b33
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/LogFileRequest.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.LogFileParamDefinition;
+import org.apache.ambari.logsearch.model.request.SearchRequest;
+
+import javax.ws.rs.QueryParam;
+
+public class LogFileRequest implements SearchRequest, LogFileParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_COMPONENT)
+  private String component;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_HOST)
+  private String host;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_LOG_TYPE)
+  private String logType;
+
+  @Override
+  public String getComponent() {
+    return component;
+  }
+
+  @Override
+  public void setComponent(String component) {
+    this.component = component;
+  }
+
+  @Override
+  public String getHost() {
+    return host;
+  }
+
+  @Override
+  public void setHost(String host) {
+    this.host = host;
+  }
+
+  @Override
+  public String getLogType() {
+    return logType;
+  }
+
+  @Override
+  public void setLogType(String logType) {
+    this.logType = logType;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/LogFileTailRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/LogFileTailRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/LogFileTailRequest.java
new file mode 100644
index 0000000..d5b91ab
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/LogFileTailRequest.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.LogFileTailParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class LogFileTailRequest extends LogFileRequest implements LogFileTailParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_TAIL_SIZE)
+  private String tailSize;
+
+  @Override
+  public String getTailSize() {
+    return tailSize;
+  }
+
+  @Override
+  public void setTailSize(String tailSize) {
+    this.tailSize = tailSize;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/QueryRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/QueryRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/QueryRequest.java
new file mode 100644
index 0000000..0ce788c
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/QueryRequest.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.QueryParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class QueryRequest extends CommonSearchRequest implements QueryParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_QUERY)
+  private String query;
+
+  @Override
+  public String getQuery() {
+    return query;
+  }
+
+  @Override
+  public void setQuery(String query) {
+    this.query = query;
+  }
+}
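
For completeness, a QueryRequest can also be populated programmatically, e.g. in a unit test. The paging values below are arbitrary and the query payload format is only an assumption, since the query grammar is not part of this hunk.

import org.apache.ambari.logsearch.model.request.impl.QueryRequest;

public class QueryRequestExample {
  public static void main(String[] args) {
    QueryRequest request = new QueryRequest();
    // inherited from CommonSearchRequest
    request.setPage("0");
    request.setPageSize("25");
    request.setSortType("desc");
    // own parameter; the JSON-style payload here is purely illustrative
    request.setQuery("[{\"level\":\"ERROR\"}]");
    System.out.println(request.getQuery());
  }
}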

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceAnyGraphRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceAnyGraphRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceAnyGraphRequest.java
new file mode 100644
index 0000000..a6aadbb
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceAnyGraphRequest.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.AnyGraphParamDefinition;
+import org.apache.ambari.logsearch.model.request.DateRangeParamDefinition;
+import org.apache.ambari.logsearch.model.request.UnitParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class ServiceAnyGraphRequest extends ServiceLogRequest
+  implements AnyGraphParamDefinition, DateRangeParamDefinition, UnitParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_XAXIS)
+  private String xAxis;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_YAXIS)
+  private String yAxis;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_STACK_BY)
+  private String stackBy;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_UNIT)
+  private String unit;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_FROM)
+  private String from;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_TO)
+  private String to;
+
+  @Override
+  public String getxAxis() {
+    return xAxis;
+  }
+
+  @Override
+  public void setxAxis(String xAxis) {
+    this.xAxis = xAxis;
+  }
+
+  @Override
+  public String getyAxis() {
+    return yAxis;
+  }
+
+  @Override
+  public void setyAxis(String yAxis) {
+    this.yAxis = yAxis;
+  }
+
+  @Override
+  public String getStackBy() {
+    return stackBy;
+  }
+
+  @Override
+  public void setStackBy(String stackBy) {
+    this.stackBy = stackBy;
+  }
+
+  @Override
+  public String getUnit() {
+    return unit;
+  }
+
+  @Override
+  public void setUnit(String unit) {
+    this.unit = unit;
+  }
+
+  @Override
+  public String getFrom() {
+    return from;
+  }
+
+  @Override
+  public void setFrom(String from) {
+    this.from = from;
+  }
+
+  @Override
+  public String getTo() {
+    return to;
+  }
+
+  @Override
+  public void setTo(String to) {
+    this.to = to;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceExtremeDatesRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceExtremeDatesRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceExtremeDatesRequest.java
new file mode 100644
index 0000000..8207c5d
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceExtremeDatesRequest.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.BundleIdParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class ServiceExtremeDatesRequest extends CommonSearchRequest implements BundleIdParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_BUNDLE_ID)
+  private String bundleId;
+
+  @Override
+  public String getBundleId() {
+    return bundleId;
+  }
+
+  @Override
+  public void setBundleId(String bundleId) {
+    this.bundleId = bundleId;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceGraphRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceGraphRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceGraphRequest.java
new file mode 100644
index 0000000..1618aa1
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceGraphRequest.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.UnitParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class ServiceGraphRequest extends ServiceLogFileRequest implements UnitParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_UNIT)
+  private String unit;
+
+  @Override
+  public String getUnit() {
+    return unit;
+  }
+
+  @Override
+  public void setUnit(String unit) {
+    this.unit = unit;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogExportRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogExportRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogExportRequest.java
new file mode 100644
index 0000000..6ae0dc9
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogExportRequest.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.FormatParamDefinition;
+import org.apache.ambari.logsearch.model.request.UtcOffsetParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class ServiceLogExportRequest extends ServiceLogFileRequest implements FormatParamDefinition, UtcOffsetParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_FORMAT)
+  private String format;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_UTC_OFFSET)
+  private String utcOffset;
+
+  @Override
+  public String getFormat() {
+    return format;
+  }
+
+  @Override
+  public void setFormat(String format) {
+    this.format = format;
+  }
+
+  @Override
+  public String getUtcOffset() {
+    return utcOffset;
+  }
+
+  @Override
+  public void setUtcOffset(String utcOffset) {
+    this.utcOffset = utcOffset;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogFileRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogFileRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogFileRequest.java
new file mode 100644
index 0000000..6c7078a
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogFileRequest.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import io.swagger.annotations.ApiParam;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.ServiceLogFileParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class ServiceLogFileRequest extends BaseServiceLogRequest implements ServiceLogFileParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_HOST_LOG_FILE)
+  private String hostLogFile;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_COMPONENT_LOG_FILE)
+  private String componentLogFile;
+
+  @Override
+  public String getHostLogFile() {
+    return hostLogFile;
+  }
+
+  @Override
+  public void setHostLogFile(String hostLogFile) {
+    this.hostLogFile = hostLogFile;
+  }
+
+  @Override
+  public String getComponentLogFile() {
+    return componentLogFile;
+  }
+
+  @Override
+  public void setComponentLogFile(String componentLogFile) {
+    this.componentLogFile = componentLogFile;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogRequest.java
new file mode 100644
index 0000000..9a46de3
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogRequest.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.LastPageParamDefinition;
+import org.apache.ambari.logsearch.model.request.ServiceLogSearchParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class ServiceLogRequest extends ServiceLogFileRequest implements ServiceLogSearchParamDefinition, LastPageParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_KEYWORD)
+  private String keyWord;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_SOURCE_LOG_ID)
+  private String sourceLogId;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_KEYWORD_TYPE)
+  private String keywordType;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_TOKEN)
+  private String token;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_LAST_PAGE)
+  private boolean isLastPage;
+
+  @Override
+  public String getKeyWord() {
+    return keyWord;
+  }
+
+  @Override
+  public void setKeyWord(String keyWord) {
+    this.keyWord = keyWord;
+  }
+
+  @Override
+  public String getSourceLogId() {
+    return sourceLogId;
+  }
+
+  @Override
+  public void setSourceLogId(String sourceLogId) {
+    this.sourceLogId = sourceLogId;
+  }
+
+  @Override
+  public String getKeywordType() {
+    return keywordType;
+  }
+
+  @Override
+  public void setKeywordType(String keywordType) {
+    this.keywordType = keywordType;
+  }
+
+  @Override
+  public String getToken() {
+    return token;
+  }
+
+  @Override
+  public void setToken(String token) {
+    this.token = token;
+  }
+
+  @Override
+  public boolean isLastPage() {
+    return isLastPage;
+  }
+
+  @Override
+  public void setLastPage(boolean lastPage) {
+    isLastPage = lastPage;
+  }
+}
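
On the client side these parameters are plain query-string entries. The sketch below uses the standard JAX-RS 2.x client API against a hypothetical endpoint; the literal parameter names ("keyword", "hostLogFile", "isLastPage") and the URL are assumptions, since the actual names come from LogSearchConstants, which is not shown in this hunk.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;

public class ServiceLogClientExample {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    // Hypothetical endpoint and parameter names, shown only to illustrate how
    // a ServiceLogRequest-shaped query string might look on the wire.
    String response = client
        .target("http://localhost:61888/api/v1/service/logs")
        .queryParam("keyword", "Exception")
        .queryParam("hostLogFile", "c6401.ambari.apache.org")
        .queryParam("isLastPage", "false")
        .request()
        .get(String.class);
    System.out.println(response);
    client.close();
  }
}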

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogTruncatedRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogTruncatedRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogTruncatedRequest.java
new file mode 100644
index 0000000..516c328
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/ServiceLogTruncatedRequest.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.LogTruncatedParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class ServiceLogTruncatedRequest extends ServiceLogFileRequest implements LogTruncatedParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_ID)
+  private String id;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_SCROLL_TYPE)
+  private String scrollType;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_NUMBER_ROWS)
+  private String numberRows;
+
+  @Override
+  public String getId() {
+    return id;
+  }
+
+  @Override
+  public void setId(String id) {
+    this.id = id;
+  }
+
+  @Override
+  public String getScrollType() {
+    return scrollType;
+  }
+
+  @Override
+  public void setScrollType(String scrollType) {
+    this.scrollType = scrollType;
+  }
+
+  @Override
+  public String getNumberRows() {
+    return numberRows;
+  }
+
+  @Override
+  public void setNumberRows(String numberRows) {
+    this.numberRows = numberRows;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/SimpleQueryRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/SimpleQueryRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/SimpleQueryRequest.java
new file mode 100644
index 0000000..eec4379
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/SimpleQueryRequest.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.QueryParamDefinition;
+import org.apache.ambari.logsearch.model.request.SearchRequest;
+
+import javax.ws.rs.QueryParam;
+
+
+public class SimpleQueryRequest implements SearchRequest, QueryParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_QUERY)
+  private String query;
+
+  @Override
+  public String getQuery() {
+    return query;
+  }
+
+  @Override
+  public void setQuery(String query) {
+    this.query = query;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/UserConfigRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/UserConfigRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/UserConfigRequest.java
new file mode 100644
index 0000000..c99ea75
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/UserConfigRequest.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.UserConfigParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class UserConfigRequest extends CommonSearchRequest implements UserConfigParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_USER_ID)
+  private String userId;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_FILE_NAME)
+  private String filterName;
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_ROW_TYPE)
+  private String rowType;
+
+  @Override
+  public String getUserId() {
+    return userId;
+  }
+
+  @Override
+  public void setUserId(String userId) {
+    this.userId = userId;
+  }
+
+  @Override
+  public String getFilterName() {
+    return filterName;
+  }
+
+  @Override
+  public void setFilterName(String filterName) {
+    this.filterName = filterName;
+  }
+
+  @Override
+  public String getRowType() {
+    return rowType;
+  }
+
+  @Override
+  public void setRowType(String rowType) {
+    this.rowType = rowType;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/UserExportRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/UserExportRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/UserExportRequest.java
new file mode 100644
index 0000000..1583f94
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/UserExportRequest.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.request.impl;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.model.request.FormatParamDefinition;
+
+import javax.ws.rs.QueryParam;
+
+public class UserExportRequest extends FieldAuditLogRequest implements FormatParamDefinition {
+
+  @QueryParam(LogSearchConstants.REQUEST_PARAM_FORMAT)
+  private String format;
+
+  @Override
+  public String getFormat() {
+    return format;
+  }
+
+  @Override
+  public void setFormat(String format) {
+    this.format = format;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogData.java
new file mode 100644
index 0000000..6df56a1
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogData.java
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.util.Date;
+import java.util.List;
+
+@JsonIgnoreProperties(ignoreUnknown = true)
+public interface AuditLogData extends CommonLogData {
+
+  @JsonProperty("logType")
+  String getLogType();
+
+  void setLogType(String logType);
+
+  @JsonProperty("policy")
+  String getPolicy();
+
+  void setPolicy(String policy);
+
+  @JsonProperty("access")
+  String getAccess();
+
+  void setAccess(String access);
+
+  @JsonProperty("action")
+  String getAction();
+
+  void setAction(String action);
+
+  @JsonProperty("agent")
+  String getAgent();
+
+  void setAgent(String agent);
+
+  @JsonProperty("agentHost")
+  String getAgentHost();
+
+  void setAgentHost(String agentHost);
+
+  @JsonProperty("cliIP")
+  String getClientIp();
+
+  void setClientIp(String clientIp);
+
+  @JsonProperty("cliType")
+  String getClientType();
+
+  void setClientType(String clientType);
+
+  @JsonProperty("reqContext")
+  String getRequestContext();
+
+  void setRequestContext(String requestContext);
+
+  @JsonProperty("enforcer")
+  String getEnforcer();
+
+  void setEnforcer(String enforcer);
+
+  @JsonProperty("evtTime")
+  Date getEventTime();
+
+  void setEventTime(Date eventTime);
+
+  @JsonProperty("reason")
+  String getReason();
+
+  void setReason(String reason);
+
+  @JsonProperty("proxyUsers")
+  List<String> getProxyUsers();
+
+  void setProxyUsers(List<String> proxyUsers);
+
+  @JsonProperty("repo")
+  String getRepo();
+
+  void setRepo(String repo);
+
+  @JsonProperty("repoType")
+  String getRepoType();
+
+  void setRepoType(String repoType);
+
+  @JsonProperty("reqData")
+  String getRequestData();
+
+  void setRequestData(String requestData);
+
+  @JsonProperty("reqUser")
+  String getRequestUser();
+
+  void setRequestUser(String requestUser);
+
+  @JsonProperty("resType")
+  String getResponseType();
+
+  void setResponseType(String responseType);
+
+  @JsonProperty("resource")
+  String getResource();
+
+  void setResource(String resource);
+
+  @JsonProperty("result")
+  Integer getResult();
+
+  void setResult(Integer result);
+
+  @JsonProperty("sess")
+  String getSession();
+
+  void setSession(String session);
+
+  @JsonProperty("tags")
+  List<String> getTags();
+
+  void setTags(List<String> tags);
+
+  @JsonProperty("tags_str")
+  String getTagsStr();
+
+  void setTagsStr(String tagsStr);
+
+  @JsonProperty("text")
+  String getText();
+
+  void setText(String text);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogResponse.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogResponse.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogResponse.java
new file mode 100644
index 0000000..a886a96
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/AuditLogResponse.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+import java.util.List;
+
+@ApiModel
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class AuditLogResponse extends LogSearchResponse<AuditLogData> {
+
+  @ApiModelProperty
+  private List<AuditLogData> logList;
+
+  @Override
+  public List<AuditLogData> getLogList() {
+    return logList;
+  }
+
+  @Override
+  public void setLogList(List<AuditLogData> logList) {
+    this.logList = logList;
+  }
+
+  @Override
+  public int getListSize() {
+    return logList == null ? 0 : logList.size();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/BarGraphData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/BarGraphData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/BarGraphData.java
new file mode 100644
index 0000000..3f2bd6f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/BarGraphData.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Collection;
+
+@ApiModel
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class BarGraphData implements Serializable {
+
+  @ApiModelProperty
+  private Collection<NameValueData> dataCount = new ArrayList<>();
+  @ApiModelProperty
+  private String name;
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public Collection<NameValueData> getDataCount() {
+    return dataCount;
+  }
+
+  public void setDataCount(Collection<NameValueData> dataCount) {
+    this.dataCount = dataCount;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/BarGraphDataListResponse.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/BarGraphDataListResponse.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/BarGraphDataListResponse.java
new file mode 100644
index 0000000..4d5d166
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/BarGraphDataListResponse.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+@ApiModel
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class BarGraphDataListResponse {
+
+  @ApiModelProperty
+  protected Collection<BarGraphData> graphData = new ArrayList<>();
+
+  public Collection<BarGraphData> getGraphData() {
+    return graphData;
+  }
+
+  public void setGraphData(Collection<BarGraphData> graphData) {
+    this.graphData = graphData;
+  }
+}
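
These response POJOs are plain Jackson/Swagger beans, so rendering them is straightforward. A minimal sketch, assuming Jackson's ObjectMapper is what the REST layer ultimately uses to serialize them; the "ERROR" label and the printed output are illustrative only.

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Collections;

import org.apache.ambari.logsearch.model.response.BarGraphData;
import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse;

public class BarGraphSerializationExample {
  public static void main(String[] args) throws Exception {
    BarGraphData graph = new BarGraphData();
    graph.setName("ERROR"); // dataCount keeps its default empty list

    BarGraphDataListResponse response = new BarGraphDataListResponse();
    response.setGraphData(Collections.singletonList(graph));

    // Prints something like: {"graphData":[{"dataCount":[],"name":"ERROR"}]}
    System.out.println(new ObjectMapper().writeValueAsString(response));
  }
}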

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CommonLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CommonLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CommonLogData.java
new file mode 100644
index 0000000..e2bba3f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CommonLogData.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.util.Date;
+
+@JsonIgnoreProperties(ignoreUnknown = true)
+public interface CommonLogData extends LogData {
+
+  @JsonProperty("id")
+  String getId();
+
+  void setId(String id);
+
+  @JsonProperty("case_id")
+  String getCaseId();
+
+  void setCaseId(String caseId);
+
+  @JsonProperty("log_message")
+  String getLogMessage();
+
+  void setLogMessage(String logMessage);
+
+  @JsonProperty("bundle_id")
+  String getBundleId();
+
+  void setBundleId(String bundleId);
+
+  @JsonProperty("logfile_line_number")
+  Integer getLogFileLineNumber();
+
+  void setLogFileLineNumber(Integer logFileLineNumber);
+
+  @JsonProperty("file")
+  String getFile();
+
+  void setFile(String file);
+
+  @JsonProperty("type")
+  String getType();
+
+  void setType(String type);
+
+  @JsonProperty("seq_num")
+  Long getSeqNum();
+
+  void setSeqNum(Long seqNum);
+
+  @JsonProperty("message_md5")
+  String getMessageMd5();
+
+  void setMessageMd5(String messageMd5);
+
+  @JsonProperty("cluster")
+  String getCluster();
+
+  void setCluster(String cluster);
+
+  @JsonProperty("event_count")
+  Long getEventCount();
+
+  void setEventCount(Long eventCount);
+
+  @JsonProperty("event_md5")
+  String getEventMd5();
+
+  void setEventMd5(String eventMd5);
+
+  @JsonProperty("event_dur_ms")
+  Long getEventDurationMs();
+
+  void setEventDurationMs(Long eventDurationMs);
+
+  @JsonProperty("_ttl_")
+  String getTtl();
+
+  void setTtl(String ttl);
+
+  @JsonProperty("_expire_at_")
+  Date getExpire();
+
+  void setExpire(Date expire);
+
+  @JsonProperty("_version_")
+  Long getVersion();
+
+  void setVersion(Long version);
+
+  @JsonProperty("_router_field_")
+  Integer getRouterField();
+
+  void setRouterField(Integer routerField);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/ComponentTypeLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/ComponentTypeLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/ComponentTypeLogData.java
new file mode 100644
index 0000000..6c15f9c
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/ComponentTypeLogData.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+@JsonIgnoreProperties(ignoreUnknown = true)
+public interface ComponentTypeLogData extends LogData {
+
+  @JsonProperty("type")
+  String getType();
+
+  void setType(String type);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CountData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CountData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CountData.java
new file mode 100644
index 0000000..fabaad2
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CountData.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+@ApiModel
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class CountData {
+
+  @ApiModelProperty
+  private String name;
+
+  @ApiModelProperty
+  private Long count;
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public Long getCount() {
+    return count;
+  }
+
+  public void setCount(Long count) {
+    this.count = count;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CountDataListResponse.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CountDataListResponse.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CountDataListResponse.java
new file mode 100644
index 0000000..2543dcc
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CountDataListResponse.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+import java.util.List;
+
+@ApiModel
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class CountDataListResponse extends SearchResponse {
+
+  @ApiModelProperty
+  private List<CountData> vCounts;
+
+  public List<CountData> getvCounts() {
+    return vCounts;
+  }
+
+  public void setvCounts(List<CountData> vCounts) {
+    this.vCounts = vCounts;
+  }
+
+  @Override
+  public int getListSize() {
+    if (vCounts != null)
+      return vCounts.size();
+    return 0;
+  }
+}
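
A short usage sketch (not code from the commit, names are illustrative) showing how the wrapper is populated and how getListSize() behaves, assuming the model classes above are on the classpath:

    import java.util.Collections;

    public class CountResponseSketch {
      public static void main(String[] args) {
        // Build one count entry and wrap it in the list response (made-up values).
        CountData nameNodeCount = new CountData();
        nameNodeCount.setName("hdfs_namenode");
        nameNodeCount.setCount(42L);

        CountDataListResponse response = new CountDataListResponse();
        response.setvCounts(Collections.singletonList(nameNodeCount));

        // getListSize() reports vCounts.size() (1 here) and 0 when the list is null.
        System.out.println(response.getListSize());
      }
    }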

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/GraphData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/GraphData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/GraphData.java
new file mode 100644
index 0000000..e39ec95
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/GraphData.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+import java.io.Serializable;
+import java.util.List;
+
+@ApiModel
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class GraphData implements Serializable {
+
+  @ApiModelProperty
+  private String name;
+
+  @ApiModelProperty
+  private Long count;
+
+  @ApiModelProperty
+  private List<GraphData> dataList;
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public Long getCount() {
+    return count;
+  }
+
+  public void setCount(Long count) {
+    this.count = count;
+  }
+
+  public List<GraphData> getDataList() {
+    return dataList;
+  }
+
+  public void setDataList(List<GraphData> dataList) {
+    this.dataList = dataList;
+  }
+}
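
GraphData is self-referential through dataList, so a single instance can carry a whole tree of counts, for example a host broken down into its components. A small illustrative sketch, not taken from the commit:

    import java.util.Arrays;

    public class GraphDataTreeSketch {
      public static void main(String[] args) {
        // Leaf nodes: per-component counts (values are made up for the example).
        GraphData namenode = new GraphData();
        namenode.setName("hdfs_namenode");
        namenode.setCount(80L);

        GraphData zookeeper = new GraphData();
        zookeeper.setName("zookeeper_server");
        zookeeper.setCount(40L);

        // Root node: the host, with the component breakdown nested in dataList.
        GraphData host = new GraphData();
        host.setName("c6401.ambari.apache.org");
        host.setCount(120L);
        host.setDataList(Arrays.asList(namenode, zookeeper));
      }
    }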

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/GraphDataListResponse.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/GraphDataListResponse.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/GraphDataListResponse.java
new file mode 100644
index 0000000..4357c28
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/GraphDataListResponse.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+import java.util.List;
+
+@ApiModel
+public class GraphDataListResponse {
+
+  @ApiModelProperty
+  protected List<GraphData> graphData;
+
+  public List<GraphData> getGraphData() {
+    return graphData;
+  }
+
+  public void setGraphData(List<GraphData> graphData) {
+    this.graphData = graphData;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/GroupListResponse.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/GroupListResponse.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/GroupListResponse.java
new file mode 100644
index 0000000..92c51f2
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/GroupListResponse.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+import java.util.ArrayList;
+import java.util.List;
+
+@ApiModel
+public class GroupListResponse extends SearchResponse {
+
+  @ApiModelProperty
+  private List<LogData> groupList = new ArrayList<>();
+
+  public List<LogData> getGroupList() {
+    return groupList;
+  }
+
+  public void setGroupList(List<LogData> groupList) {
+    this.groupList = groupList;
+  }
+
+  @Override
+  public int getListSize() {
+    if (groupList != null){
+      return groupList.size();
+    }
+    return 0;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/HostLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/HostLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/HostLogData.java
new file mode 100644
index 0000000..8cab1ab
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/HostLogData.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+@JsonIgnoreProperties(ignoreUnknown = true)
+public interface HostLogData extends LogData {
+  @JsonProperty("host")
+  String getHost();
+
+  void setHost(String host);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogData.java
new file mode 100644
index 0000000..ec6c34c
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogData.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import org.apache.ambari.logsearch.common.Marker;
+
+@Marker
+public interface LogData {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogFileData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogFileData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogFileData.java
new file mode 100644
index 0000000..5a67606
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogFileData.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+@ApiModel
+public class LogFileData {
+
+  @ApiModelProperty
+  private String name;
+
+  @ApiModelProperty
+  private String path;
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public String getPath() {
+    return path;
+  }
+
+  public void setPath(String path) {
+    this.path = path;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogFileDataListResponse.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogFileDataListResponse.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogFileDataListResponse.java
new file mode 100644
index 0000000..57614c3
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogFileDataListResponse.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+import java.util.ArrayList;
+import java.util.List;
+
+@ApiModel
+public class LogFileDataListResponse extends SearchResponse {
+
+  @ApiModelProperty
+  private List<LogFileData> logFiles = new ArrayList<LogFileData>();
+
+  @Override
+  public int getListSize() {
+    if (logFiles == null) {
+      return 0;
+    }
+    return logFiles.size();
+  }
+
+  public List<LogFileData> getLogFiles() {
+    return logFiles;
+  }
+
+  public void setLogFiles(List<LogFileData> logFiles) {
+    this.logFiles = logFiles;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogListResponse.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogListResponse.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogListResponse.java
new file mode 100644
index 0000000..c075fe2
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogListResponse.java
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import java.util.List;
+
+public interface LogListResponse<T extends LogData> {
+  List<T> getLogList();
+
+  void setLogList(List<T> logList);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogSearchResponse.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogSearchResponse.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogSearchResponse.java
new file mode 100644
index 0000000..a63415b
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/LogSearchResponse.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+abstract public class LogSearchResponse<T extends LogData> extends SearchResponse implements LogListResponse<T> {
+}
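
LogSearchResponse ties the paging fields of SearchResponse to the typed log list of LogListResponse; the concrete responses elsewhere in this commit (e.g. ServiceLogResponse, AuditLogResponse) presumably bind the type parameter to a specific LogData. A hypothetical minimal subclass, for illustration only:

    import java.util.ArrayList;
    import java.util.List;

    // Illustrative subclass; not one of the commit's real response types.
    public class ExampleLogResponse extends LogSearchResponse<ComponentTypeLogData> {

      private List<ComponentTypeLogData> logList = new ArrayList<>();

      @Override
      public List<ComponentTypeLogData> getLogList() {
        return logList;
      }

      @Override
      public void setLogList(List<ComponentTypeLogData> logList) {
        this.logList = logList;
      }

      @Override
      public int getListSize() {
        return logList == null ? 0 : logList.size();
      }
    }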

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NameValueData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NameValueData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NameValueData.java
new file mode 100644
index 0000000..3f320e7
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NameValueData.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+@ApiModel
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class NameValueData {
+
+  @ApiModelProperty
+  private String name;
+  @ApiModelProperty
+  private String value;
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public String getValue() {
+    return value;
+  }
+
+  public void setValue(String value) {
+    if (value.contains(".") && (value.contains("e") || value.contains("E"))) {
+      this.value = getExponentialValueReplaced(value);
+    } else {
+      this.value = value;
+    }
+  }
+
+  private String getExponentialValueReplaced(String value) {
+    try{
+      Double number = Double.parseDouble(value);
+      return String.format("%.0f", number);
+    } catch(Exception e){
+      return value;
+    }
+  }
+}
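
The setValue() guard above normalizes values that look like scientific notation (a '.' together with an 'e' or 'E') into plain integer strings, and silently keeps anything it cannot parse. A few worked examples of that behavior, illustrative only:

    public class NameValueSketch {
      public static void main(String[] args) {
        NameValueData nv = new NameValueData();

        nv.setValue("1.845E9");       // parsed as a double, stored as "1845000000"
        nv.setValue("123.45");        // no exponent marker, stored unchanged as "123.45"
        nv.setValue("rate.exceeded"); // has '.' and 'e' but fails to parse, stored unchanged
      }
    }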

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NameValueDataListResponse.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NameValueDataListResponse.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NameValueDataListResponse.java
new file mode 100644
index 0000000..4cb983f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/NameValueDataListResponse.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.model.response;
+
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+import java.util.ArrayList;
+import java.util.List;
+
+@ApiModel
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class NameValueDataListResponse extends SearchResponse {
+  @ApiModelProperty
+  protected List<NameValueData> vNameValues = new ArrayList<>();
+
+  public List<NameValueData> getvNameValues() {
+    return vNameValues;
+  }
+
+  public void setvNameValues(List<NameValueData> vNameValues) {
+    this.vNameValues = vNameValues;
+  }
+
+  @Override
+  public int getListSize() {
+    if (vNameValues != null) {
+      return vNameValues.size();
+    }
+    return 0;
+  }
+}


[40/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code (oleewere)

Posted by ol...@apache.org.
AMBARI-18310. Refactor logsearch portal side code (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e9e834bf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e9e834bf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e9e834bf

Branch: refs/heads/branch-dev-logsearch
Commit: e9e834bfada61da09c1161ec9ffef429859fff0e
Parents: 82419b1
Author: oleewere <ol...@gmail.com>
Authored: Mon Sep 5 18:27:20 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:34:00 2016 +0200

----------------------------------------------------------------------
 .../ambari-logsearch-portal/pom.xml             |     2 +-
 .../ambari-logsearch-portal/production/build.js |    28 -
 .../ambari-logsearch-portal/production/r.js     | 32140 -----------------
 .../configsets/audit_logs/conf/managed-schema   |     1 -
 .../configsets/hadoop_logs/conf/managed-schema  |    24 -
 .../src/main/dev/solrcloud/README.md            |    35 -
 .../src/main/dev/solrcloud/reset_collections.sh |    28 -
 .../src/main/dev/solrcloud/restart_solr.sh      |    37 -
 .../src/main/dev/solrcloud/update_config.sh     |    49 -
 .../org/apache/ambari/logsearch/LogSearch.java  |     1 -
 .../ambari/logsearch/common/ConfigHelper.java   |    34 +-
 .../logsearch/common/ExternalServerClient.java  |    17 +-
 .../logsearch/common/LogSearchConstants.java    |    58 +-
 .../apache/ambari/logsearch/common/Marker.java  |    29 +
 .../ambari/logsearch/common/PropertyMapper.java |    59 +
 .../ambari/logsearch/common/SearchCriteria.java |   304 -
 .../ambari/logsearch/conf/ApiDocConfig.java     |    59 +
 .../logsearch/conf/ApplicationConfig.java       |    84 +
 .../ambari/logsearch/conf/AuthConfig.java       |    87 +
 .../logsearch/conf/SolrAuditLogConfig.java      |   181 +
 .../ambari/logsearch/conf/SolrColumnConfig.java |    36 +
 .../ambari/logsearch/conf/SolrConfig.java       |    50 +
 .../logsearch/conf/SolrConnectionConfig.java    |    49 +
 .../logsearch/conf/SolrKerberosConfig.java      |    48 +
 .../logsearch/conf/SolrServiceLogConfig.java    |   144 +
 .../ambari/logsearch/conf/SolrUserConfig.java   |   104 +
 .../ambari/logsearch/dao/AuditSolrDao.java      |    29 +-
 .../logsearch/dao/ServiceLogsSolrDao.java       |    21 +-
 .../ambari/logsearch/dao/SolrDaoBase.java       |    19 +-
 .../ambari/logsearch/dao/UserConfigSolrDao.java |    32 +-
 .../apache/ambari/logsearch/dao/UserDao.java    |    10 +-
 .../ambari/logsearch/doc/DocConstants.java      |    26 +-
 .../logsearch/graph/GraphDataGenerator.java     |    81 +-
 .../logsearch/graph/GraphDataGeneratorBase.java |    34 +-
 .../logsearch/manager/AuditLogsManager.java     |   597 +
 .../ambari/logsearch/manager/AuditMgr.java      |   630 -
 .../logsearch/manager/JsonManagerBase.java      |    69 +
 .../logsearch/manager/LogFileManager.java       |   155 +
 .../ambari/logsearch/manager/LogFileMgr.java    |   145 -
 .../ambari/logsearch/manager/LogsMgr.java       |  1896 -
 .../ambari/logsearch/manager/ManagerBase.java   |   221 +
 .../ambari/logsearch/manager/MgrBase.java       |   256 -
 .../ambari/logsearch/manager/PublicManager.java |    48 +
 .../ambari/logsearch/manager/PublicMgr.java     |    46 -
 .../logsearch/manager/ServiceLogsManager.java   |  1917 +
 .../logsearch/manager/SessionManager.java       |    76 +
 .../ambari/logsearch/manager/SessionMgr.java    |    76 -
 .../logsearch/manager/UserConfigManager.java    |   276 +
 .../ambari/logsearch/manager/UserConfigMgr.java |   276 -
 .../model/request/AnyGraphParamDefinition.java  |    44 +
 .../model/request/BundleIdParamDefinition.java  |    32 +
 .../request/CommonSearchParamDefinition.java    |    69 +
 .../model/request/DateRangeParamDefinition.java |    38 +
 .../model/request/FieldParamDefinition.java     |    32 +
 .../model/request/FormatParamDefinition.java    |    32 +
 .../model/request/LastPageParamDefinition.java  |    31 +
 .../model/request/LogFileParamDefinition.java   |    45 +
 .../request/LogFileTailParamDefinition.java     |    32 +
 .../model/request/LogParamDefinition.java       |    68 +
 .../request/LogTruncatedParamDefinition.java    |    44 +
 .../model/request/QueryParamDefinition.java     |    32 +
 .../logsearch/model/request/SearchRequest.java  |    25 +
 .../request/ServiceLogFileParamDefinition.java  |    38 +
 .../request/ServiceLogParamDefinition.java      |    80 +
 .../ServiceLogSearchParamDefinition.java        |    50 +
 .../model/request/UnitParamDefinition.java      |    33 +
 .../request/UserConfigParamDefinition.java      |    44 +
 .../model/request/UtcOffsetParamDefinition.java |    32 +
 .../model/request/impl/AnyGraphRequest.java     |   109 +
 .../request/impl/AuditBarGraphRequest.java      |    40 +
 .../model/request/impl/AuditLogRequest.java     |    40 +
 .../model/request/impl/BaseAuditLogRequest.java |    53 +
 .../model/request/impl/BaseLogRequest.java      |   118 +
 .../request/impl/BaseServiceLogRequest.java     |   186 +
 .../model/request/impl/CommonSearchRequest.java |   119 +
 .../request/impl/FieldAuditBarGraphRequest.java |    40 +
 .../request/impl/FieldAuditLogRequest.java      |    41 +
 .../model/request/impl/LogFileRequest.java      |    67 +
 .../model/request/impl/LogFileTailRequest.java  |    40 +
 .../model/request/impl/QueryRequest.java        |    40 +
 .../request/impl/ServiceAnyGraphRequest.java    |   109 +
 .../impl/ServiceExtremeDatesRequest.java        |    41 +
 .../model/request/impl/ServiceGraphRequest.java |    40 +
 .../request/impl/ServiceLogExportRequest.java   |    54 +
 .../request/impl/ServiceLogFileRequest.java     |    54 +
 .../model/request/impl/ServiceLogRequest.java   |    93 +
 .../impl/ServiceLogTruncatedRequest.java        |    66 +
 .../model/request/impl/SimpleQueryRequest.java  |    42 +
 .../model/request/impl/UserConfigRequest.java   |    66 +
 .../model/request/impl/UserExportRequest.java   |    40 +
 .../logsearch/model/response/AuditLogData.java  |   149 +
 .../model/response/AuditLogResponse.java        |    48 +
 .../logsearch/model/response/BarGraphData.java  |    53 +
 .../response/BarGraphDataListResponse.java      |    42 +
 .../logsearch/model/response/CommonLogData.java |   113 +
 .../model/response/ComponentTypeLogData.java    |    32 +
 .../logsearch/model/response/CountData.java     |    50 +
 .../model/response/CountDataListResponse.java   |    48 +
 .../logsearch/model/response/GraphData.java     |    64 +
 .../model/response/GraphDataListResponse.java   |    39 +
 .../model/response/GroupListResponse.java       |    48 +
 .../logsearch/model/response/HostLogData.java   |    30 +
 .../logsearch/model/response/LogData.java       |    25 +
 .../logsearch/model/response/LogFileData.java   |    48 +
 .../model/response/LogFileDataListResponse.java |    48 +
 .../model/response/LogListResponse.java         |    27 +
 .../model/response/LogSearchResponse.java       |    22 +
 .../logsearch/model/response/NameValueData.java |    62 +
 .../response/NameValueDataListResponse.java     |    49 +
 .../logsearch/model/response/NodeData.java      |   111 +
 .../model/response/NodeListResponse.java        |    50 +
 .../model/response/SearchResponse.java          |   110 +
 .../model/response/ServiceLogData.java          |    63 +
 .../model/response/ServiceLogResponse.java      |    48 +
 .../ambari/logsearch/query/QueryGeneration.java |    18 +-
 .../logsearch/query/QueryGenerationBase.java    |    12 +-
 .../AbstractCommonAuditLogRequestConverter.java |    48 +
 .../AbstractCommonSearchRequestConverter.java   |    53 +
 ...bstractCommonServiceLogRequestConverter.java |    56 +
 .../converter/AnyGraphRequestConverter.java     |    39 +
 .../AuditBarGraphRequestConverter.java          |    34 +
 .../converter/AuditLogRequestConverter.java     |    34 +
 .../converter/BaseAuditLogRequestConverter.java |    33 +
 .../BaseServiceLogRequestConverter.java         |    32 +
 .../FieldAuditLogRequestConverter.java          |    34 +
 .../FieldBarGraphRequestConverter.java          |    35 +
 .../converter/LogFileRequestConverter.java      |    37 +
 .../converter/LogFileTailRequestConverter.java  |    38 +
 .../ServiceAnyGraphRequestConverter.java        |    39 +
 .../ServiceExtremeDatesRequestConverter.java    |    35 +
 .../converter/ServiceGraphRequestConverter.java |    36 +
 .../ServiceLogExportRequestConverter.java       |    38 +
 .../ServiceLogFileRequestConverter.java         |    36 +
 .../converter/ServiceLogRequestConverter.java   |    41 +
 .../ServiceLogTruncatedRequestConverter.java    |    38 +
 .../converter/SimpleQueryRequestConverter.java  |    34 +
 .../converter/UserConfigRequestConverter.java   |    38 +
 .../converter/UserExportRequestConverter.java   |    35 +
 .../query/model/AnyGraphSearchCriteria.java     |    25 +
 .../model/AuditBarGraphSearchCriteria.java      |    25 +
 .../query/model/AuditLogSearchCriteria.java     |    25 +
 .../query/model/CommonSearchCriteria.java       |   100 +
 .../model/FieldAuditBarGraphSearchCriteria.java |    25 +
 .../model/FieldAuditLogSearchCriteria.java      |    25 +
 .../query/model/LogFileSearchCriteria.java      |    25 +
 .../query/model/LogFileTailSearchCriteria.java  |    25 +
 .../logsearch/query/model/SearchCriteria.java   |   136 +
 .../model/ServiceAnyGraphSearchCriteria.java    |    25 +
 .../model/ServiceExtremeDatesCriteria.java      |    25 +
 .../query/model/ServiceGraphSearchCriteria.java |    25 +
 .../model/ServiceLogExportSearchCriteria.java   |    25 +
 .../model/ServiceLogFileSearchCriteria.java     |    25 +
 .../query/model/ServiceLogSearchCriteria.java   |    25 +
 .../ServiceLogTruncatedSearchCriteria.java      |    25 +
 .../query/model/UserConfigSearchCriteria.java   |    25 +
 .../query/model/UserExportSearchCriteria.java   |    25 +
 .../ambari/logsearch/rest/AuditLogsREST.java    |   283 -
 .../logsearch/rest/AuditLogsResource.java       |   157 +
 .../ambari/logsearch/rest/LogFileREST.java      |    84 -
 .../ambari/logsearch/rest/LogFileResource.java  |    69 +
 .../ambari/logsearch/rest/PublicREST.java       |    48 -
 .../ambari/logsearch/rest/PublicResource.java   |    48 +
 .../ambari/logsearch/rest/ServiceLogsREST.java  |   574 -
 .../logsearch/rest/ServiceLogsResource.java     |   238 +
 .../ambari/logsearch/rest/UserConfigREST.java   |   128 -
 .../logsearch/rest/UserConfigResource.java      |   116 +
 .../ambari/logsearch/service/UserService.java   |     5 +-
 .../logsearch/solr/model/SolrAuditLogData.java  |   339 +
 .../logsearch/solr/model/SolrCommonLogData.java |   248 +
 .../solr/model/SolrComponentTypeLogData.java    |    38 +
 .../logsearch/solr/model/SolrHostLogData.java   |    38 +
 .../solr/model/SolrServiceLogData.java          |   118 +
 .../apache/ambari/logsearch/util/BizUtil.java   |    40 +-
 .../ambari/logsearch/view/VBarDataList.java     |    46 -
 .../ambari/logsearch/view/VBarGraphData.java    |    50 -
 .../apache/ambari/logsearch/view/VCount.java    |    62 -
 .../ambari/logsearch/view/VCountList.java       |    68 -
 .../ambari/logsearch/view/VGraphData.java       |    79 -
 .../ambari/logsearch/view/VGraphInfo.java       |    62 -
 .../ambari/logsearch/view/VGroupList.java       |    66 -
 .../apache/ambari/logsearch/view/VLogFile.java  |    56 -
 .../ambari/logsearch/view/VLogFileList.java     |    70 -
 .../ambari/logsearch/view/VNameValue.java       |   123 -
 .../ambari/logsearch/view/VNameValueList.java   |    78 -
 .../org/apache/ambari/logsearch/view/VNode.java |   122 -
 .../apache/ambari/logsearch/view/VNodeList.java |    55 -
 .../ambari/logsearch/view/VSolrLogList.java     |    67 -
 .../apache/ambari/logsearch/view/VString.java   |    52 -
 .../ambari/logsearch/view/VSummaryCount.java    |    66 -
 .../logsearch/view/VSummaryCountList.java       |    37 -
 ...LogsearchSecurityContextFormationFilter.java |    10 +-
 .../LogsearchAuthenticationProvider.java        |    15 +-
 ...rchExternalServerAuthenticationProvider.java |    18 +-
 .../LogsearchFileAuthenticationProvider.java    |     5 +-
 .../src/main/scripts/add_config_set.sh          |    43 -
 .../src/main/scripts/create_collections.sh      |    37 -
 .../main/webapp/META-INF/applicationContext.xml |    69 -
 .../META-INF/security-applicationContext.xml    |     1 +
 .../src/main/webapp/WEB-INF/web.xml             |    27 +-
 .../scripts/model_bases/VUserFilterBase.js      |     2 +-
 .../views/dashboard/ComponentListView.js        |     2 +-
 .../scripts/views/graphs/GraphLayoutView.js     |     2 +-
 .../src/main/webapp/templates/graphs/backup.js  |     2 +-
 .../ambari/logsearch/dao/AuditSolrDaoTest.java  |    37 +-
 .../logsearch/dao/ServiceLogsSolrDaoTest.java   |    37 +-
 .../ambari/logsearch/dao/SolrDaoBaseTest.java   |    73 +-
 .../logsearch/dao/UserConfigSolrDaoTest.java    |    56 +-
 .../ambari/logsearch/dao/UserDaoTest.java       |    58 -
 .../src/test/resources/applicationContext.xml   |    53 -
 209 files changed, 11313 insertions(+), 38775 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/pom.xml b/ambari-logsearch/ambari-logsearch-portal/pom.xml
index b36c9bb..ebba82c 100755
--- a/ambari-logsearch/ambari-logsearch-portal/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/pom.xml
@@ -548,7 +548,7 @@
     </dependency>
     <dependency>
       <groupId>org.glassfish.jersey.media</groupId>
-      <artifactId>jersey-media-moxy</artifactId>
+      <artifactId>jersey-media-json-jackson</artifactId>
       <version>${jersey.version}</version>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/production/build.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/production/build.js b/ambari-logsearch/ambari-logsearch-portal/production/build.js
deleted file mode 100644
index afeea67..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/production/build.js
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-
-{
-    appDir: "../src/main/webapp",
-    baseUrl: "scripts",
-    dir: "../target/webapp-build",
-    modules: [
-        {
-            name: "Init"
-        }
-    ]
-}


[37/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
index 0ceb76b..8e14452 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
@@ -26,18 +26,25 @@ public class DocConstants {
     public static final String STACK_BY_D = "The graph property for stacking the plot";
     public static final String EXCLUDE_QUERY_D = "Exclude the values in query result e.g.: [{message:*timeout*}]";
     public static final String INCLUDE_QUERY_D = "Include the values in query result e.g.: [{message:*exception*}]";
-    public static final String MUST_BE_D = "Include the components, comman separated values";
-    public static final String MUST_NOT_D = "Exclude the components, comman separated values";
+    public static final String MUST_BE_D = "Include the components, comma separated values";
+    public static final String MUST_NOT_D = "Exclude the components, comma separated values";
     public static final String FROM_D = "Date range param, start date";
     public static final String TO_D = "Date range param, end date";
+    public static final String START_TIME_D = "Date range param which is supported from browser url";
+    public static final String END_TIME_D = "Date range param which is supported from browser url";
+    public static final String START_INDEX_D = "Start index of the queried result";
+    public static final String SORT_TYPE_D = "Type of sorting (asc, desc)";
+    public static final String SORT_BY_D = "Sorting the results based on this field";
+    public static final String PAGE_D = "Number of pages for the results";
+    public static final String PAGE_SIZE_D = "Page size of the results";
     public static final String UNIT_D = "Aggregate the data with time gap as unit i.e 1MINUTE";
     public static final String QUERY_D = "not required";
     public static final String COLUMN_QUERY_D = "not required";
-    public static final String I_MESSAGE_D = "Include query which will query againt message column";
+    public static final String I_MESSAGE_D = "Include query which will query against message column";
     public static final String G_E_MESSAGE_D = "not required";
-    public static final String E_MESSAGE_D = "Exclude query which will query againt message column";
-    public static final String IS_LAST_PAGE_D = "";
-    public static final String FIELD_D = "Get top ten values for particular field";
+    public static final String E_MESSAGE_D = "Exclude query which will query against message column";
+    public static final String IS_LAST_PAGE_D = "Show last page (true/false)";
+    public static final String FIELD_D = "Get values for a particular field";
     public static final String FORMAT_D = "File Export format, can be 'txt' or 'json'";
   }
 
@@ -48,7 +55,6 @@ public class DocConstants {
     public static final String GET_AUDIT_LINE_GRAPH_DATA_OD = "Get the data required for line graph";
     public static final String GET_TOP_AUDIT_USERS_OD = "Get the top audit users having maximum access";
     public static final String GET_TOP_AUDIT_RESOURCES_OD = "Get the top audit resources having maximum access";
-    public static final String GET_TOP_AUDIT_COMPONENTS_OD = "not required";
     public static final String GET_LIVE_LOGS_COUNT_OD = "not required";
     public static final String GET_REQUEST_USER_LINE_GRAPH_OD = "not required";
     public static final String GET_ANY_GRAPH_DATA_OD = "Get the data generic enough to use for graph plots";
@@ -59,11 +65,11 @@ public class DocConstants {
   public class ServiceDescriptions {
     public static final String LEVEL_D = "filter for log level";
     public static final String ADVANCED_SEARCH_D = "not required";
-    public static final String TREE_PARAMS_D = "Host hierarchy shown on UI,filtering there is supported by this param";
-    public static final String START_TIME_D = "Date range param which is suportted from browser url";
-    public static final String END_TIME_D = "Date range param which is supported from browser url";
+    public static final String BUNDLE_ID = "filter for host";
+    public static final String TREE_PARAMS_D = "Host hierarchy shown on UI, filtering there is supported by this param";
     public static final String FILE_NAME_D = "File name filter which is supported from browser url";
     public static final String HOST_NAME_D = "Host name filter which is supported from browser url";
+    public static final String DATE_RANGE_LABEL_D = "Date range label (e.g.: Today)";
     public static final String COMPONENT_NAME_D = "Component name filter which is supported from browser url";
     public static final String FIND_D = "Finding particular text on subsequent pages in case of table view with pagination";
     public static final String ID_D = "Log id value for traversing to that particular record with that log id";
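
The *_D constants above are description strings; in this refactoring they are presumably fed into the Swagger annotations on the REST resources (the resource classes themselves are in other parts of this commit and not reproduced here). A hedged sketch of that wiring, using a made-up resource and endpoint:

    import io.swagger.annotations.ApiParam;
    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.QueryParam;

    @Path("/example")
    public class LevelCountResourceSketch {

      // Hypothetical endpoint; only the way DocConstants feeds @ApiParam matters here.
      @GET
      public String getCountsByLevel(
          @ApiParam(value = DocConstants.ServiceDescriptions.LEVEL_D)
          @QueryParam("level") String level) {
        return "counts for level " + level; // placeholder body for the sketch
      }
    }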

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
index d84b7b9..bc377e5 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
@@ -26,14 +26,14 @@ import java.util.List;
 
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.MessageEnums;
-import org.apache.ambari.logsearch.common.SearchCriteria;
+import org.apache.ambari.logsearch.model.response.BarGraphData;
+import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse;
+import org.apache.ambari.logsearch.model.response.NameValueData;
+import org.apache.ambari.logsearch.query.model.SearchCriteria;
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
 import org.apache.ambari.logsearch.query.QueryGeneration;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.ambari.logsearch.util.SolrUtil;
-import org.apache.ambari.logsearch.view.VBarDataList;
-import org.apache.ambari.logsearch.view.VBarGraphData;
-import org.apache.ambari.logsearch.view.VNameValue;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
@@ -44,18 +44,19 @@ import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.client.solrj.response.RangeFacet;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
+import javax.inject.Inject;
+
 @Component
 public class GraphDataGenerator extends GraphDataGeneratorBase {
 
   private static final Logger logger = Logger.getLogger(GraphDataGenerator.class);
 
-  @Autowired
+  @Inject
   private QueryGeneration queryGenerator;
 
-  public VBarDataList getAnyGraphData(SearchCriteria searchCriteria, SolrDaoBase solrDaoBase, SolrQuery solrQuery) {
+  public BarGraphDataListResponse getAnyGraphData(SearchCriteria searchCriteria, SolrDaoBase solrDaoBase, SolrQuery solrQuery) {
     // X axis credentials
     String xAxisField = (String) searchCriteria.getParamValue("xAxis");
     String stackField = (String) searchCriteria.getParamValue("stackBy");
@@ -119,12 +120,12 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
   }
 
   @SuppressWarnings("unchecked")
-  private VBarDataList normalGraph(String xAxisField, String yAxisField, String from, String to, SolrDaoBase solrDaoBase,
+  private BarGraphDataListResponse normalGraph(String xAxisField, String yAxisField, String from, String to, SolrDaoBase solrDaoBase,
       String typeXAxis, String fieldTime, SolrQuery solrQuery) {
-    VBarDataList dataList = new VBarDataList();
-    Collection<VBarGraphData> vBarGraphDatas = new ArrayList<VBarGraphData>();
-    VBarGraphData vBarGraphData = new VBarGraphData();
-    Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
+    BarGraphDataListResponse dataList = new BarGraphDataListResponse();
+    Collection<BarGraphData> vBarGraphDatas = new ArrayList<BarGraphData>();
+    BarGraphData vBarGraphData = new BarGraphData();
+    Collection<NameValueData> vNameValues = new ArrayList<NameValueData>();
     SolrUtil.setMainQuery(solrQuery, null);
     queryGenerator.setSingleIncludeFilter(solrQuery, fieldTime, "[" + from + " TO " + to + "]");
     if (typeXAxis.contains("string") || typeXAxis.contains("key_lower_case") || typeXAxis.contains("text")) {
@@ -140,7 +141,7 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
               if (countValues != null) {
                 for (Count countValue : countValues) {
                   if (countValue != null) {
-                    VNameValue vNameValue = new VNameValue();
+                    NameValueData vNameValue = new NameValueData();
                     vNameValue.setName(countValue.getName());
                     vNameValue.setValue("" + countValue.getCount());
                     vNameValues.add(vNameValue);
@@ -154,12 +155,12 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
           }
         }
         if (xAxisField.equalsIgnoreCase(LogSearchConstants.SOLR_LEVEL)) {
-          Collection<VNameValue> sortedVNameValues = new ArrayList<VNameValue>();
+          Collection<NameValueData> sortedVNameValues = new ArrayList<NameValueData>();
           for (String level : LogSearchConstants.SUPPORTED_LOG_LEVEL) {
-            VNameValue value = new VNameValue();
+            NameValueData value = new NameValueData();
             value.setName(level);
             String val = "0";
-            for (VNameValue valueLevel : vNameValues) {
+            for (NameValueData valueLevel : vNameValues) {
               if (valueLevel.getName().equalsIgnoreCase(level)) {
                 val = valueLevel.getValue();
                 break;
@@ -168,9 +169,9 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
             value.setValue(val);
             sortedVNameValues.add(value);
           }
-          vBarGraphData.setDataCounts(sortedVNameValues);
+          vBarGraphData.setDataCount(sortedVNameValues);
         } else {
-          vBarGraphData.setDataCounts(vNameValues);
+          vBarGraphData.setDataCount(vNameValues);
         }
         return dataList;
       } catch (SolrException | SolrServerException | IOException e) {
@@ -188,12 +189,12 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
         if (jsonFacetResponse.toString().equals("{count=0}")) {
           return dataList;
         }
-        VNameValue value = new VNameValue();
+        NameValueData value = new NameValueData();
         String sum = (String) jsonFacetResponse.getVal(1);
         value.setName(xAxisField);
         value.setValue(sum != null ? sum.substring(0, sum.indexOf(".")) : "");
         vNameValues.add(value);
-        vBarGraphData.setDataCounts(vNameValues);
+        vBarGraphData.setDataCount(vNameValues);
         vBarGraphData.setName(xAxisField);
         vBarGraphDatas.add(vBarGraphData);
         dataList.setGraphData(vBarGraphDatas);
@@ -207,10 +208,10 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
   }
 
   @SuppressWarnings("unchecked")
-  private VBarDataList nonRangeStackGraph(String xAxisField, String yAxisField, String stackField, String from, String to,
+  private BarGraphDataListResponse nonRangeStackGraph(String xAxisField, String yAxisField, String stackField, String from, String to,
       SolrDaoBase solrDaoBase, String typeXAxis, String fieldTime, SolrQuery solrQuery) {
-    VBarDataList dataList = new VBarDataList();
-    Collection<VBarGraphData> vGraphData = new ArrayList<VBarGraphData>();
+    BarGraphDataListResponse dataList = new BarGraphDataListResponse();
+    Collection<BarGraphData> vGraphData = new ArrayList<BarGraphData>();
     String mainQuery = queryGenerator.buildInclusiveRangeFilterQuery(fieldTime, from, to);
     SolrUtil.setMainQuery(solrQuery, mainQuery);
     SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
@@ -238,24 +239,24 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
       }
       extractNonRangeStackValuesFromBucket(jsonFacetResponse, stackField, vGraphData, typeXAxis);
       if (LogSearchConstants.SOLR_LEVEL.equalsIgnoreCase(stackField) && LogSearchConstants.SOLR_LEVEL.equalsIgnoreCase(xAxisField)) {
-        Collection<VBarGraphData> levelVGraphData = dataList.getGraphData();
-        for (VBarGraphData garphData : levelVGraphData) {
-          Collection<VNameValue> valueList = garphData.getDataCount();
-          Collection<VNameValue> valueListSorted = new ArrayList<VNameValue>();
+        Collection<BarGraphData> levelVGraphData = dataList.getGraphData();
+        for (BarGraphData graphData : levelVGraphData) {
+          Collection<NameValueData> valueList = graphData.getDataCount();
+          Collection<NameValueData> valueListSorted = new ArrayList<NameValueData>();
           for (String level : LogSearchConstants.SUPPORTED_LOG_LEVEL) {
             String val = "0";
-            for (VNameValue value : valueList) {
+            for (NameValueData value : valueList) {
               if (value.getName().equalsIgnoreCase(level)) {
                 val = value.getValue();
                 break;
               }
             }
-            VNameValue v1 = new VNameValue();
+            NameValueData v1 = new NameValueData();
             v1.setName(level.toUpperCase());
             v1.setValue(val);
             valueListSorted.add(v1);
           }
-          garphData.setDataCounts(valueListSorted);
+          graphData.setDataCount(valueListSorted);
         }
       }
       return dataList;
@@ -267,12 +268,12 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
   }
 
   @SuppressWarnings("unchecked")
-  private VBarDataList rangeNonStackGraph(String xAxisField, String yAxisField, String from, String to, String unit,
+  private BarGraphDataListResponse rangeNonStackGraph(String xAxisField, String yAxisField, String from, String to, String unit,
       SolrDaoBase solrDaoBase, String typeXAxis, String fieldTime, SolrQuery solrQuery) {
-    VBarDataList dataList = new VBarDataList();
-    Collection<VBarGraphData> vBarGraphDatas = new ArrayList<VBarGraphData>();
-    VBarGraphData vBarGraphData = new VBarGraphData();
-    Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
+    BarGraphDataListResponse dataList = new BarGraphDataListResponse();
+    Collection<BarGraphData> vBarGraphDatas = new ArrayList<BarGraphData>();
+    BarGraphData vBarGraphData = new BarGraphData();
+    Collection<NameValueData> vNameValues = new ArrayList<NameValueData>();
     SolrUtil.setMainQuery(solrQuery, null);
     if (SolrUtil.isSolrFieldNumber(typeXAxis,solrDaoBase)) {
       queryGenerator.setSingleRangeFilter(solrQuery, fieldTime, from, to);
@@ -290,12 +291,12 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
               List<RangeFacet.Count> listCount = rangeFacet.get(0).getCounts();
               if (listCount != null) {
                 for (RangeFacet.Count cnt : listCount) {
-                  VNameValue vNameValue = new VNameValue();
+                  NameValueData vNameValue = new NameValueData();
                   vNameValue.setName(cnt.getValue());
                   vNameValue.setValue("" + cnt.getCount());
                   vNameValues.add(vNameValue);
                 }
-                vBarGraphData.setDataCounts(vNameValues);
+                vBarGraphData.setDataCount(vNameValues);
                 vBarGraphDatas.add(vBarGraphData);
                 vBarGraphData.setName(xAxisField);
                 dataList.setGraphData(vBarGraphDatas);
@@ -312,10 +313,10 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
   }
 
   @SuppressWarnings("unchecked")
-  private VBarDataList rangeStackGraph(String xAxisField, String stackField, String from, String to, String unit,
+  private BarGraphDataListResponse rangeStackGraph(String xAxisField, String stackField, String from, String to, String unit,
       SolrDaoBase solrDaoBase, SolrQuery solrQuery) {
-    VBarDataList dataList = new VBarDataList();
-    List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();
+    BarGraphDataListResponse dataList = new BarGraphDataListResponse();
+    List<BarGraphData> histogramData = new ArrayList<BarGraphData>();
     SolrUtil.setMainQuery(solrQuery, null);
     SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
     String jsonHistogramQuery =
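
The hunks above swap the old view beans (VBarDataList, VBarGraphData, VNameValue) for the response models in org.apache.ambari.logsearch.model.response, and setDataCounts() becomes setDataCount(). The following minimal sketch shows how the renamed beans fit together; it is illustrative only and not part of the patch, and it assumes the setters keep the Collection-based signatures used in the hunks above.

import java.util.Collections;

import org.apache.ambari.logsearch.model.response.BarGraphData;
import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse;
import org.apache.ambari.logsearch.model.response.NameValueData;

public class BarGraphResponseSketch {

  // Builds a one-series, one-point response the same way the generator methods above do.
  public static BarGraphDataListResponse singleSeries(String seriesName, String pointName, String pointValue) {
    NameValueData point = new NameValueData();               // the old two-argument VNameValue constructor is gone
    point.setName(pointName);
    point.setValue(pointValue);

    BarGraphData series = new BarGraphData();
    series.setName(seriesName);
    series.setDataCount(Collections.singletonList(point));   // renamed from setDataCounts()

    BarGraphDataListResponse response = new BarGraphDataListResponse();
    response.setGraphData(Collections.singletonList(series));
    return response;
  }
}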

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
index e7fab9a..c57e0e9 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGeneratorBase.java
@@ -23,15 +23,15 @@ import java.util.Collection;
 import java.util.Date;
 import java.util.List;
 
-import org.apache.ambari.logsearch.manager.MgrBase;
+import org.apache.ambari.logsearch.manager.ManagerBase;
+import org.apache.ambari.logsearch.model.response.BarGraphData;
+import org.apache.ambari.logsearch.model.response.NameValueData;
 import org.apache.ambari.logsearch.util.DateUtil;
-import org.apache.ambari.logsearch.view.VBarGraphData;
-import org.apache.ambari.logsearch.view.VNameValue;
 import org.apache.commons.lang.StringUtils;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 
-class GraphDataGeneratorBase extends MgrBase {
+class GraphDataGeneratorBase {
 
   private static final String BUCKETS = "buckets";
   
@@ -62,19 +62,19 @@ class GraphDataGeneratorBase extends MgrBase {
 
   @SuppressWarnings("unchecked")
   protected void extractRangeStackValuesFromBucket(SimpleOrderedMap<Object> jsonFacetResponse, String outerField,
-      String innerField, List<VBarGraphData> histogramData) {
+      String innerField, List<BarGraphData> histogramData) {
     if (jsonFacetResponse != null) {
       NamedList<Object> stack = (NamedList<Object>) jsonFacetResponse.get(outerField);
       if (stack != null) {
         ArrayList<Object> stackBuckets = (ArrayList<Object>) stack.get(BUCKETS);
         if (stackBuckets != null) {
           for (Object stackBucket : stackBuckets) {
-            VBarGraphData vBarGraphData = new VBarGraphData();
+            BarGraphData vBarGraphData = new BarGraphData();
             SimpleOrderedMap<Object> level = (SimpleOrderedMap<Object>) stackBucket;
             if (level != null) {
               String name = level.getVal(0) != null ? level.getVal(0).toString().toUpperCase() : "";
               vBarGraphData.setName(name);
-              Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
+              Collection<NameValueData> vNameValues = new ArrayList<NameValueData>();
               NamedList<Object> innerFiledValue = (NamedList<Object>) level.get(innerField);
               if (innerFiledValue != null) {
                 ArrayList<Object> levelBuckets = (ArrayList<Object>) innerFiledValue.get(BUCKETS);
@@ -84,13 +84,15 @@ class GraphDataGeneratorBase extends MgrBase {
                     if (countValue != null) {
                       String innerName = DateUtil.convertDateWithMillisecondsToSolrDate((Date) countValue.getVal(0));
                       String innerValue = countValue.getVal(1) != null ? countValue.getVal(1).toString() : "";
-                      VNameValue vNameValue = new VNameValue(innerName, innerValue);
+                      NameValueData vNameValue = new NameValueData();
+                      vNameValue.setName(innerName);
+                      vNameValue.setValue(innerValue);
                       vNameValues.add(vNameValue);
                     }
                   }
                 }
               }
-              vBarGraphData.setDataCounts(vNameValues);
+              vBarGraphData.setDataCount(vNameValues);
             }
             histogramData.add(vBarGraphData);
           }
@@ -101,7 +103,7 @@ class GraphDataGeneratorBase extends MgrBase {
 
   @SuppressWarnings("unchecked")
   protected boolean extractNonRangeStackValuesFromBucket(SimpleOrderedMap<Object> jsonFacetResponse, String level,
-      Collection<VBarGraphData> vGraphDatas, String typeXAxis) {
+      Collection<BarGraphData> vGraphDatas, String typeXAxis) {
     boolean zeroFlag = true;
     if (jsonFacetResponse == null || jsonFacetResponse.get(level) == null
         || jsonFacetResponse.get(level).toString().equals("{count=0}")) {
@@ -114,11 +116,11 @@ class GraphDataGeneratorBase extends MgrBase {
         for (int index = 0; index < bucketList.size(); index++) {
           SimpleOrderedMap<Object> valueCount = (SimpleOrderedMap<Object>) bucketList.get(index);
           if (valueCount != null && valueCount.size() > 2) {
-            VBarGraphData vGraphData = new VBarGraphData();
-            Collection<VNameValue> levelCounts = new ArrayList<VNameValue>();
+            BarGraphData vGraphData = new BarGraphData();
+            Collection<NameValueData> levelCounts = new ArrayList<NameValueData>();
             String name = valueCount.getVal(0) != null ? valueCount.getVal(0).toString().trim() : "";
             if (isTypeNumber(typeXAxis)) {
-              VNameValue nameValue = new VNameValue();
+              NameValueData nameValue = new NameValueData();
               Double sumValue = (Double) valueCount.getVal(2);
               String value = "0";// default is zero
               if (sumValue != null) {
@@ -137,7 +139,9 @@ class GraphDataGeneratorBase extends MgrBase {
                     if (innerValueCount != null) {
                       String innerName = innerValueCount.getVal(0) != null ? innerValueCount.getVal(0).toString().trim() : "";
                       String innerValue = innerValueCount.getVal(1) != null ? innerValueCount.getVal(1).toString().trim() : "";
-                      VNameValue nameValue = new VNameValue(innerName, innerValue);
+                      NameValueData nameValue = new NameValueData();
+                      nameValue.setValue(innerValue);
+                      nameValue.setName(innerName);
                       levelCounts.add(nameValue);
                     }
                   }
@@ -145,7 +149,7 @@ class GraphDataGeneratorBase extends MgrBase {
               }
             }
             vGraphData.setName(name);
-            vGraphData.setDataCounts(levelCounts);
+            vGraphData.setDataCount(levelCounts);
             vGraphDatas.add(vGraphData);
           }
         }
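
With the model rename the two-argument VNameValue constructor is gone, so the extract*ValuesFromBucket helpers above now fill each NameValueData through setters in three lines. A purely hypothetical convenience factory (not in the patch) could collapse that pattern; the only assumption is that NameValueData keeps plain setName/setValue bean setters.

import org.apache.ambari.logsearch.model.response.NameValueData;

// Hypothetical convenience factory; mirrors the repeated setName()/setValue() blocks above.
final class NameValueDataFactory {

  private NameValueDataFactory() {
  }

  static NameValueData nameValue(String name, String value) {
    NameValueData data = new NameValueData();
    data.setName(name);
    data.setValue(value);
    return data;
  }
}

A call site would then read levelCounts.add(NameValueDataFactory.nameValue(innerName, innerValue)); in place of the three-line blocks above.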

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java
new file mode 100644
index 0000000..7affc5a
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditLogsManager.java
@@ -0,0 +1,597 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.manager;
+
+import javax.inject.Inject;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.ambari.logsearch.common.ConfigHelper;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.common.ManageStartEndTime;
+import org.apache.ambari.logsearch.common.MessageEnums;
+import org.apache.ambari.logsearch.conf.SolrAuditLogConfig;
+import org.apache.ambari.logsearch.dao.AuditSolrDao;
+import org.apache.ambari.logsearch.graph.GraphDataGenerator;
+import org.apache.ambari.logsearch.model.response.AuditLogResponse;
+import org.apache.ambari.logsearch.model.response.BarGraphData;
+import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse;
+import org.apache.ambari.logsearch.model.response.GroupListResponse;
+import org.apache.ambari.logsearch.model.response.LogData;
+import org.apache.ambari.logsearch.model.response.LogSearchResponse;
+import org.apache.ambari.logsearch.model.response.NameValueData;
+import org.apache.ambari.logsearch.model.response.NameValueDataListResponse;
+import org.apache.ambari.logsearch.solr.model.SolrAuditLogData;
+import org.apache.ambari.logsearch.solr.model.SolrComponentTypeLogData;
+import org.apache.ambari.logsearch.util.BizUtil;
+import org.apache.ambari.logsearch.util.DateUtil;
+import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.ambari.logsearch.util.SolrUtil;
+import org.apache.ambari.logsearch.view.VResponse;
+import org.apache.ambari.logsearch.query.model.AuditLogSearchCriteria;
+import org.apache.ambari.logsearch.query.model.SearchCriteria;
+import org.apache.log4j.Logger;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.response.FacetField;
+import org.apache.solr.client.solrj.response.FacetField.Count;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.client.solrj.response.RangeFacet;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SimpleOrderedMap;
+import org.springframework.stereotype.Component;
+
+@Component
+public class AuditLogsManager extends ManagerBase<SolrAuditLogData, AuditLogResponse> {
+  private static final Logger logger = Logger.getLogger(AuditLogsManager.class);
+
+  @Inject
+  private AuditSolrDao auditSolrDao;
+  @Inject
+  private GraphDataGenerator graphDataGenerator;
+  @Inject
+  private SolrAuditLogConfig solrAuditLogConfig;
+
+  public AuditLogResponse getLogs(AuditLogSearchCriteria searchCriteria) {
+    Boolean isLastPage = (Boolean) searchCriteria.getParamValue("isLastPage");
+    if (isLastPage) {
+      SolrQuery lastPageQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
+      LogSearchResponse logResponse = getLastPage(searchCriteria, LogSearchConstants.AUDIT_EVTTIME, auditSolrDao, lastPageQuery);
+      if (logResponse == null) {
+        logResponse = new AuditLogResponse();
+      }
+      return (AuditLogResponse) logResponse;
+    }
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
+    return getLogAsPaginationProvided(solrQuery, auditSolrDao);
+  }
+
+  private List<LogData> getComponents(SearchCriteria searchCriteria) {
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
+    List<LogData> docList = new ArrayList<>();
+    try {
+      SolrUtil.setFacetField(solrQuery, LogSearchConstants.AUDIT_COMPONENT);
+      SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
+      List<FacetField> facetFields = null;
+      List<Count> componentsCount = new ArrayList<Count>();
+      FacetField facetField = null;
+
+      QueryResponse queryResponse = auditSolrDao.process(solrQuery);
+      if (queryResponse == null) {
+        return docList;
+      }
+
+      facetFields = queryResponse.getFacetFields();
+      if (facetFields == null) {
+        return docList;
+      }
+      if (!facetFields.isEmpty()) {
+        facetField = facetFields.get(0);
+      }
+      if (facetField != null) {
+        componentsCount = facetField.getValues();
+      }
+
+      for (Count component : componentsCount) {
+        SolrComponentTypeLogData logData = new SolrComponentTypeLogData();
+        logData.setType(component.getName());
+        docList.add(logData);
+      }
+      return docList;
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error("Error during solrQuery=" + solrQuery, e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+  }
+
+  public GroupListResponse getAuditComponents(SearchCriteria searchCriteria) {
+    GroupListResponse componentResponse = new GroupListResponse();
+    List<LogData> docList = getComponents(searchCriteria);
+    componentResponse.setGroupList(docList);
+    return componentResponse;
+  }
+
+  @SuppressWarnings("unchecked")
+  public BarGraphDataListResponse getAuditBarGraphData(SearchCriteria searchCriteria) {
+    BarGraphDataListResponse dataList = new BarGraphDataListResponse();
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
+
+    String from = getFrom((String) searchCriteria.getParamValue("startTime"));
+    String to = getTo((String) searchCriteria.getParamValue("endTime"));
+    String unit = getUnit((String) searchCriteria.getParamValue("unit"));
+
+    List<BarGraphData> histogramData = new ArrayList<BarGraphData>();
+    String jsonHistogramQuery = queryGenerator.buildJSONFacetTermTimeRangeQuery(LogSearchConstants.AUDIT_COMPONENT,
+      LogSearchConstants.AUDIT_EVTTIME, from, to, unit).replace("\\", "");
+
+    try {
+      SolrUtil.setJSONFacet(solrQuery, jsonHistogramQuery);
+      SolrUtil.setRowCount(solrQuery, 0);
+      QueryResponse response = auditSolrDao.process(solrQuery);
+      if (response == null) {
+        return dataList;
+      }
+      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response.getResponse().get("facets");
+
+      if (jsonFacetResponse == null || jsonFacetResponse.toString().equals("{count=0}")) {
+        return dataList;
+      }
+
+      extractValuesFromBucket(jsonFacetResponse, "x", "y", histogramData);
+
+      dataList.setGraphData(histogramData);
+      return dataList;
+
+    } catch (SolrServerException | SolrException | IOException e) {
+      logger.error(e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+
+    }
+  }
+
+  @SuppressWarnings({"unchecked", "rawtypes"})
+  public NameValueDataListResponse getLiveLogCounts() {
+    NameValueDataListResponse nameValueList = new NameValueDataListResponse();
+    SolrQuery solrQuery = new SolrQuery();
+    solrQuery.setParam("event", "/audit/logs/live/count");
+    try {
+      Date[] timeRange = ManageStartEndTime.getStartEndTime();
+      String startDate = DateUtil.convertGivenDateFormatToSolrDateFormat(timeRange[0]);
+      String endDate = DateUtil.convertGivenDateFormatToSolrDateFormat(timeRange[1]);
+
+      SolrUtil.setMainQuery(solrQuery, null);
+      SolrUtil.setFacetRange(solrQuery, LogSearchConstants.AUDIT_EVTTIME, startDate, endDate, "+2MINUTE");
+      List<RangeFacet.Count> listCount;
+
+      QueryResponse response = auditSolrDao.process(solrQuery);
+
+      List<RangeFacet> rangeFacet = response.getFacetRanges();
+      if (rangeFacet == null) {
+        return nameValueList;
+      }
+      RangeFacet range = rangeFacet.get(0);
+
+      if (range == null) {
+        return nameValueList;
+      }
+
+      listCount = range.getCounts();
+
+      List<NameValueData> nameValues = new ArrayList<>();
+      int count = 0;
+      for (RangeFacet.Count cnt : listCount) {
+        NameValueData nameValue = new NameValueData();
+        nameValue.setName("" + count);
+        nameValue.setValue("" + cnt.getCount());
+        nameValues.add(nameValue);
+        count++;
+      }
+      nameValueList.setvNameValues(nameValues);
+      return nameValueList;
+
+    } catch (SolrException | SolrServerException | ParseException
+      | IOException e) {
+      logger.error("Error during solrQuery=" + solrQuery, e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+  }
+
+  public BarGraphDataListResponse topTenUsers(SearchCriteria searchCriteria) {
+
+    String jsonUserQuery =
+      "{Users:{type:terms, field:reqUser, facet:{ Repo:{ type:terms, field:repo, facet:{eventCount:\"sum(event_count)\"}}}}}";
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
+    SolrUtil.setJSONFacet(solrQuery, jsonUserQuery);
+    SolrUtil.setRowCount(solrQuery, 0);
+    try {
+      BarGraphDataListResponse barGraphDataListResponse = new BarGraphDataListResponse();
+      QueryResponse queryResponse = auditSolrDao.process(solrQuery);
+      if (queryResponse == null) {
+        return barGraphDataListResponse;
+      }
+
+      NamedList<Object> namedList = queryResponse.getResponse();
+
+      if (namedList == null) {
+        return barGraphDataListResponse;
+      }
+
+      @SuppressWarnings("unchecked")
+      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList.get("facets");
+      if (jsonFacetResponse == null) {
+        return barGraphDataListResponse;
+      }
+      if (jsonFacetResponse.toString().equals("{count=0}")) {
+        return barGraphDataListResponse;
+      }
+      barGraphDataListResponse = BizUtil.buildSummaryForTopCounts(jsonFacetResponse, "Repo", "Users");
+      return barGraphDataListResponse;
+
+    } catch (SolrServerException | SolrException | IOException e) {
+      logger.error("Error during solrQuery=" + e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+  }
+
+  public BarGraphDataListResponse topTenResources(SearchCriteria searchCriteria) {
+
+    String jsonUserQuery =
+      "{Users:{type:terms,field:resource,facet:{Repo:{type:terms,field:repo,facet:{eventCount:\"sum(event_count)\"}}}}}";
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
+    SolrUtil.setJSONFacet(solrQuery, jsonUserQuery);
+    SolrUtil.setRowCount(solrQuery, 0);
+    try {
+      BarGraphDataListResponse barGraphDataListResponse = new BarGraphDataListResponse();
+      QueryResponse queryResponse = auditSolrDao.process(solrQuery);
+      if (queryResponse == null) {
+        return barGraphDataListResponse;
+      }
+
+      NamedList<Object> namedList = queryResponse.getResponse();
+      if (namedList == null) {
+        return barGraphDataListResponse;
+      }
+
+      @SuppressWarnings("unchecked")
+      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList.get("facets");
+
+      barGraphDataListResponse = BizUtil.buildSummaryForTopCounts(jsonFacetResponse, "Repo", "Users");
+      return barGraphDataListResponse;
+
+    } catch (SolrServerException | SolrException | IOException e) {
+      logger.error("Error during solrQuery=" + solrQuery, e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  public BarGraphDataListResponse getRequestUserLineGraph(SearchCriteria searchCriteria) {
+
+    String from = getFrom((String) searchCriteria.getParamValue("startTime"));
+    String to = getTo((String) searchCriteria.getParamValue("endTime"));
+    String unit = getUnit((String) searchCriteria.getParamValue("unit"));
+
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
+
+    BarGraphDataListResponse dataList = new BarGraphDataListResponse();
+    List<BarGraphData> histogramData = new ArrayList<BarGraphData>();
+
+    SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
+
+    String jsonHistogramQuery = queryGenerator.buildJSONFacetTermTimeRangeQuery(LogSearchConstants.AUDIT_REQUEST_USER,
+      LogSearchConstants.AUDIT_EVTTIME, from, to, unit).replace("\\", "");
+
+    try {
+      SolrUtil.setJSONFacet(solrQuery, jsonHistogramQuery);
+      SolrUtil.setRowCount(solrQuery, 0);
+      QueryResponse response = auditSolrDao.process(solrQuery);
+      if (response == null) {
+        return dataList;
+      }
+      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response.getResponse().get("facets");
+
+      if (jsonFacetResponse == null || jsonFacetResponse.toString().equals("{count=0}")) {
+        return dataList;
+      }
+      extractValuesFromBucket(jsonFacetResponse, "x", "y", histogramData);
+
+      dataList.setGraphData(histogramData);
+      return dataList;
+
+    } catch (SolrException | IOException | SolrServerException e) {
+      logger.error("Error during solrQuery=" + solrQuery, e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+
+  }
+
+  public String getAuditLogsSchemaFieldsName() {
+    String excludeArray[] = Arrays.copyOf(solrAuditLogConfig.getExcludeColumnList().toArray(),
+      solrAuditLogConfig.getExcludeColumnList().size(), String[].class);
+    List<String> fieldNames = new ArrayList<String>();
+    HashMap<String, String> uiFieldColumnMapping = new HashMap<String, String>();
+    ConfigHelper.getSchemaFieldsName(excludeArray, fieldNames, auditSolrDao);
+
+    for (String fieldName : fieldNames) {
+      String uiField = solrAuditLogConfig.getSolrAndUiColumns().get(fieldName + LogSearchConstants.SOLR_SUFFIX);
+      if (uiField == null) {
+        uiFieldColumnMapping.put(fieldName, fieldName);
+      } else {
+        uiFieldColumnMapping.put(fieldName, uiField);
+      }
+    }
+
+    uiFieldColumnMapping = BizUtil.sortHashMapByValues(uiFieldColumnMapping);
+    return convertObjToString(uiFieldColumnMapping);
+
+  }
+
+  public BarGraphDataListResponse getAnyGraphData(SearchCriteria searchCriteria) {
+    searchCriteria.addParam("fieldTime", LogSearchConstants.AUDIT_EVTTIME);
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
+    BarGraphDataListResponse result = graphDataGenerator.getAnyGraphData(searchCriteria, auditSolrDao, solrQuery);
+    if (result == null) {
+      result = new BarGraphDataListResponse();
+    }
+    return result;
+
+  }
+
+  @SuppressWarnings("unchecked")
+  private void extractValuesFromBucket(SimpleOrderedMap<Object> jsonFacetResponse, String outerField, String innerField,
+                                       List<BarGraphData> histogramData) {
+    NamedList<Object> stack = (NamedList<Object>) jsonFacetResponse.get(outerField);
+    ArrayList<Object> stackBuckets = (ArrayList<Object>) stack.get("buckets");
+    for (Object temp : stackBuckets) {
+      BarGraphData vBarGraphData = new BarGraphData();
+
+      SimpleOrderedMap<Object> level = (SimpleOrderedMap<Object>) temp;
+      String name = ((String) level.getVal(0)).toUpperCase();
+      vBarGraphData.setName(name);
+
+      Collection<NameValueData> vNameValues = new ArrayList<NameValueData>();
+      vBarGraphData.setDataCount(vNameValues);
+      ArrayList<Object> levelBuckets = (ArrayList<Object>) ((NamedList<Object>) level.get(innerField)).get("buckets");
+      for (Object temp1 : levelBuckets) {
+        SimpleOrderedMap<Object> countValue = (SimpleOrderedMap<Object>) temp1;
+        String value = DateUtil.convertDateWithMillisecondsToSolrDate((Date) countValue.getVal(0));
+
+        String count = "" + countValue.getVal(1);
+        NameValueData vNameValue = new NameValueData();
+        vNameValue.setName(value);
+        vNameValue.setValue(count);
+        vNameValues.add(vNameValue);
+      }
+      histogramData.add(vBarGraphData);
+    }
+  }
+
+  @SuppressWarnings({"unchecked"})
+  public Response exportUserTableToTextFile(SearchCriteria searchCriteria) {
+    String jsonUserQuery =
+      "{ Users: { type: terms, field: reqUser, facet:  {Repo: {  type: terms, field: repo, facet: {  eventCount: \"sum(event_count)\"}}}},x:{ type: terms,field: resource, facet: {y: {  type: terms, field: repo,facet: {  eventCount: \"sum(event_count)\"}}}}}";
+
+    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
+    String startTime = (String) searchCriteria.getParamValue("startTime");
+    String endTime = (String) searchCriteria.getParamValue("endTime");
+
+    startTime = startTime == null ? "" : startTime;
+    endTime = endTime == null ? "" : "_" + endTime;
+
+    SolrUtil.setJSONFacet(solrQuery, jsonUserQuery);
+    SolrUtil.setRowCount(solrQuery, 0);
+
+    String dataFormat = (String) searchCriteria.getParamValue("format");
+    FileOutputStream fis = null;
+    try {
+      QueryResponse queryResponse = auditSolrDao.process(solrQuery);
+      if (queryResponse == null) {
+        VResponse response = new VResponse();
+        response.setMsgDesc("Query was not able to execute " + solrQuery);
+        throw RESTErrorUtil.createRESTException(response);
+      }
+
+      NamedList<Object> namedList = queryResponse.getResponse();
+      if (namedList == null) {
+        VResponse response = new VResponse();
+        response.setMsgDesc("Query was not able to execute " + solrQuery);
+        throw RESTErrorUtil.createRESTException(response);
+      }
+      BarGraphDataListResponse vBarUserDataList = new BarGraphDataListResponse();
+      BarGraphDataListResponse vBarResourceDataList = new BarGraphDataListResponse();
+
+      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList.get("facets");
+      vBarUserDataList = BizUtil.buildSummaryForTopCounts(jsonFacetResponse, "Repo", "Users");
+      vBarResourceDataList = BizUtil.buildSummaryForTopCounts(jsonFacetResponse, "y", "x");
+      String data = "";
+      String summary = "";
+      if ("text".equals(dataFormat)) {
+        int users = 0;
+        int resources = 0;
+        summary += "\n\n\n\n";
+        data += addBlank("Users") + "Components/Access" + "\n";
+        data += "--------------------------------------------------------------------------\n";
+        Collection<BarGraphData> tableUserData = vBarUserDataList.getGraphData();
+        for (BarGraphData graphData : tableUserData) {
+          String userName = graphData.getName();
+          String largeUserName = "";
+
+          if (userName.length() > 45) {
+            largeUserName = userName.substring(0, 45);
+            data += addBlank(largeUserName);
+          } else
+            data += addBlank(userName);
+
+          Collection<NameValueData> vnameValueList = graphData.getDataCount();
+          int count = 0;
+          String blank = "";
+          for (NameValueData vNameValue : vnameValueList) {
+            data += blank + vNameValue.getName() + " " + vNameValue.getValue() + "\n";
+            if (count == 0)
+              blank = addBlank(blank);
+            count++;
+
+          }
+          while (largeUserName.length() > 0) {
+            data += largeUserName.substring(0, 45) + "\n";
+          }
+
+          users += 1;
+        }
+        data += "\n\n\n\n\n\n";
+        data += addBlank("Resources") + "Components/Access" + "\n";
+        data += "--------------------------------------------------------------------------\n";
+        Collection<BarGraphData> tableResourceData = vBarResourceDataList.getGraphData();
+        for (BarGraphData graphData : tableResourceData) {
+          String resourceName = graphData.getName();
+          String largeResourceName = resourceName;
+          if (largeResourceName.length() > 45) {
+            resourceName = largeResourceName.substring(0, 45);
+            largeResourceName = largeResourceName.substring(45, largeResourceName.length());
+          } else {
+            largeResourceName = "";
+          }
+
+          //resourceName = resourceName.replaceAll("(.{45})", resourceName.substring(0, 45)+"\n");
+          data += addBlank(resourceName);
+          Collection<NameValueData> vnameValueList = graphData.getDataCount();
+          int count = 0;
+          String blank = "";
+          for (NameValueData vNameValue : vnameValueList) {
+            data += blank + vNameValue.getName() + " " + vNameValue.getValue() + "\n";
+            if (count == 0)
+              blank = addBlank(blank);
+            count++;
+          }
+          String tempLargeResourceName = largeResourceName;
+          while (largeResourceName.length() > 45) {
+            largeResourceName = tempLargeResourceName.substring(0, 45);
+            tempLargeResourceName = tempLargeResourceName.substring(45, tempLargeResourceName.length());
+            data += largeResourceName + "\n";
+          }
+          if (largeResourceName.length() < 45 && !largeResourceName.isEmpty()) {
+            data += largeResourceName + "\n";
+          }
+          resources += 1;
+        }
+        String header = "--------------------------------SUMMARY-----------------------------------\n";
+        summary = header + "Users  = " + users + "\nResources  = " + resources + "\n" + summary;
+        data = summary + data;
+      } else {
+        data = "{" + convertObjToString(vBarUserDataList) + "," + convertObjToString(vBarResourceDataList) + "}";
+        dataFormat = "json";
+      }
+      String fileName = "Users_Resource" + startTime + endTime + ".";
+      File file = File.createTempFile(fileName, dataFormat);
+
+      fis = new FileOutputStream(file);
+      fis.write(data.getBytes());
+      return Response
+        .ok(file, MediaType.APPLICATION_OCTET_STREAM)
+        .header("Content-Disposition", "attachment;filename=" + fileName + dataFormat)
+        .build();
+
+    } catch (SolrServerException | SolrException | IOException e) {
+      logger.error("Error during solrQuery=" + e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    } finally {
+      if (fis != null) {
+        try {
+          fis.close();
+        } catch (IOException e) {
+        }
+      }
+    }
+  }
+
+  private String addBlank(String field) {
+    int blanks = 50;
+    int strSize = field.length();
+    String fieldWithBlank = field;
+    for (int i = 0; i < blanks - strSize; i++) {
+      fieldWithBlank += " ";
+    }
+    return fieldWithBlank;
+  }
+
+  public BarGraphDataListResponse getServiceLoad(SearchCriteria searchCriteria) {
+    BarGraphDataListResponse dataList = new BarGraphDataListResponse();
+    Collection<BarGraphData> vaDatas = new ArrayList<BarGraphData>();
+    dataList.setGraphData(vaDatas);
+
+    SolrQuery serviceLoadQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
+
+    try {
+      SolrUtil.setFacetField(serviceLoadQuery, LogSearchConstants.AUDIT_COMPONENT);
+      QueryResponse serviceLoadResponse = auditSolrDao.process(serviceLoadQuery);
+      if (serviceLoadResponse == null) {
+        return dataList;
+      }
+      FacetField serviceFacetField = serviceLoadResponse.getFacetField(LogSearchConstants.AUDIT_COMPONENT);
+      if (serviceFacetField == null) {
+        return dataList;
+      }
+
+      List<Count> serviceLoadFacets = serviceFacetField.getValues();
+      if (serviceLoadFacets == null) {
+        return dataList;
+      }
+      for (Count cnt : serviceLoadFacets) {
+        List<NameValueData> valueList = new ArrayList<NameValueData>();
+        BarGraphData vBarGraphData = new BarGraphData();
+        vaDatas.add(vBarGraphData);
+        NameValueData vNameValue = new NameValueData();
+        vNameValue.setName(cnt.getName());
+        vBarGraphData.setName(cnt.getName().toUpperCase());
+        vNameValue.setValue("" + cnt.getCount());
+        valueList.add(vNameValue);
+        vBarGraphData.setDataCount(valueList);
+      }
+
+      return dataList;
+
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error("Error during solrQuery=" + e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+  }
+
+  @Override
+  protected List<SolrAuditLogData> convertToSolrBeans(QueryResponse response) {
+    return response.getBeans(SolrAuditLogData.class);
+  }
+
+  @Override
+  protected AuditLogResponse createLogSearchResponse() {
+    return new AuditLogResponse();
+  }
+}
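
The new AuditLogsManager returns typed response beans (AuditLogResponse, BarGraphDataListResponse, NameValueDataListResponse) where the old AuditMgr below serialized everything to JSON strings via convertObjToString(), and it injects its dependencies with @Inject instead of @Autowired. A hypothetical caller sketch follows; the class name and printing logic are illustrative only, and serialization back to JSON is assumed to happen in the REST layer.

import javax.inject.Inject;

import org.apache.ambari.logsearch.manager.AuditLogsManager;
import org.apache.ambari.logsearch.model.response.BarGraphData;
import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse;
import org.apache.ambari.logsearch.query.model.SearchCriteria;

public class AuditSummaryPrinter {

  @Inject
  private AuditLogsManager auditLogsManager;

  // Works directly against the typed response instead of re-parsing a JSON string.
  public void printTopUsers(SearchCriteria criteria) {
    BarGraphDataListResponse topUsers = auditLogsManager.topTenUsers(criteria);
    if (topUsers.getGraphData() == null) {
      return;
    }
    for (BarGraphData series : topUsers.getGraphData()) {
      System.out.println(series.getName() + " -> " + series.getDataCount().size() + " repo entries");
    }
  }
}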

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
deleted file mode 100644
index 172ec81..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
+++ /dev/null
@@ -1,630 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.manager;
-
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.text.ParseException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-
-import org.apache.ambari.logsearch.common.ConfigHelper;
-import org.apache.ambari.logsearch.common.LogSearchConstants;
-import org.apache.ambari.logsearch.common.ManageStartEndTime;
-import org.apache.ambari.logsearch.common.MessageEnums;
-import org.apache.ambari.logsearch.common.PropertiesHelper;
-import org.apache.ambari.logsearch.common.SearchCriteria;
-import org.apache.ambari.logsearch.dao.AuditSolrDao;
-import org.apache.ambari.logsearch.graph.GraphDataGenerator;
-import org.apache.ambari.logsearch.util.BizUtil;
-import org.apache.ambari.logsearch.util.DateUtil;
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
-import org.apache.ambari.logsearch.util.SolrUtil;
-import org.apache.ambari.logsearch.view.VBarDataList;
-import org.apache.ambari.logsearch.view.VBarGraphData;
-import org.apache.ambari.logsearch.view.VGroupList;
-import org.apache.ambari.logsearch.view.VNameValue;
-import org.apache.ambari.logsearch.view.VNameValueList;
-import org.apache.ambari.logsearch.view.VResponse;
-import org.apache.ambari.logsearch.view.VSolrLogList;
-import org.apache.log4j.Logger;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.response.FacetField;
-import org.apache.solr.client.solrj.response.FacetField.Count;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.client.solrj.response.RangeFacet;
-import org.apache.solr.common.SolrDocument;
-import org.apache.solr.common.SolrDocumentList;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.stereotype.Component;
-
-@Component
-public class AuditMgr extends MgrBase {
-  private static final Logger logger = Logger.getLogger(AuditMgr.class); 
-
-  @Autowired
-  private AuditSolrDao auditSolrDao;
-  @Autowired
-  private GraphDataGenerator graphDataGenerator;
-
-  public String getLogs(SearchCriteria searchCriteria) {
-    String lastPage = (String)  searchCriteria.getParamValue("isLastPage");
-    Boolean isLastPage = Boolean.parseBoolean(lastPage);
-     if (isLastPage) {
-       SolrQuery lastPageQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-      VSolrLogList collection = getLastPage(searchCriteria, LogSearchConstants.AUDIT_EVTTIME, auditSolrDao, lastPageQuery);
-      if(collection == null){
-        collection = new VSolrLogList();
-      }
-      return convertObjToString(collection);
-    }
-    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-    VSolrLogList collection = getLogAsPaginationProvided(solrQuery, auditSolrDao);
-    return convertObjToString(collection);
-
-  }
-
-  private SolrDocumentList getComponents(SearchCriteria searchCriteria) {
-    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-    SolrDocumentList docList = new SolrDocumentList();
-    try {
-      SolrUtil.setFacetField(solrQuery, LogSearchConstants.AUDIT_COMPONENT);
-      SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
-      List<FacetField> facetFields = null;
-      List<Count> componentsCount = new ArrayList<Count>();
-      FacetField facetField = null;
-
-      QueryResponse queryResponse = auditSolrDao.process(solrQuery);
-      if (queryResponse == null) {
-        return docList;
-      }
-
-      facetFields = queryResponse.getFacetFields();
-      if (facetFields == null) {
-        return docList;
-      }
-      if (!facetFields.isEmpty()) {
-        facetField = facetFields.get(0);
-      }
-      if (facetField != null) {
-        componentsCount = facetField.getValues();
-      }
-    
-      for (Count compnonet : componentsCount) {
-        SolrDocument solrDocument = new SolrDocument();
-        solrDocument.addField("type", compnonet.getName());
-        docList.add(solrDocument);
-      }
-      return docList;
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-  }
-
-  public String getAuditComponents(SearchCriteria searchCriteria) {
-    VGroupList vGroupList = new VGroupList();
-    SolrDocumentList docList = getComponents(searchCriteria);
-
-    vGroupList.setGroupDocuments(docList);
-    return convertObjToString(vGroupList);
-  }
-
-  @SuppressWarnings("unchecked")
-  public String getAuditLineGraphData(SearchCriteria searchCriteria) {
-    VBarDataList dataList = new VBarDataList();
-    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-
-    String from = getFrom((String) searchCriteria.getParamValue("startTime"));
-    String to = getTo((String) searchCriteria.getParamValue("endTime"));
-    String unit = getUnit((String) searchCriteria.getParamValue("unit"));
-
-    List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();
-    String jsonHistogramQuery = queryGenerator.buildJSONFacetTermTimeRangeQuery(LogSearchConstants.AUDIT_COMPONENT,
-      LogSearchConstants.AUDIT_EVTTIME, from, to, unit).replace("\\", "");
-
-    try {
-      SolrUtil.setJSONFacet(solrQuery, jsonHistogramQuery);
-      SolrUtil.setRowCount(solrQuery, 0);
-      QueryResponse response = auditSolrDao.process(solrQuery);
-      if (response == null){
-        return convertObjToString(dataList);
-      }
-      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response.getResponse().get("facets");
-
-      if (jsonFacetResponse == null || jsonFacetResponse.toString().equals("{count=0}")) {
-        return convertObjToString(dataList);
-      }
-
-      extractValuesFromBucket(jsonFacetResponse, "x", "y", histogramData);
-
-      dataList.setGraphData(histogramData);
-      return convertObjToString(dataList);
-
-    } catch (SolrServerException | SolrException | IOException e) {
-      logger.error(e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-
-    }
-  }
-
-  public String getTopAuditFieldCount(SearchCriteria searchCriteria) {
-    int topCounts = 10;
-    Integer top = (Integer) searchCriteria.getParamValue("top");
-    String facetField = (String) searchCriteria.getParamValue("field");
-    if (top == null){
-      top = new Integer(topCounts);
-    }
-    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-    try {
-
-      List<VNameValue> nameValues = new ArrayList<VNameValue>();
-
-      VNameValueList nameValueList = new VNameValueList(nameValues);
-
-      SolrUtil.setFacetField(solrQuery, facetField);
-      SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_COUNT);
-      SolrUtil.setFacetLimit(solrQuery, top.intValue());
-
-      List<Count> countList = new ArrayList<FacetField.Count>();
-      QueryResponse queryResponse = auditSolrDao.process(solrQuery);
-      if (queryResponse == null) {
-        return convertObjToString(nameValueList);
-      }
-      
-      if (queryResponse.getFacetField(facetField) != null) {
-        FacetField queryFacetField = queryResponse.getFacetField(facetField);
-        if (queryFacetField != null) {
-          countList = queryFacetField.getValues();
-        }
-      }
-
-      for (Count cnt : countList) {
-        VNameValue nameValue = new VNameValue();
-        nameValue.setName(cnt.getName());
-
-        nameValue.setValue("" + cnt.getCount());
-        nameValues.add(nameValue);
-      }
-      return convertObjToString(nameValueList);
-
-    } catch (SolrException | IOException | SolrServerException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-  }
-
-  @SuppressWarnings({ "unchecked", "rawtypes" })
-  public String getLiveLogCounts() {
-    VNameValueList nameValueList = new VNameValueList();
-    SolrQuery solrQuery = new SolrQuery();
-    solrQuery.setParam("event", "/audit/logs/live/count");
-    try {
-      Date[] timeRange = ManageStartEndTime.getStartEndTime();
-      String startDate = DateUtil.convertGivenDateFormatToSolrDateFormat(timeRange[0]);
-      String endDate = DateUtil.convertGivenDateFormatToSolrDateFormat(timeRange[1]);
-
-      SolrUtil.setMainQuery(solrQuery, null);
-      SolrUtil.setFacetRange(solrQuery, LogSearchConstants.AUDIT_EVTTIME, startDate, endDate, "+2MINUTE");
-      List<RangeFacet.Count> listCount;
-
-      QueryResponse response = auditSolrDao.process(solrQuery);
- 
-      List<RangeFacet> rangeFacet = response.getFacetRanges();
-      if (rangeFacet == null){
-        return convertObjToString(nameValueList);
-      }
-      RangeFacet range=rangeFacet.get(0);
-      
-      if(range == null){
-        return convertObjToString(nameValueList);
-      }
-      
-      listCount = range.getCounts();
-
-      List<VNameValue> nameValues = new ArrayList<VNameValue>();
-      int count = 0;
-      for (RangeFacet.Count cnt : listCount) {
-        VNameValue nameValue = new VNameValue();
-        nameValue.setName("" + count);
-        nameValue.setValue("" + cnt.getCount());
-        nameValues.add(nameValue);
-        count++;
-      }
-      nameValueList.setVNameValues(nameValues);
-      return convertObjToString(nameValueList);
-
-    } catch (SolrException | SolrServerException | ParseException
-      | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-  }
-
-  public String topTenUsers(SearchCriteria searchCriteria) {
-
-    String jsonUserQuery =
-        "{Users:{type:terms, field:reqUser, facet:{ Repo:{ type:terms, field:repo, facet:{eventCount:\"sum(event_count)\"}}}}}";
-    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-    SolrUtil.setJSONFacet(solrQuery, jsonUserQuery);
-    SolrUtil.setRowCount(solrQuery, 0);
-    try {
-      VBarDataList vBarDataList = new VBarDataList();
-      QueryResponse queryResponse = auditSolrDao.process(solrQuery);
-      if (queryResponse == null) {
-        return convertObjToString(vBarDataList);
-      }
-
-      NamedList<Object> namedList = queryResponse.getResponse();
-      
-      if (namedList == null) {
-        return convertObjToString(vBarDataList);
-      }
-
-      @SuppressWarnings("unchecked")
-      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList.get("facets");
-      if (jsonFacetResponse == null) {
-        return convertObjToString(vBarDataList);
-      }
-      if (jsonFacetResponse.toString().equals("{count=0}")) {
-        return convertObjToString(vBarDataList);
-      }
-      vBarDataList = BizUtil.buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
-      return convertObjToString(vBarDataList);
-
-    } catch (SolrServerException | SolrException | IOException e) {
-      logger.error("Error during solrQuery=" + e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-  }
-
-  public String topTenResources(SearchCriteria searchCriteria) {
-
-    String jsonUserQuery =
-        "{Users:{type:terms,field:resource,facet:{Repo:{type:terms,field:repo,facet:{eventCount:\"sum(event_count)\"}}}}}";
-    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-    SolrUtil.setJSONFacet(solrQuery, jsonUserQuery);
-    SolrUtil.setRowCount(solrQuery, 0);
-    try {
-      VBarDataList vBarDataList = new VBarDataList();
-      QueryResponse queryResponse = auditSolrDao.process(solrQuery);
-      if(queryResponse == null){
-        return convertObjToString(vBarDataList);
-      }
-
-      NamedList<Object> namedList = queryResponse.getResponse();
-      if (namedList == null) {
-        return convertObjToString(vBarDataList);
-      }
-
-      @SuppressWarnings("unchecked")
-      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList.get("facets");
-
-      vBarDataList = BizUtil.buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
-      return convertObjToString(vBarDataList);
-
-    } catch (SolrServerException | SolrException | IOException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-  }
-
-  @SuppressWarnings("unchecked")
-  public String getRequestUserLineGraph(SearchCriteria searchCriteria) {
-
-    String from = getFrom((String) searchCriteria.getParamValue("startTime"));
-    String to = getTo((String) searchCriteria.getParamValue("endTime"));
-    String unit = getUnit((String) searchCriteria.getParamValue("unit"));
-    
-    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-
-    VBarDataList dataList = new VBarDataList();
-    List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();
-
-    SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
-
-    String jsonHistogramQuery = queryGenerator.buildJSONFacetTermTimeRangeQuery(LogSearchConstants.AUDIT_REQUEST_USER,
-        LogSearchConstants.AUDIT_EVTTIME, from, to, unit).replace("\\", "");
-
-    try {
-      SolrUtil.setJSONFacet(solrQuery, jsonHistogramQuery);
-      SolrUtil.setRowCount(solrQuery, 0);
-      QueryResponse response = auditSolrDao.process(solrQuery);
-      if (response == null){
-        return convertObjToString(dataList);
-      }
-      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response.getResponse().get("facets");
-
-      if (jsonFacetResponse == null || jsonFacetResponse.toString().equals("{count=0}")) {
-        return convertObjToString(dataList);
-      }
-      extractValuesFromBucket(jsonFacetResponse, "x", "y", histogramData);
-
-      dataList.setGraphData(histogramData);
-      return convertObjToString(dataList);
-
-    } catch (SolrException | IOException | SolrServerException e) {
-      logger.error("Error during solrQuery=" + solrQuery, e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-
-  }
-
-  public String getAuditLogsSchemaFieldsName() {
-    String excludeArray[] = PropertiesHelper.getPropertyStringList("logsearch.solr.audit.logs.exclude.columnlist");
-    List<String> fieldNames = new ArrayList<String>();
-    HashMap<String, String> uiFieldColumnMapping = new HashMap<String, String>();
-    ConfigHelper.getSchemaFieldsName(excludeArray, fieldNames,auditSolrDao);
-
-    for (String fieldName : fieldNames) {
-      String uiField = ConfigHelper.auditLogsColumnMapping.get(fieldName + LogSearchConstants.SOLR_SUFFIX);
-      if (uiField == null) {
-        uiFieldColumnMapping.put(fieldName, fieldName);
-      } else {
-        uiFieldColumnMapping.put(fieldName, uiField);
-      }
-    }
-
-    uiFieldColumnMapping = BizUtil.sortHashMapByValues(uiFieldColumnMapping);
-    return convertObjToString(uiFieldColumnMapping);
-
-  }
-
-  public String getAnyGraphData(SearchCriteria searchCriteria) {
-    searchCriteria.addParam("fieldTime", LogSearchConstants.AUDIT_EVTTIME);
-    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-    VBarDataList result = graphDataGenerator.getAnyGraphData(searchCriteria, auditSolrDao, solrQuery);
-    if (result == null) {
-      result = new VBarDataList();
-    }
-    return convertObjToString(result);
-
-  }
-
-  @SuppressWarnings("unchecked")
-  private void extractValuesFromBucket(SimpleOrderedMap<Object> jsonFacetResponse, String outerField, String innerField,
-      List<VBarGraphData> histogramData) {
-    NamedList<Object> stack = (NamedList<Object>) jsonFacetResponse.get(outerField);
-    ArrayList<Object> stackBuckets = (ArrayList<Object>) stack.get("buckets");
-    for (Object temp : stackBuckets) {
-      VBarGraphData vBarGraphData = new VBarGraphData();
-
-      SimpleOrderedMap<Object> level = (SimpleOrderedMap<Object>) temp;
-      String name = ((String) level.getVal(0)).toUpperCase();
-      vBarGraphData.setName(name);
-
-      Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
-      vBarGraphData.setDataCounts(vNameValues);
-      ArrayList<Object> levelBuckets = (ArrayList<Object>) ((NamedList<Object>) level.get(innerField)).get("buckets");
-      for (Object temp1 : levelBuckets) {
-        SimpleOrderedMap<Object> countValue = (SimpleOrderedMap<Object>) temp1;
-        String value = DateUtil.convertDateWithMillisecondsToSolrDate((Date) countValue.getVal(0));
-
-        String count = "" + countValue.getVal(1);
-        VNameValue vNameValue = new VNameValue();
-        vNameValue.setName(value);
-        vNameValue.setValue(count);
-        vNameValues.add(vNameValue);
-      }
-      histogramData.add(vBarGraphData);
-    }
-  }
-
-  @SuppressWarnings({"unchecked"})
-  public Response exportUserTableToTextFile(SearchCriteria searchCriteria) {
-    String jsonUserQuery =
-        "{ Users: { type: terms, field: reqUser, facet:  {Repo: {  type: terms, field: repo, facet: {  eventCount: \"sum(event_count)\"}}}},x:{ type: terms,field: resource, facet: {y: {  type: terms, field: repo,facet: {  eventCount: \"sum(event_count)\"}}}}}";
-
-    SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-    String startTime = (String) searchCriteria.getParamValue("startTime");
-    String endTime = (String) searchCriteria.getParamValue("endTime");
-
-    startTime = startTime == null ? "" : startTime;
-    endTime = endTime == null ? "" : "_" + endTime;
-
-    SolrUtil.setJSONFacet(solrQuery, jsonUserQuery);
-    SolrUtil.setRowCount(solrQuery, 0);
-
-    String dataFormat = (String) searchCriteria.getParamValue("format");
-    FileOutputStream fis = null;
-    try {
-      QueryResponse queryResponse = auditSolrDao.process(solrQuery);
-      if(queryResponse == null){
-        VResponse response = new VResponse();
-        response.setMsgDesc("Query was not able to execute "+solrQuery);
-        throw RESTErrorUtil.createRESTException(response);
-      }
-
-      NamedList<Object> namedList = queryResponse.getResponse();
-      if (namedList == null) {
-        VResponse response = new VResponse();
-        response.setMsgDesc("Query was not able to execute "+solrQuery);
-        throw RESTErrorUtil.createRESTException(response);
-      }
-      VBarDataList vBarUserDataList = new VBarDataList();
-      VBarDataList vBarResourceDataList = new VBarDataList();
-
-      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) namedList.get("facets");
-      vBarUserDataList = BizUtil.buildSummaryForTopCounts(jsonFacetResponse,"Repo","Users");
-      vBarResourceDataList = BizUtil.buildSummaryForTopCounts(jsonFacetResponse,"y","x");
-      String data = "";
-      String summary = "";
-      if ("text".equals(dataFormat)) {
-        int users = 0;
-        int resources = 0;
-        summary += "\n\n\n\n";
-        data += addBlank("Users") + "Components/Access" + "\n";
-        data += "--------------------------------------------------------------------------\n";
-        Collection<VBarGraphData> tableUserData = vBarUserDataList.getGraphData();
-        for (VBarGraphData graphData : tableUserData) {
-          String userName = graphData.getName();
-          String largeUserName = "";
-
-          if (userName.length() > 45) {
-            largeUserName = userName.substring(0, 45);
-            data += addBlank(largeUserName);
-          } else
-            data += addBlank(userName);
-
-          Collection<VNameValue> vnameValueList = graphData.getDataCount();
-          int count = 0;
-          String blank = "";
-          for (VNameValue vNameValue : vnameValueList) {
-            data += blank + vNameValue.getName() + " " + vNameValue.getValue() + "\n";
-            if (count == 0)
-              blank = addBlank(blank);
-            count++;
-
-          }
-          while (largeUserName.length() > 0) {
-            data += largeUserName.substring(0, 45) + "\n";
-          }
-
-          users += 1;
-        }
-        data += "\n\n\n\n\n\n";
-        data += addBlank("Resources") + "Components/Access" + "\n";
-        data += "--------------------------------------------------------------------------\n";
-        Collection<VBarGraphData> tableResourceData = vBarResourceDataList.getGraphData();
-        for (VBarGraphData graphData : tableResourceData) {
-          String resourceName = graphData.getName();
-          String largeResourceName = resourceName;
-          if (largeResourceName.length() > 45) {
-            resourceName = largeResourceName.substring(0, 45);
-            largeResourceName = largeResourceName.substring(45, largeResourceName.length());
-          } else {
-            largeResourceName = "";
-          }
-
-          //resourceName = resourceName.replaceAll("(.{45})", resourceName.substring(0, 45)+"\n");
-          data += addBlank(resourceName);
-          Collection<VNameValue> vnameValueList = graphData.getDataCount();
-          int count = 0;
-          String blank = "";
-          for (VNameValue vNameValue : vnameValueList) {
-            data += blank + vNameValue.getName() + " " + vNameValue.getValue() + "\n";
-            if (count == 0)
-              blank = addBlank(blank);
-            count++;
-          }
-          String tempLargeResourceName = largeResourceName;
-          while (largeResourceName.length() > 45) {
-            largeResourceName = tempLargeResourceName.substring(0, 45);
-            tempLargeResourceName = tempLargeResourceName.substring(45, tempLargeResourceName.length());
-            data += largeResourceName + "\n";
-          }
-          if (largeResourceName.length() < 45 && !largeResourceName.isEmpty()) {
-            data += largeResourceName + "\n";
-          }
-          resources += 1;
-        }
-        String header = "--------------------------------SUMMARY-----------------------------------\n";
-        summary = header + "Users  = " + users + "\nResources  = " + resources + "\n" + summary;
-        data = summary + data;
-      } else {
-        data = "{" + convertObjToString(vBarUserDataList) + "," + convertObjToString(vBarResourceDataList) + "}";
-        dataFormat = "json";
-      }
-      String fileName = "Users_Resource" + startTime + endTime + ".";
-      File file = File.createTempFile(fileName, dataFormat);
-
-      fis = new FileOutputStream(file);
-      fis.write(data.getBytes());
-      return Response
-        .ok(file, MediaType.APPLICATION_OCTET_STREAM)
-        .header("Content-Disposition", "attachment;filename=" + fileName + dataFormat)
-        .build();
-
-    } catch (SolrServerException | SolrException | IOException e) {
-      logger.error("Error during solrQuery=" + e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    } finally {
-      if (fis != null) {
-        try {
-          fis.close();
-        } catch (IOException e) {
-        }
-      }
-    }
-  }
-
-  private String addBlank(String field) {
-    int blanks = 50;
-    int strSize = field.length();
-    String fieldWithBlank = field;
-    for (int i = 0; i < blanks - strSize; i++) {
-      fieldWithBlank += " ";
-    }
-    return fieldWithBlank;
-  }
-
-  public String getServiceLoad(SearchCriteria searchCriteria) {
-    VBarDataList dataList = new VBarDataList();
-    Collection<VBarGraphData> vaDatas = new ArrayList<VBarGraphData>();
-    dataList.setGraphData(vaDatas);
-
-    SolrQuery serivceLoadQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
-
-    try {
-      SolrUtil.setFacetField(serivceLoadQuery, LogSearchConstants.AUDIT_COMPONENT);
-      QueryResponse serviceLoadResponse = auditSolrDao.process(serivceLoadQuery);
-      if (serviceLoadResponse == null){
-        return convertObjToString(dataList);
-      }
-      FacetField serviceFacetField =serviceLoadResponse.getFacetField(LogSearchConstants.AUDIT_COMPONENT);
-      if (serviceFacetField == null) {
-        return convertObjToString(dataList);
-      }
-      
-      List<Count> serviceLoadFacets = serviceFacetField.getValues();
-      if (serviceLoadFacets == null) {
-        return convertObjToString(dataList);
-      }
-      for (Count cnt : serviceLoadFacets) {
-        List<VNameValue> valueList = new ArrayList<VNameValue>();
-        VBarGraphData vBarGraphData = new VBarGraphData();
-        vaDatas.add(vBarGraphData);
-        VNameValue vNameValue = new VNameValue();
-        vNameValue.setName(cnt.getName());
-        vBarGraphData.setName(cnt.getName().toUpperCase());
-        vNameValue.setValue("" + cnt.getCount());
-        valueList.add(vNameValue);
-        vBarGraphData.setDataCounts(valueList);
-      }
-
-      return convertObjToString(dataList);
-
-    } catch (SolrException | SolrServerException | IOException e) {
-      logger.error("Error during solrQuery=" + e);
-      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-  }
-}
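
For context on the removed exportUserTableToTextFile above: the top-users and top-resources tables were driven by a single nested Solr JSON facet (per user, per repo, sum of event_count), whose response buckets were then walked by BizUtil.buildSummaryForTopCounts and extractValuesFromBucket. A minimal plain-SolrJ sketch of an equivalent request follows; it bypasses the project's queryGenerator/SolrUtil helpers, and the client and collection parameters are illustrative assumptions rather than part of the original code.

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.response.QueryResponse;

    public class AuditSummarySketch {
      // Sketch only: the SolrClient and collection name are assumptions, not from the commit.
      public static QueryResponse topUsersByRepo(SolrClient client, String collection) throws Exception {
        SolrQuery query = new SolrQuery("*:*");
        query.setRows(0); // facet counts only, no documents
        // Same shape as the "Users" part of the jsonUserQuery string the removed code
        // handed to SolrUtil.setJSONFacet(...)
        query.add("json.facet",
            "{Users:{type:terms,field:reqUser,"
          + "facet:{Repo:{type:terms,field:repo,facet:{eventCount:\"sum(event_count)\"}}}}}");
        return client.query(collection, query);
      }
    }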

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/JsonManagerBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/JsonManagerBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/JsonManagerBase.java
new file mode 100644
index 0000000..94191e0
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/JsonManagerBase.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.manager;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonParseException;
+import com.google.gson.JsonPrimitive;
+import com.google.gson.JsonSerializationContext;
+import com.google.gson.JsonSerializer;
+
+import java.util.Date;
+
+public class JsonManagerBase {
+
+  private JsonSerializer<Date> jsonDateSerialiazer = null;
+  private JsonDeserializer<Date> jsonDateDeserialiazer = null;
+
+  public JsonManagerBase() {
+    jsonDateSerialiazer = new JsonSerializer<Date>() {
+
+      @Override
+      public JsonElement serialize(Date paramT, java.lang.reflect.Type paramType, JsonSerializationContext paramJsonSerializationContext) {
+        return paramT == null ? null : new JsonPrimitive(paramT.getTime());
+      }
+    };
+
+    jsonDateDeserialiazer = new JsonDeserializer<Date>() {
+
+      @Override
+      public Date deserialize(JsonElement json, java.lang.reflect.Type typeOfT, JsonDeserializationContext context)
+        throws JsonParseException {
+        return json == null ? null : new Date(json.getAsLong());
+      }
+
+    };
+  }
+
+  protected String convertObjToString(Object obj) {
+    if (obj == null) {
+      return "";
+    }
+
+    Gson gson = new GsonBuilder()
+      .registerTypeAdapter(Date.class, jsonDateSerialiazer)
+      .registerTypeAdapter(Date.class, jsonDateDeserialiazer).create();
+
+    return gson.toJson(obj);
+  }
+}
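
The new JsonManagerBase registers a matching Gson serializer/deserializer pair so that Date fields are written and read back as epoch milliseconds instead of locale-dependent formatted strings. A self-contained sketch of that behaviour, assuming a purely illustrative Event payload class:

    import com.google.gson.Gson;
    import com.google.gson.GsonBuilder;
    import com.google.gson.JsonPrimitive;
    import com.google.gson.JsonSerializer;
    import java.util.Date;

    public class DateAsMillisSketch {
      // Illustrative payload type, not from the codebase.
      static class Event { Date createdAt = new Date(0L); }

      public static void main(String[] args) {
        JsonSerializer<Date> dateAsMillis = (date, type, context) ->
            date == null ? null : new JsonPrimitive(date.getTime());
        Gson gson = new GsonBuilder().registerTypeAdapter(Date.class, dateAsMillis).create();
        System.out.println(gson.toJson(new Event())); // prints {"createdAt":0}
      }
    }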

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileManager.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileManager.java
new file mode 100644
index 0000000..405eaef
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogFileManager.java
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.manager;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.common.MessageEnums;
+import org.apache.ambari.logsearch.model.response.LogFileData;
+import org.apache.ambari.logsearch.model.response.LogFileDataListResponse;
+import org.apache.ambari.logsearch.model.response.LogListResponse;
+import org.apache.ambari.logsearch.model.response.ServiceLogData;
+import org.apache.ambari.logsearch.model.response.ServiceLogResponse;
+import org.apache.ambari.logsearch.query.model.SearchCriteria;
+import org.apache.ambari.logsearch.dao.AuditSolrDao;
+import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao;
+import org.apache.ambari.logsearch.dao.SolrDaoBase;
+import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.ambari.logsearch.util.SolrUtil;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.response.FacetField;
+import org.apache.solr.client.solrj.response.FacetField.Count;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrException;
+import org.springframework.stereotype.Component;
+
+import javax.inject.Inject;
+
+
+@Component
+public class LogFileManager extends ManagerBase<ServiceLogData, ServiceLogResponse> {
+
+  private static final Logger logger = Logger.getLogger(LogFileManager.class);
+
+  @Inject
+  private ServiceLogsSolrDao serviceLogsSolrDao;
+  @Inject
+  private AuditSolrDao auditSolrDao;
+
+  public LogFileDataListResponse searchLogFiles(SearchCriteria searchCriteria) {
+    LogFileDataListResponse logFileList = new LogFileDataListResponse();
+    List<LogFileData> logFiles = new ArrayList<LogFileData>();
+    String componentName = (String) searchCriteria.getParamValue("component");
+    String host = (String) searchCriteria.getParamValue("host");
+    int minCount = 1;// to remove zero count facet
+    SolrQuery solrQuery = new SolrQuery();
+    SolrUtil.setMainQuery(solrQuery, null);
+    SolrUtil.setFacetFieldWithMincount(solrQuery, LogSearchConstants.SOLR_PATH, minCount);
+    // adding filter
+    queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_COMPONENT, componentName);
+    queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_HOST, host);
+    try {
+      String logType = (String) searchCriteria.getParamValue("logType");
+      if (StringUtils.isBlank(logType)) {
+        logType = LogType.SERVICE.name();// default is service Log
+      }
+      SolrDaoBase daoMgr = null;
+      if (logType.equalsIgnoreCase(LogType.SERVICE.name())) {
+        daoMgr = serviceLogsSolrDao;
+      } else if (logType.equalsIgnoreCase(LogType.AUDIT.name())) {
+        daoMgr = auditSolrDao;
+      } else {
+        throw RESTErrorUtil.createRESTException(logType + " is not a valid logType", MessageEnums.INVALID_INPUT_DATA);
+      }
+      QueryResponse queryResponse = daoMgr.process(solrQuery);
+      if (queryResponse.getFacetField(LogSearchConstants.SOLR_PATH) != null) {
+        FacetField queryFacetField = queryResponse.getFacetField(LogSearchConstants.SOLR_PATH);
+        if (queryFacetField != null) {
+          List<Count> countList = queryFacetField.getValues();
+          for (Count count : countList) {
+            LogFileData vLogFile = new LogFileData();
+            String filePath = count.getName();
+            String fileName = FilenameUtils.getName(filePath);
+            vLogFile.setPath(filePath);
+            vLogFile.setName(fileName);
+            logFiles.add(vLogFile);
+          }
+        }
+      }
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error("Error in solr query  :" + e.getLocalizedMessage() + "\n Query :" + solrQuery.toQueryString(), e.getCause());
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+    logFileList.setLogFiles(logFiles);
+
+    return logFileList;
+  }
+
+  public LogListResponse getLogFileTail(SearchCriteria searchCriteria) {
+    String host = (String) searchCriteria.getParamValue("host");
+    String logFile = (String) searchCriteria.getParamValue("name");
+    String component = (String) searchCriteria.getParamValue("component");
+    String tailSize = (String) searchCriteria.getParamValue("tailSize");
+    if (StringUtils.isBlank(host)) {
+      throw RESTErrorUtil.createRESTException("missing Host Name", MessageEnums.ERROR_SYSTEM);
+    }
+    tailSize = (StringUtils.isBlank(tailSize)) ? "10" : tailSize;
+    SolrQuery logFileTailQuery = new SolrQuery();
+    try {
+      int tail = Integer.parseInt(tailSize);
+      tail = tail > 100 ? 100 : tail;
+      SolrUtil.setMainQuery(logFileTailQuery, null);
+      queryGenerator.setSingleIncludeFilter(logFileTailQuery, LogSearchConstants.SOLR_HOST, host);
+      if (!StringUtils.isBlank(logFile)) {
+        queryGenerator.setSingleIncludeFilter(logFileTailQuery, LogSearchConstants.SOLR_PATH, SolrUtil.makeSolrSearchString(logFile));
+      } else if (!StringUtils.isBlank(component)) {
+        queryGenerator.setSingleIncludeFilter(logFileTailQuery, LogSearchConstants.SOLR_COMPONENT, component);
+      } else {
+        throw RESTErrorUtil.createRESTException("component or logfile parameter must be present", MessageEnums.ERROR_SYSTEM);
+      }
+
+      SolrUtil.setRowCount(logFileTailQuery, tail);
+      queryGenerator.setSortOrderDefaultServiceLog(logFileTailQuery, new SearchCriteria());
+      return getLogAsPaginationProvided(logFileTailQuery, serviceLogsSolrDao);
+
+    } catch (NumberFormatException ne) {
+
+      throw RESTErrorUtil.createRESTException(ne.getMessage(),
+        MessageEnums.ERROR_SYSTEM);
+
+    }
+  }
+
+  @Override
+  protected List<ServiceLogData> convertToSolrBeans(QueryResponse response) {
+    throw new UnsupportedOperationException();
+  }
+
+  @Override
+  protected ServiceLogResponse createLogSearchResponse() {
+    throw new UnsupportedOperationException();
+  }
+}
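
For reference, the facet query that LogFileManager.searchLogFiles assembles through SolrUtil and queryGenerator boils down to a field facet on the log file path with a minimum count of 1, filtered by component and host. A rough plain-SolrJ equivalent is sketched below; the literal field names are assumptions standing in for the LogSearchConstants.SOLR_* values used above.

    import org.apache.solr.client.solrj.SolrQuery;

    public class LogFilePathFacetSketch {
      public static SolrQuery logFilePathsFor(String component, String host) {
        SolrQuery query = new SolrQuery("*:*");
        query.setRows(0);               // only the facet buckets are needed
        query.addFacetField("path");    // one bucket per distinct log file path (SOLR_PATH stand-in)
        query.setFacetMinCount(1);      // drop zero-count paths, as searchLogFiles does
        if (component != null) {
          query.addFilterQuery("type:" + component);  // SOLR_COMPONENT stand-in
        }
        if (host != null) {
          query.addFilterQuery("host:" + host);       // SOLR_HOST stand-in
        }
        return query;
      }
    }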


[28/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VCount.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VCount.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VCount.java
deleted file mode 100644
index 7832fcc..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VCount.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import javax.xml.bind.annotation.XmlRootElement;
-
-@XmlRootElement
-public class VCount implements java.io.Serializable {
-  private static final long serialVersionUID = 1L;
-
-  protected String name;
-
-  protected Long count;
-
-  /**
-   * Default constructor. This will set all the attributes to default value.
-   */
-  public VCount() {
-  }
-
-  public String getName() {
-    return name;
-  }
-
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  public Long getCount() {
-    return count;
-  }
-
-  public void setCount(Long count) {
-    this.count = count;
-  }
-
-  public String toString() {
-    String str = "VLogLevel={";
-    str += super.toString();
-    str += "name={" + name + "} ";
-    str += "count={" + count + "} ";
-    str += "}";
-    return str;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VCountList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VCountList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VCountList.java
deleted file mode 100644
index f105478..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VCountList.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.codehaus.jackson.annotate.JsonAutoDetect;
-import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY)
-@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class VCountList extends VList {
-  private static final long serialVersionUID = 1L;
-
-  protected List<VCount> vCounts;
-
-  public VCountList() {
-    super();
-    vCounts = new ArrayList<VCount>();
-  }
-
-  public VCountList(List<VCount> logList) {
-    super(logList);
-    this.vCounts = logList;
-  }
-
-  public void setCounts(List<VCount> list) {
-    this.vCounts = list;
-  }
-
-  @Override
-  public int getListSize() {
-    if (vCounts != null)
-      return vCounts.size();
-    return 0;
-  }
-
-  @Override
-  public List<VCount> getList() {
-    return vCounts;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGraphData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGraphData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGraphData.java
deleted file mode 100644
index 1eebfac..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGraphData.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import java.io.Serializable;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.codehaus.jackson.annotate.JsonAutoDetect;
-import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY)
-@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class VGraphData implements Serializable {
-
-  private static final long serialVersionUID = 1L;
-
-  protected String name;
-
-  protected Long count;
-
-  protected List<VGraphData> dataList;
-
-  public String getName() {
-    return name;
-  }
-
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  public Long getCount() {
-    return count;
-  }
-
-  public void setCount(Long info) {
-    this.count = info;
-  }
-
-  public List<VGraphData> getDataList() {
-    return dataList;
-  }
-
-  public void setDataList(List<VGraphData> dataList) {
-    this.dataList = dataList;
-  }
-
-  @Override
-  public String toString() {
-    String str = "VGraphData={";
-    str += super.toString();
-    str += "info={ " + count + " } ";
-    str += "dataList={ " + dataList + " } ";
-    return str;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGraphInfo.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGraphInfo.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGraphInfo.java
deleted file mode 100644
index 2bf75b5..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGraphInfo.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import java.io.Serializable;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.codehaus.jackson.annotate.JsonAutoDetect;
-import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY)
-@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class VGraphInfo implements Serializable {
-
-  /**
-   * 
-   */
-  private static final long serialVersionUID = 1L;
-
-  protected List<VGraphData> graphData;
-
-  public List<VGraphData> getGraphData() {
-    return graphData;
-  }
-
-  public void setGraphData(List<VGraphData> graphData) {
-    this.graphData = graphData;
-  }
-
-  @Override
-  public String toString() {
-    String str = "VGraphInfo={";
-    str += super.toString();
-    str += "graphData={ " + graphData + " }";
-    return str;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGroupList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGroupList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGroupList.java
deleted file mode 100644
index 5e6c3fa..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VGroupList.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.solr.common.SolrDocumentList;
-import org.codehaus.jackson.annotate.JsonAutoDetect;
-import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY)
-@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class VGroupList extends VList {
-  private static final long serialVersionUID = 1L;
-
-  protected SolrDocumentList groupList;
-
-  public VGroupList() {
-    super();
-    groupList = new SolrDocumentList();
-  }
-
-  public VGroupList(SolrDocumentList logList) {
-    super(logList);
-    this.groupList = logList;
-  }
-
-  public void setGroupDocuments(SolrDocumentList list) {
-    this.groupList = list;
-  }
-
-  @Override
-  public int getListSize() {
-    if (groupList != null)
-      return groupList.size();
-    return 0;
-  }
-
-  @Override
-  public SolrDocumentList getList() {
-    return groupList;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VLogFile.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VLogFile.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VLogFile.java
deleted file mode 100644
index 57e32d2..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VLogFile.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.codehaus.jackson.annotate.JsonAutoDetect;
-import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY)
-@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class VLogFile {
-
-  private String name;
-
-  private String path;
-
- 
-  public String getName() {
-    return name;
-  }
-
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  public String getPath() {
-    return path;
-  }
-
-  public void setPath(String path) {
-    this.path = path;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VLogFileList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VLogFileList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VLogFileList.java
deleted file mode 100644
index a79dae7..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VLogFileList.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.view;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.codehaus.jackson.annotate.JsonAutoDetect;
-import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY)
-@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class VLogFileList extends VList {
-
-  /**
-   * 
-   */
-  private static final long serialVersionUID = 1L;
-
-  private List<VLogFile> logFiles;
-
-  public VLogFileList() {
-    logFiles = new ArrayList<VLogFile>();
-  }
-
-  @Override
-  public int getListSize() {
-    if (logFiles == null) {
-      return 0;
-    }
-    return logFiles.size();
-  }
-
-  @Override
-  public List<?> getList() {
-    return logFiles;
-  }
-
-  public List<VLogFile> getLogFiles() {
-    return logFiles;
-  }
-
-  public void setLogFiles(List<VLogFile> logFiles) {
-    this.logFiles = logFiles;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValue.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValue.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValue.java
deleted file mode 100644
index adbd6e0..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValue.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import javax.xml.bind.annotation.XmlRootElement;
-
-@XmlRootElement
-public class VNameValue implements java.io.Serializable {
-  private static final long serialVersionUID = 1L;
-
-  /**
-   * Name
-   */
-  protected String name;
-  /**
-   * Value
-   */
-  protected String value;
-
-  /**
-   * Default constructor. This will set all the attributes to default value.
-   */
-  public VNameValue() {
-  }
-
-  /**
-   * @param name
-   *            the key
-   * @param value
-   *            the value
-   */
-  public VNameValue(String name, String value) {
-
-    this.name = name;
-    this.value = value;
-  }
-
-  /**
-   * This method sets the value to the member attribute <b>name</b>. You
-   * cannot set null to the attribute.
-   * 
-   * @param name
-   *            Value to set member attribute <b>name</b>
-   */
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  /**
-   * Returns the value for the member attribute <b>name</b>
-   * 
-   * @return String - value of member attribute <b>name</b>.
-   */
-  public String getName() {
-    return this.name;
-  }
-
-  /**
-   * This method sets the value to the member attribute <b>value</b>. You
-   * cannot set null to the attribute.
-   * 
-   * @param value
-   *            Value to set member attribute <b>value</b>
-   */
-  public void setValue(String value) {
-    if(value.contains(".") && (value.contains("e") || value.contains("E"))){
-      this.value=getExponentialValueReplaced(value);
-    }else{
-      this.value = value;
-    }
-  }
-
-  /**
-   * Returns the value for the member attribute <b>value</b>
-   * 
-   * @return String - value of member attribute <b>value</b>.
-   */
-  public String getValue() {
-    return this.value;
-  }
-
-  /**
-   * This return the bean content in string format
-   * 
-   * @return formatedStr
-   */
-  public String toString() {
-    String str = "VNameValue={";
-    str += super.toString();
-    str += "name={" + name + "} ";
-    str += "value={" + value + "} ";
-    str += "}";
-    return str;
-  }
-  
-  private String getExponentialValueReplaced(String value) {
-    try{
-      Double number = Double.parseDouble(value);
-      String newValue = String.format("%.0f", number);
-      return newValue;
-      
-    }catch(Exception e){
-      return value;
-    }
-  }
-}
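
One detail of the removed VNameValue worth noting: setValue normalized values that Solr returns in scientific notation (aggregation sums frequently come back as doubles) into plain integer strings via getExponentialValueReplaced. The same conversion in isolation:

    public class ExponentialValueSketch {
      public static void main(String[] args) {
        // Mirrors the removed getExponentialValueReplaced(): only values containing '.'
        // plus 'e' or 'E' were rewritten; anything unparseable fell back to the raw string.
        String raw = "1.23E5";
        String plain = String.format("%.0f", Double.parseDouble(raw));
        System.out.println(plain); // 123000
      }
    }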

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValueList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValueList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValueList.java
deleted file mode 100644
index e95249d..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNameValueList.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class VNameValueList extends VList {
-  private static final long serialVersionUID = 1L;
-  protected List<VNameValue> vNameValues = new ArrayList<VNameValue>();
-
-  public VNameValueList() {
-    super();
-  }
-
-  public VNameValueList(List<VNameValue> objList) {
-    super(objList);
-    this.vNameValues = objList;
-  }
-
-  /**
-   * @return the vNameValues
-   */
-  public List<VNameValue> getVNameValues() {
-    return vNameValues;
-  }
-
-  /**
-   * @param vNameValues
-   *            the vNameValues to set
-   */
-  public void setVNameValues(List<VNameValue> vNameValues) {
-    this.vNameValues = vNameValues;
-  }
-
-  @Override
-  public int getListSize() {
-    if (vNameValues != null) {
-      return vNameValues.size();
-    }
-    return 0;
-  }
-
-  @Override
-  public List<?> getList() {
-    // TODO Auto-generated method stub
-    return null;
-  }
-
-//  @Override
-//  public List<VNameValue> getList() {
-//    return vNameValues;
-//  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNode.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNode.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNode.java
deleted file mode 100644
index a16ded8..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNode.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import java.util.Collection;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.codehaus.jackson.annotate.JsonAutoDetect;
-import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY)
-@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class VNode {
-
-  private String name;
-
-  private String type;
-  
-  private String value;
-
-  private boolean isRoot;
-
-  private Collection<VNode> childs;
-
-  private Collection<VNameValue> logLevelCount;
-
-  private boolean isParent;
-
-  public String getName() {
-    return name;
-  }
-
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  public String getType() {
-    return type;
-  }
-
-  public void setType(String type) {
-    this.type = type;
-  }
-
-  public boolean isRoot() {
-    return isRoot;
-  }
-
-  public void setRoot(boolean isRoot) {
-    this.isRoot = isRoot;
-  }
-
-  public Collection<VNode> getChilds() {
-    return childs;
-  }
-
-  public void setChilds(Collection<VNode> childs) {
-    this.childs = childs;
-  }
-
-  public boolean isParent() {
-    return isParent;
-  }
-
-  public void setParent(boolean isParent) {
-    this.isParent = isParent;
-  }
-
-  public Collection<VNameValue> getLogLevelCount() {
-    return logLevelCount;
-  }
-
-  public void setLogLevelCount(Collection<VNameValue> logLevelCount) {
-    this.logLevelCount = logLevelCount;
-  }
-  
-  public String getValue() {
-    return value;
-  }
-
-  public void setValue(String value) {
-    this.value = value;
-  }
-
-  @Override
-  public String toString() {
-    String str = "VNode={";
-    str += "name={" + name + "} ";
-    str += "value={" + value + "} ";
-    str += "type={" + type + "} ";
-    str += "isRoot={" + isRoot + "} ";
-    str += "isParent={" + isParent + "} ";
-    str += "logLevelCount={" + logLevelCount + "} ";
-    str += "childs={" + childs + "} ";
-    str += "}";
-    return str;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNodeList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNodeList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNodeList.java
deleted file mode 100644
index 78f32ce..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VNodeList.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class VNodeList extends VList {
-  private static final long serialVersionUID = 1L;
-  protected List<VNode> vNodeList = new ArrayList<VNode>();
-
-  public List<VNode> getvNodeList() {
-    return vNodeList;
-  }
-
-  public void setvNodeList(List<VNode> vNodeList) {
-    this.vNodeList = vNodeList;
-  }
-
-  @Override
-  public int getListSize() {
-    // TODO Auto-generated method stub
-    return 0;
-  }
-
-  @Override
-  public List<VNode> getList() {
-    // TODO Auto-generated method stub
-    return null;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSolrLogList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSolrLogList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSolrLogList.java
deleted file mode 100644
index 55cc089..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSolrLogList.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.apache.solr.common.SolrDocumentList;
-import org.codehaus.jackson.annotate.JsonAutoDetect;
-import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY)
-@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class VSolrLogList extends VList {
-  private static final long serialVersionUID = 1L;
-
-  protected SolrDocumentList logList;
-
-  public VSolrLogList() {
-    super();
-    logList = new SolrDocumentList();
-  }
-
-  public VSolrLogList(SolrDocumentList logList) {
-    super(logList);
-    this.logList = logList;
-  }
-
-  public void setSolrDocuments(SolrDocumentList list) {
-    this.logList = list;
-  }
-
-  @Override
-  public int getListSize() {
-    if (logList != null){
-      return logList.size();
-    }
-    return 0;
-  }
-
-  @Override
-  public SolrDocumentList getList() {
-    return logList;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VString.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VString.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VString.java
deleted file mode 100644
index bda7565..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VString.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.view;
-
-import javax.xml.bind.annotation.XmlRootElement;
-
-@XmlRootElement
-public class VString {
-
-  /**
-   * Value
-   */
-  protected String value;
-
-  /**
-   * Default constructor. This will set all the attributes to default value.
-   */
-  public VString() {
-  }
-
-  public String getValue() {
-    return value;
-  }
-
-  public void setValue(String value) {
-    this.value = value;
-  }
-
-  public String toString() {
-    String str = "VString={";
-    str += super.toString();
-    str += "value={" + value + "} ";
-    str += "}";
-    return str;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCount.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCount.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCount.java
deleted file mode 100644
index b8606d0..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCount.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import java.util.List;
-
-public class VSummaryCount {
-
-  protected String level;
-
-  protected List<String> cricticalMsg;
-
-  protected List<String> compName;
-
-  protected List<Long> countMsg;
-
-  public String getLevel() {
-    return level;
-  }
-
-  public void setLevel(String level) {
-    this.level = level;
-  }
-
-  public List<String> getCricticalMsg() {
-    return cricticalMsg;
-  }
-
-  public void setCricticalMsg(List<String> cricticalMsg) {
-    this.cricticalMsg = cricticalMsg;
-  }
-
-  public List<String> getCompName() {
-    return compName;
-  }
-
-  public void setCompName(List<String> compName) {
-    this.compName = compName;
-  }
-
-  public List<Long> getCountMsg() {
-    return countMsg;
-  }
-
-  public void setCountMsg(List<Long> countMsg) {
-    this.countMsg = countMsg;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCountList.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCountList.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCountList.java
deleted file mode 100644
index fcab0b7..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/view/VSummaryCountList.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.view;
-
-import java.util.ArrayList;
-import java.util.Collection;
-
-public class VSummaryCountList {
-
-  protected Collection<VSummaryCount> countList = new ArrayList<VSummaryCount>();
-
-  public Collection<VSummaryCount> getCountList() {
-    return countList;
-  }
-
-  public void setCountList(Collection<VSummaryCount> countList) {
-    this.countList = countList;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
index 9fb285e..a34fbd0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
@@ -20,6 +20,7 @@ package org.apache.ambari.logsearch.web.filters;
 
 import java.io.IOException;
 
+import javax.inject.Inject;
 import javax.servlet.FilterChain;
 import javax.servlet.ServletException;
 import javax.servlet.ServletRequest;
@@ -30,11 +31,10 @@ import javax.servlet.http.HttpServletResponse;
 import javax.servlet.http.HttpSession;
 
 import org.apache.ambari.logsearch.common.LogSearchContext;
-import org.apache.ambari.logsearch.manager.SessionMgr;
+import org.apache.ambari.logsearch.manager.SessionManager;
 import org.apache.ambari.logsearch.util.CommonUtil;
 import org.apache.ambari.logsearch.web.model.User;
 import org.apache.log4j.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.authentication.AnonymousAuthenticationToken;
 import org.springframework.security.core.Authentication;
 import org.springframework.security.core.context.SecurityContextHolder;
@@ -47,8 +47,8 @@ public class LogsearchSecurityContextFormationFilter extends GenericFilterBean {
   public static final String LOGSEARCH_SC_SESSION_KEY = "LOGSEARCH_SECURITY_CONTEXT";
   public static final String USER_AGENT = "User-Agent";
 
-  @Autowired
-  SessionMgr sessionMgr;
+  @Inject
+  SessionManager sessionManager;
 
   public LogsearchSecurityContextFormationFilter() {
   }
@@ -94,7 +94,7 @@ public class LogsearchSecurityContextFormationFilter extends GenericFilterBean {
           httpSession.setAttribute(LOGSEARCH_SC_SESSION_KEY, context);
         }
         LogSearchContext.setContext(context);
-        User user = sessionMgr.processSuccessLogin();
+        User user = sessionManager.processSuccessLogin();
         context.setUser(user);
       }
       chain.doFilter(request, response);

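The hunk above is part of a broader move from Spring's @Autowired to the standard JSR-330 @Inject annotation (alongside the SessionMgr -> SessionManager rename). A minimal sketch of the pattern, assuming a JSR-330-aware container such as Spring wires the beans; the classes below are illustrative stand-ins, not the actual Log Search types:

import javax.inject.Inject;
import javax.inject.Named;

@Named
class SessionManagerSketch {
  String processSuccessLogin() {
    return "logged-in-user";
  }
}

@Named
class SecurityFilterSketch {
  // Field injection via javax.inject; behaves like Spring's @Autowired
  // when Spring (or another JSR-330 provider) manages both beans.
  @Inject
  private SessionManagerSketch sessionManager;

  String onSuccessfulLogin() {
    return sessionManager.processSuccessLogin();
  }
}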
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
index 0f7377d..3534818 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
@@ -23,7 +23,6 @@ import java.util.HashMap;
 import org.apache.ambari.logsearch.dao.UserDao;
 import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.log4j.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
 import org.springframework.security.core.Authentication;
 import org.springframework.security.core.AuthenticationException;
@@ -31,6 +30,8 @@ import org.springframework.security.core.userdetails.UserDetailsService;
 import org.springframework.security.web.authentication.WebAuthenticationDetails;
 import org.springframework.stereotype.Component;
 
+import javax.inject.Inject;
+
 @Component
 public class LogsearchAuthenticationProvider extends
   LogsearchAbstractAuthenticationProvider {
@@ -39,19 +40,19 @@ public class LogsearchAuthenticationProvider extends
   private static Logger auditLogger = Logger
     .getLogger("org.apache.ambari.logsearch.audit");
 
-  @Autowired
+  @Inject
   UserDao userDao;
 
-  @Autowired
+  @Inject
   LogsearchLdapAuthenticationProvider ldapAuthenticationProvider;
 
-  @Autowired
+  @Inject
   LogsearchFileAuthenticationProvider fileAuthenticationProvider;
 
-  @Autowired
+  @Inject
   LogsearchSimpleAuthenticationProvider simpleAuthenticationProvider;
-  
-  @Autowired
+
+  @Inject
   LogsearchExternalServerAuthenticationProvider externalServerAuthenticationProvider;
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
index 4eea3e1..a06a381 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
@@ -22,14 +22,15 @@ import java.util.ArrayList;
 import java.util.List;
 
 import javax.annotation.PostConstruct;
+import javax.inject.Inject;
 
 import org.apache.ambari.logsearch.common.ExternalServerClient;
 import org.apache.ambari.logsearch.common.PropertiesHelper;
+import org.apache.ambari.logsearch.conf.AuthConfig;
 import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.authentication.BadCredentialsException;
 import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
 import org.springframework.security.core.Authentication;
@@ -95,16 +96,11 @@ public class LogsearchExternalServerAuthenticationProvider extends
     };
   }
 
-  @Autowired
-  ExternalServerClient externalServerClient;
+  @Inject
+  private ExternalServerClient externalServerClient;
 
-  private String loginAPIURL = "/api/v1/users/$USERNAME/privileges?fields=*";// default
-
-  @PostConstruct
-  public void initialization() {
-    loginAPIURL = PropertiesHelper.getProperty(AUTH_METHOD_PROP_START_WITH
-        + "external_auth.login_url", loginAPIURL);
-  }
+  @Inject
+  private AuthConfig authConfig;
 
   /**
    * Authenticating user from external-server using REST call
@@ -134,7 +130,7 @@ public class LogsearchExternalServerAuthenticationProvider extends
     password = StringEscapeUtils.unescapeHtml(password);
     username = StringEscapeUtils.unescapeHtml(username);
     try {
-      String finalLoginUrl = loginAPIURL.replace("$USERNAME", username);
+      String finalLoginUrl = authConfig.getExternalAuthLoginUrl().replace("$USERNAME", username);
       String responseObj = (String) externalServerClient.sendGETRequest(
           finalLoginUrl, String.class, null, username, password);
       if (!isAllowedRole(responseObj)) {

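Besides the @Inject switch, this hunk replaces the @PostConstruct lookup of external_auth.login_url through PropertiesHelper with an injected AuthConfig bean. The AuthConfig class itself is not part of this diff; the sketch below is only a guess at its shape, and the property key and default value are assumptions taken from the removed code:

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

// Hypothetical sketch of a configuration bean like AuthConfig, not the real class.
@Component
public class AuthConfigSketch {

  // Key and default mirror the deleted @PostConstruct logic; the real prefix
  // behind AUTH_METHOD_PROP_START_WITH is not shown in this diff.
  @Value("${logsearch.auth.external_auth.login_url:/api/v1/users/$USERNAME/privileges?fields=*}")
  private String externalAuthLoginUrl;

  public String getExternalAuthLoginUrl() {
    return externalAuthLoginUrl;
  }
}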
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
index 44c31c5..180de31 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
@@ -24,7 +24,6 @@ import org.apache.ambari.logsearch.util.CommonUtil;
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.security.authentication.BadCredentialsException;
 import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
 import org.springframework.security.core.Authentication;
@@ -34,12 +33,14 @@ import org.springframework.security.core.userdetails.UserDetails;
 import org.springframework.security.core.userdetails.UserDetailsService;
 import org.springframework.stereotype.Component;
 
+import javax.inject.Inject;
+
 @Component
 public class LogsearchFileAuthenticationProvider extends LogsearchAbstractAuthenticationProvider {
 
   private static Logger logger = Logger.getLogger(LogsearchFileAuthenticationProvider.class);
 
-  @Autowired
+  @Inject
   private UserDetailsService userDetailsService;
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/scripts/add_config_set.sh
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/scripts/add_config_set.sh b/ambari-logsearch/ambari-logsearch-portal/src/main/scripts/add_config_set.sh
deleted file mode 100755
index e9d3106..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/scripts/add_config_set.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-if [ $# -ne 4 ]; then
-    echo "Usage: $0 <solr_home> <zk_host_with_path> <config_name> <config_folder>"
-    echo "Example: $0 /opt/solr MY_ZKHOST/solr hadoop_logs `dirname $0`/configsets/hadoop_logs"
-    exit 1
-fi
-
-curr_dir=`pwd`
-cd `dirname $0`; script_dir=`pwd`; cd $curr_dir
-
-
-solr_home=$1
-zk_host=$2
-config_name=$3
-config_folder=$4
-
-tmp_folder=/tmp/solr_config_${config_name}_$USER
-rm -rf $tmp_folder
-
-$solr_home/server/scripts/cloud-scripts/zkcli.sh -zkhost $zk_host -cmd downconfig -confdir $tmp_folder -confname $config_name > /dev/null 2>&1 
-
-if [ -d $tmp_folder ]; then
-    echo "Config $config_name already existing. Will not add to zookeeper"
-else
-    echo "Adding config to $config_name to $zk_host"
-    $solr_home/server/scripts/cloud-scripts/zkcli.sh  -zkhost $zk_host -cmd upconfig -confdir $config_folder -confname $config_name
-    echo "Added config to $config_name to $zk_host"
-fi

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/scripts/create_collections.sh
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/scripts/create_collections.sh b/ambari-logsearch/ambari-logsearch-portal/src/main/scripts/create_collections.sh
deleted file mode 100755
index be728aa..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/scripts/create_collections.sh
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-if [ $# -ne 4 ]; then
-    echo "Usage: $0 <solr_home> <number of shards> <number of replications> [configset folder]"
-    exit 1
-fi
-
-curr_dir=`pwd`
-cd `dirname $0`; script_dir=`pwd`; cd $curr_dir
-
-
-solr_home=$1
-shards=$2
-replications=$3
-
-configsets_folder=$4
-if [ "$configsets_folder" = "" ]; then
-    configsets_folder=${script_dir}/solr_configsets
-fi
-
-${solr_home}/bin/solr create -c hadoop_logs -d ${configsets_folder}/hadoop_logs/conf -s ${shards} -rf ${replications}
-${solr_home}/bin/solr create -c history -d ${configsets_folder}/history/conf -s 1 -rf ${shards}
-${solr_home}/bin/solr create -c audit_logs -d ${configsets_folder}/audit_logs/conf -s ${shards} -rf ${replications}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/applicationContext.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/applicationContext.xml b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/applicationContext.xml
deleted file mode 100644
index b457a1d..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/applicationContext.xml
+++ /dev/null
@@ -1,69 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<beans xmlns="http://www.springframework.org/schema/beans"
-xmlns:aop="http://www.springframework.org/schema/aop" xmlns:jee="http://www.springframework.org/schema/jee"
-xmlns:tx="http://www.springframework.org/schema/tx" xmlns:context="http://www.springframework.org/schema/context"
-xmlns:task="http://www.springframework.org/schema/task" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xmlns:util="http://www.springframework.org/schema/util"
-xsi:schemaLocation="http://www.springframework.org/schema/aop
-http://www.springframework.org/schema/aop/spring-aop-4.2.xsd
-http://www.springframework.org/schema/beans
-http://www.springframework.org/schema/beans/spring-beans-4.2.xsd
-http://www.springframework.org/schema/context
-http://www.springframework.org/schema/context/spring-context-4.2.xsd
-http://www.springframework.org/schema/jee
-http://www.springframework.org/schema/jee/spring-jee-4.2.xsd
-http://www.springframework.org/schema/tx
-http://www.springframework.org/schema/tx/spring-tx-4.2.xsd
-http://www.springframework.org/schema/task
-http://www.springframework.org/schema/task/spring-task-4.2.xsd
-http://www.springframework.org/schema/util
-http://www.springframework.org/schema/util/spring-util.xsd">
-
-	<context:component-scan base-package="org.apache.ambari.logsearch" />
-	<task:annotation-driven />
-	<bean id="xmlPropertyConfigurer" class="org.apache.ambari.logsearch.common.XMLPropertiesHelper" />
-	
-	<bean id="propertyConfigurer" class="org.apache.ambari.logsearch.common.PropertiesHelper">
-		<property name="locations">
-			<list>
-				<value>classpath:default.properties</value>
-				<value>classpath:logsearch.properties</value>
-				<value>classpath:logsearch-admin-site.xml</value>
-			</list>
-		</property>
-		<property name="propertiesPersister" ref="xmlPropertyConfigurer" />
-	</bean>
-
-  <bean id="host" class="java.net.InetAddress" factory-method="getLocalHost"/>
-  <bean id="apiListingResource" class="io.swagger.jaxrs.listing.ApiListingResource"/>
-  <bean id="swaggerSerializers" class="io.swagger.jaxrs.listing.SwaggerSerializers" scope="singleton"/>
-  <bean id="beanConfig" class="io.swagger.jaxrs.config.BeanConfig">
-    <property name="schemes" value="http" /> <!-- TODO: set this from property -->
-    <property name="resourcePackage" value="org.apache.ambari.logsearch.rest"/>
-    <property name="version" value="1.0.0"/>
-    <property name="host" value="#{host.hostAddress}:61888"/> <!-- TODO: set port from property -->
-    <property name="basePath" value="/api/v1"/>
-    <property name="title" value="Log Search REST API"/>
-    <property name="description" value="Log aggregation, analysis, and visualization."/>
-    <property name="license" value="Apache 2.0"/>
-    <property name="licenseUrl" value="http://www.apache.org/licenses/LICENSE-2.0.html"/>
-    <property name="scan" value="true"/>
-  </bean>
-	
-</beans>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/security-applicationContext.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/security-applicationContext.xml b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/security-applicationContext.xml
index 233fad4..9961cd0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/security-applicationContext.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/security-applicationContext.xml
@@ -34,6 +34,7 @@ http://www.springframework.org/schema/security/spring-security-4.0.xsd">
     <security:http pattern="/favicon.ico" security="none" />
     <security:http pattern="/api/v1/public/**" security="none" />
     <security:http pattern="/api/v1/swagger.json" security="none"/>
+    <security:http pattern="/api/v1/swagger.yaml" security="none"/>
   	
 	<security:http disable-url-rewriting="true" use-expressions="true" create-session="always" entry-point-ref="authenticationProcessingFilterEntryPoint">
 		<csrf disabled="true"/> 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
index dbe5210..0f1beec 100755
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
@@ -16,21 +16,20 @@
   limitations under the License.
 -->
 <web-app xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://java.sun.com/xml/ns/javaee" xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd" id="WebApp_ID" version="3.0">
-  
- 
-	<display-name>LogSearch</display-name>
-
-	<context-param>
-		<param-name>contextConfigLocation</param-name>
-		<param-value>META-INF/applicationContext.xml
-					META-INF/security-applicationContext.xml
-		</param-value>
-	</context-param>
-
 
-	<listener>
-		<listener-class>org.springframework.web.context.ContextLoaderListener</listener-class>
-	</listener>
+  <listener>
+    <listener-class>org.springframework.web.context.ContextLoaderListener</listener-class>
+  </listener>
+  <context-param>
+    <param-name>contextClass</param-name>
+    <param-value>
+      org.springframework.web.context.support.AnnotationConfigWebApplicationContext
+    </param-value>
+  </context-param>
+  <context-param>
+    <param-name>contextConfigLocation</param-name>
+    <param-value>org.apache.ambari.logsearch.conf.ApplicationConfig</param-value>
+  </context-param>
 
 	<listener>
 		<listener-class>org.springframework.web.context.request.RequestContextListener</listener-class>

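Together with the deleted META-INF/applicationContext.xml above, this web.xml change moves the portal from XML bean definitions to an AnnotationConfigWebApplicationContext rooted at org.apache.ambari.logsearch.conf.ApplicationConfig. That class is not included in this diff; the sketch below only illustrates what such an annotation-driven replacement typically looks like (component scan plus property placeholder), with the Swagger beans from the old XML becoming @Bean methods in the same way:

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;

// Illustrative only -- not the actual ApplicationConfig shipped with Log Search.
@Configuration
@ComponentScan("org.apache.ambari.logsearch")
@PropertySource(value = {"classpath:default.properties", "classpath:logsearch.properties"},
    ignoreResourceNotFound = true)
public class ApplicationConfigSketch {

  // Replaces the XML propertyConfigurer bean so ${...} placeholders keep resolving.
  @Bean
  public static PropertySourcesPlaceholderConfigurer propertyConfigurer() {
    return new PropertySourcesPlaceholderConfigurer();
  }
}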
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VUserFilterBase.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VUserFilterBase.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VUserFilterBase.js
index da299a0..cd469de 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VUserFilterBase.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/model_bases/VUserFilterBase.js
@@ -27,7 +27,7 @@ define(['require',
 	var VUserFilterBase = BaseModel.extend(
 	/** @lends VUserFilterBase.prototype */
 	{
-		urlRoot: Globals.baseURL + 'userconfig/users/filter',
+		urlRoot: Globals.baseURL + 'userconfig/filters',
 
 		defaults: {},
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js
index b8c43bb..2afb4e1 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/ComponentListView.js
@@ -68,7 +68,7 @@ define(['require',
 		                    pageSize: 99999
 		                }
 					});
-					this.componentsList.url = Globals.baseURL + "service/logs/components/level/counts";
+					this.componentsList.url = Globals.baseURL + "service/logs/components/levels/counts";
 					this.hostList = new VNodeList([],{
 						state: {
 		                    firstPage: 0,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js
index 35d0c9a..728d721 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/graphs/GraphLayoutView.js
@@ -84,7 +84,7 @@ define(['require',
                     this.collection.url = Globals.baseURL + "service/logs/histogram";
                     this.collection.modelAttrName = "graphData";
                 } else {
-                    this.collection.url = Globals.baseURL + "audit/logs/linegraph";
+                    this.collection.url = Globals.baseURL + "audit/logs/bargraph";
                     this.collection.modelAttrName = "graphData";
                     this.lineView = true;
                 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/graphs/backup.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/graphs/backup.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/graphs/backup.js
index 8999e01..e250b02 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/graphs/backup.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/graphs/backup.js
@@ -81,7 +81,7 @@ define(['require',
                     this.collection.url = Globals.baseURL + "service/logs/histogram";
                     this.collection.modelAttrName = "graphData";
                 } else {
-                    this.collection.url = Globals.baseURL + "audit/logs/linegraph";
+                    this.collection.url = Globals.baseURL + "audit/logs/bargraph";
                     this.collection.modelAttrName = "graphData";
                     this.lineView = true;
                 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/AuditSolrDaoTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/AuditSolrDaoTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/AuditSolrDaoTest.java
index 0b94b60..017d7a8 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/AuditSolrDaoTest.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/AuditSolrDaoTest.java
@@ -21,18 +21,38 @@ package org.apache.ambari.logsearch.dao;
 
 import java.util.ArrayList;
 
+import org.apache.ambari.logsearch.conf.SolrAuditLogConfig;
+import org.apache.ambari.logsearch.conf.SolrKerberosConfig;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.common.util.NamedList;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
 import org.easymock.EasyMock;
+import org.easymock.EasyMockSupport;
+import org.easymock.Mock;
+import org.easymock.TestSubject;
+import org.junit.Before;
 import org.junit.Test;
 
 import junit.framework.Assert;
 
 public class AuditSolrDaoTest {
 
+  @TestSubject
+  private AuditSolrDao dao = new AuditSolrDao();
+
+  @Mock
+  private SolrAuditLogConfig configMock;
+
+  @Mock
+  private SolrKerberosConfig kerbConfigMock;
+
+  @Before
+  public void setUp() {
+    EasyMockSupport.injectMocks(this);
+  }
+
   @Test
   public void testAuditSolrDaoPostConstructor() throws Exception {
     SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
@@ -42,6 +62,18 @@ public class AuditSolrDaoTest {
     header.add("status", 0);
     response.add("responseHeader", header);
     response.add("collections", new ArrayList<String>());
+
+    EasyMock.expect(configMock.getSolrUrl()).andReturn(null);
+    EasyMock.expect(configMock.getZkConnectString()).andReturn("dummyHost1:2181,dummyHost2:2181");
+    EasyMock.expect(configMock.getConfigName()).andReturn("test_audit_logs_config_name");
+    EasyMock.expect(configMock.getCollection()).andReturn("test_audit_logs_collection");
+    EasyMock.expect(configMock.getSplitInterval()).andReturn("none");
+    EasyMock.expect(configMock.getNumberOfShards()).andReturn(123);
+    EasyMock.expect(configMock.getReplicationFactor()).andReturn(456);
+    EasyMock.expect(configMock.getAliasNameIn()).andReturn("alias");
+    EasyMock.expect(configMock.getRangerCollection()).andReturn("ranger_audit");
+    EasyMock.expect(kerbConfigMock.isEnabled()).andReturn(false);
+    EasyMock.expect(kerbConfigMock.getJaasFile()).andReturn("jaas_file");
     
     Capture<CollectionAdminRequest.Create> captureCreateRequest = EasyMock.newCapture(CaptureType.LAST);
     
@@ -50,9 +82,8 @@ public class AuditSolrDaoTest {
     mockSolrClient.request(EasyMock.capture(captureCreateRequest), EasyMock.anyString());
     EasyMock.expectLastCall().andReturn(response);
     
-    EasyMock.replay(mockSolrClient);
-    
-    AuditSolrDao dao = new AuditSolrDao();
+    EasyMock.replay(mockSolrClient, configMock, kerbConfigMock);
+
     dao.solrClient = mockSolrClient;
     dao.isZkConnectString = true;
     

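The test now relies on EasyMock's annotation-driven wiring instead of constructing the DAO inline: @Mock fields are created and pushed into the @TestSubject by EasyMockSupport.injectMocks(). A self-contained illustration of that pattern, using made-up Config/Service types rather than Log Search classes:

import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.easymock.Mock;
import org.easymock.TestSubject;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

public class InjectMocksSketchTest {

  public static class Config {
    public String name() { return "real"; }
  }

  public static class Service {
    Config config;                       // populated by injectMocks, matched by type
    public String describe() { return config.name(); }
  }

  @TestSubject
  private Service service = new Service();

  @Mock
  private Config configMock;

  @Before
  public void setUp() {
    // Creates a mock for every @Mock field and injects it into the @TestSubject.
    EasyMockSupport.injectMocks(this);
  }

  @Test
  public void injectedMockIsUsed() {
    EasyMock.expect(configMock.name()).andReturn("mocked");
    EasyMock.replay(configMock);

    Assert.assertEquals("mocked", service.describe());

    EasyMock.verify(configMock);
  }
}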
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDaoTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDaoTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDaoTest.java
index 2985a62..bdee8a8 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDaoTest.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDaoTest.java
@@ -21,18 +21,38 @@ package org.apache.ambari.logsearch.dao;
 
 import java.util.ArrayList;
 
+import org.apache.ambari.logsearch.conf.SolrKerberosConfig;
+import org.apache.ambari.logsearch.conf.SolrServiceLogConfig;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.common.util.NamedList;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
 import org.easymock.EasyMock;
+import org.easymock.EasyMockSupport;
+import org.easymock.Mock;
+import org.easymock.TestSubject;
+import org.junit.Before;
 import org.junit.Test;
 
 import junit.framework.Assert;
 
 public class ServiceLogsSolrDaoTest {
 
+  @TestSubject
+  private ServiceLogsSolrDao dao = new ServiceLogsSolrDao();
+
+  @Mock
+  private SolrKerberosConfig kerbConfigMock;
+
+  @Mock
+  private SolrServiceLogConfig configMock;
+
+  @Before
+  public void setUp() {
+    EasyMockSupport.injectMocks(this);
+  }
+
   @Test
   public void testServiceLogsSolrDaoPostConstructor() throws Exception {
     SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
@@ -42,20 +62,29 @@ public class ServiceLogsSolrDaoTest {
     header.add("status", 0);
     response.add("responseHeader", header);
     response.add("collections", new ArrayList<String>());
+
+    EasyMock.expect(configMock.getSolrUrl()).andReturn(null);
+    EasyMock.expect(configMock.getZkConnectString()).andReturn("dummyHost1:2181,dummyHost2:2181");
+    EasyMock.expect(configMock.getConfigName()).andReturn("test_service_logs_config_name");
+    EasyMock.expect(configMock.getCollection()).andReturn("test_service_logs_collection");
+    EasyMock.expect(configMock.getSplitInterval()).andReturn("none");
+    EasyMock.expect(configMock.getNumberOfShards()).andReturn(789);
+    EasyMock.expect(configMock.getReplicationFactor()).andReturn(987);
+    EasyMock.expect(kerbConfigMock.isEnabled()).andReturn(false);
+    EasyMock.expect(kerbConfigMock.getJaasFile()).andReturn("jaas_file");
     
     Capture<CollectionAdminRequest.Create> captureCreateRequest = EasyMock.newCapture(CaptureType.LAST);
     
     EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.List.class), EasyMock.anyString())).andReturn(response);
     mockSolrClient.request(EasyMock.capture(captureCreateRequest), EasyMock.anyString()); EasyMock.expectLastCall().andReturn(response);
     
-    EasyMock.replay(mockSolrClient);
-    
-    ServiceLogsSolrDao dao = new ServiceLogsSolrDao();
+    EasyMock.replay(mockSolrClient, configMock, kerbConfigMock);
+
     dao.solrClient = mockSolrClient;
     dao.isZkConnectString = true;
     
     dao.postConstructor();
-    EasyMock.verify(mockSolrClient);
+    EasyMock.verify(mockSolrClient, configMock, kerbConfigMock);
     
     CollectionAdminRequest.Create createRequest = captureCreateRequest.getValue();
     Assert.assertEquals(createRequest.getConfigName(), "test_service_logs_config_name");

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/SolrDaoBaseTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/SolrDaoBaseTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/SolrDaoBaseTest.java
index ba5b074..22b10c3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/SolrDaoBaseTest.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/SolrDaoBaseTest.java
@@ -24,11 +24,10 @@ import java.util.Arrays;
 
 import javax.ws.rs.WebApplicationException;
 
-import org.apache.ambari.logsearch.manager.MgrBase.LogType;
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.ambari.logsearch.conf.SolrKerberosConfig;
+import org.apache.ambari.logsearch.manager.ManagerBase.LogType;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrRequest.METHOD;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
@@ -38,6 +37,10 @@ import org.apache.solr.client.solrj.response.UpdateResponse;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.util.NamedList;
 import org.easymock.EasyMock;
+import org.easymock.EasyMockSupport;
+import org.easymock.Mock;
+import org.easymock.TestSubject;
+import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -45,14 +48,30 @@ import org.junit.rules.ExpectedException;
 import junit.framework.Assert;
 
 public class SolrDaoBaseTest {
+
+  @TestSubject
+  private SolrDaoBase dao = new SolrDaoBase(LogType.SERVICE) {};
+
   @Rule
   public ExpectedException expectedException = ExpectedException.none();
+
+  @Mock
+  private SolrKerberosConfig kerbConfigMock;
+
+  @Before
+  public void setUp() {
+    EasyMockSupport.injectMocks(this);
+  }
   
   // ----------------------------------------------------------- connectToSolr -----------------------------------------------------------
   
   @Test
   public void testConnectToSolrWithConnectString() throws Exception {
-    SolrDaoBase dao = new SolrDaoBase(null) {};
+    EasyMock.expect(kerbConfigMock.isEnabled()).andReturn(false);
+    EasyMock.expect(kerbConfigMock.getJaasFile()).andReturn("jaas_file");
+
+    EasyMock.replay(kerbConfigMock);
+
     SolrClient solrClient = dao.connectToSolr(null, "zk_connect_string", "collection");
     
     Assert.assertEquals(solrClient.getClass(), CloudSolrClient.class);
@@ -60,7 +79,11 @@ public class SolrDaoBaseTest {
   
   @Test
   public void testConnectToSolrWithUrl() throws Exception {
-    SolrDaoBase dao = new SolrDaoBase(null) {};
+    EasyMock.expect(kerbConfigMock.isEnabled()).andReturn(false);
+    EasyMock.expect(kerbConfigMock.getJaasFile()).andReturn("jaas_file");
+
+    EasyMock.replay(kerbConfigMock);
+
     SolrClient solrClient = dao.connectToSolr("url", null, "collection");
     
     Assert.assertEquals(solrClient.getClass(), HttpSolrClient.class);
@@ -68,7 +91,11 @@ public class SolrDaoBaseTest {
   
   @Test
   public void testConnectToSolrWithBoth() throws Exception {
-    SolrDaoBase dao = new SolrDaoBase(null) {};
+    EasyMock.expect(kerbConfigMock.isEnabled()).andReturn(false);
+    EasyMock.expect(kerbConfigMock.getJaasFile()).andReturn("jaas_file");
+
+    EasyMock.replay(kerbConfigMock);
+
     SolrClient solrClient = dao.connectToSolr("url", "zk_connect_string", "collection");
     
     Assert.assertEquals(solrClient.getClass(), CloudSolrClient.class);
@@ -76,10 +103,14 @@ public class SolrDaoBaseTest {
   
   @Test
   public void testConnectToSolrWithNeither() throws Exception {
+    EasyMock.expect(kerbConfigMock.isEnabled()).andReturn(false);
+    EasyMock.expect(kerbConfigMock.getJaasFile()).andReturn("jaas_file");
+
+    EasyMock.replay(kerbConfigMock);
+
     expectedException.expect(Exception.class);
     expectedException.expectMessage("Both zkConnectString and URL are empty. zkConnectString=null, collection=collection, url=null");
 
-    SolrDaoBase dao = new SolrDaoBase(null) {};
     dao.connectToSolr(null, null, "collection");
   }
   
@@ -88,7 +119,6 @@ public class SolrDaoBaseTest {
     expectedException.expect(Exception.class);
     expectedException.expectMessage("For solr, collection name is mandatory. zkConnectString=zk_connect_string, collection=null, url=url");
 
-    SolrDaoBase dao = new SolrDaoBase(null) {};
     dao.connectToSolr("url", "zk_connect_string", null);
   }
   
@@ -106,8 +136,7 @@ public class SolrDaoBaseTest {
     
     EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.List.class), EasyMock.anyString())).andReturn(response);
     EasyMock.replay(mockSolrClient);
-    
-    SolrDaoBase dao = new SolrDaoBase(null) {};
+
     dao.solrClient = mockSolrClient;
     
     boolean status = dao.checkSolrStatus(10000);
@@ -120,8 +149,7 @@ public class SolrDaoBaseTest {
   public void testCheckSolrStatusNotSuccessful() throws Exception {
     SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
     EasyMock.replay(mockSolrClient);
-    
-    SolrDaoBase dao = new SolrDaoBase(null) {};
+
     dao.solrClient = mockSolrClient;
     
     boolean status = dao.checkSolrStatus(10000);
@@ -148,8 +176,7 @@ public class SolrDaoBaseTest {
     mockSolrClouldClient.setDefaultCollection("alias_name"); EasyMock.expectLastCall();
     
     EasyMock.replay(mockSolrClient, mockSolrClouldClient);
-    
-    SolrDaoBase dao = new SolrDaoBase(null) {};
+
     dao.isZkConnectString = true;
     dao.solrClient = mockSolrClient;
     dao.solrClouldClient = mockSolrClouldClient;
@@ -177,8 +204,7 @@ public class SolrDaoBaseTest {
     EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.List.class), EasyMock.anyString())).andReturn(response);
     EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.Create.class), EasyMock.anyString())).andReturn(response);
     EasyMock.replay(mockSolrClient);
-    
-    SolrDaoBase dao = new SolrDaoBase(null) {};
+
     dao.isZkConnectString = true;
     dao.solrClient = mockSolrClient;
     dao.collectionName = "test_collection";
@@ -201,8 +227,7 @@ public class SolrDaoBaseTest {
     EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.List.class), EasyMock.anyString())).andReturn(response);
     EasyMock.expect(mockSolrClient.request(EasyMock.anyObject(CollectionAdminRequest.Create.class), EasyMock.anyString())).andReturn(response);
     EasyMock.replay(mockSolrClient);
-    
-    SolrDaoBase dao = new SolrDaoBase(null) {};
+
     dao.isZkConnectString = true;
     dao.solrClient = mockSolrClient;
     
@@ -218,8 +243,7 @@ public class SolrDaoBaseTest {
     SolrClient mockSolrClient = EasyMock.strictMock(SolrClient.class);
     EasyMock.expect(mockSolrClient.query(EasyMock.anyObject(SolrQuery.class), EasyMock.eq(METHOD.POST))).andReturn(new QueryResponse());
     EasyMock.replay(mockSolrClient);
-    
-    SolrDaoBase dao = new SolrDaoBase(null) {};
+
     dao.solrClient = mockSolrClient;
     
     dao.process(new SolrQuery());
@@ -230,8 +254,7 @@ public class SolrDaoBaseTest {
   @Test
   public void testProcessNoConnection() throws Exception {
     expectedException.expect(WebApplicationException.class);
-    
-    SolrDaoBase dao = new SolrDaoBase(LogType.SERVICE) {};
+
     dao.process(new SolrQuery());
   }
   
@@ -251,8 +274,7 @@ public class SolrDaoBaseTest {
     EasyMock.expect(mockSolrClient.add(EasyMock.anyObject(SolrInputDocument.class))).andReturn(updateResponse);
     EasyMock.expect(mockSolrClient.commit()).andReturn(updateResponse);
     EasyMock.replay(mockSolrClient);
-    
-    SolrDaoBase dao = new SolrDaoBase(null) {};
+
     dao.solrClient = mockSolrClient;
     
     dao.addDocs(new SolrInputDocument());
@@ -274,8 +296,7 @@ public class SolrDaoBaseTest {
     EasyMock.expect(mockSolrClient.deleteByQuery(EasyMock.anyString())).andReturn(updateResponse);
     EasyMock.expect(mockSolrClient.commit()).andReturn(updateResponse);
     EasyMock.replay(mockSolrClient);
-    
-    SolrDaoBase dao = new SolrDaoBase(null) {};
+
     dao.solrClient = mockSolrClient;
     
     dao.removeDoc("query");

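The connectToSolr tests above pin down a simple selection rule: a ZooKeeper connect string wins and yields a CloudSolrClient, a bare URL falls back to HttpSolrClient, and a missing collection or missing endpoints is an error. A hedged sketch of that rule (not the actual SolrDaoBase implementation), written against the SolrJ 5/6-era constructors the tests imply:

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;

final class SolrClientSelectionSketch {

  static SolrClient connect(String url, String zkConnectString, String collection) throws Exception {
    if (collection == null || collection.isEmpty()) {
      throw new Exception("For solr, collection name is mandatory. zkConnectString="
          + zkConnectString + ", collection=" + collection + ", url=" + url);
    }
    if (zkConnectString != null && !zkConnectString.isEmpty()) {
      // SolrCloud mode: the ZooKeeper connect string takes precedence over the URL.
      CloudSolrClient cloudClient = new CloudSolrClient(zkConnectString);
      cloudClient.setDefaultCollection(collection);
      return cloudClient;
    }
    if (url != null && !url.isEmpty()) {
      // Single-node mode: talk to the collection over plain HTTP.
      return new HttpSolrClient(url + "/" + collection);
    }
    throw new Exception("Both zkConnectString and URL are empty. zkConnectString="
        + zkConnectString + ", collection=" + collection + ", url=" + url);
  }
}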
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserConfigSolrDaoTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserConfigSolrDaoTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserConfigSolrDaoTest.java
index dddbf31..3b01a19 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserConfigSolrDaoTest.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/dao/UserConfigSolrDaoTest.java
@@ -20,7 +20,10 @@
 package org.apache.ambari.logsearch.dao;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 
+import org.apache.ambari.logsearch.conf.SolrKerberosConfig;
+import org.apache.ambari.logsearch.conf.SolrUserConfig;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest.METHOD;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
@@ -32,11 +35,29 @@ import org.apache.solr.common.util.NamedList;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
 import org.easymock.EasyMock;
+import org.easymock.EasyMockSupport;
+import org.easymock.Mock;
+import org.easymock.TestSubject;
+import org.junit.Before;
 import org.junit.Test;
 
 import junit.framework.Assert;
 
 public class UserConfigSolrDaoTest {
+
+  @TestSubject
+  private UserConfigSolrDao dao = new UserConfigSolrDao();
+
+  @Mock
+  private SolrUserConfig configMock;
+
+  @Mock
+  private SolrKerberosConfig kerbConfigMock;
+
+  @Before
+  public void setUp() {
+    EasyMockSupport.injectMocks(this);
+  }
   
   @Test
   public void testUserConfigDaoPostConstructor() throws Exception {
@@ -56,6 +77,17 @@ public class UserConfigSolrDaoTest {
     updateResponseHeader.add("QTime", 1);
     updateResponseContent.add("responseHeader", updateResponseHeader);
     updateResponse.setResponse(updateResponseContent);
+
+    EasyMock.expect(configMock.getSolrUrl()).andReturn(null).times(2);
+    EasyMock.expect(configMock.getZkConnectString()).andReturn("dummyHost1:2181,dummyHost2:2181").times(2);
+    EasyMock.expect(configMock.getConfigName()).andReturn("test_history_logs_config_name").times(2);
+    EasyMock.expect(configMock.getCollection()).andReturn("test_history_logs_collection").times(2);
+    EasyMock.expect(configMock.getSplitInterval()).andReturn("none").times(2);
+    EasyMock.expect(configMock.getNumberOfShards()).andReturn(123).times(2);
+    EasyMock.expect(configMock.getReplicationFactor()).andReturn(234).times(2);
+    EasyMock.expect(configMock.getLogLevels()).andReturn(Arrays.asList("TRACE")).times(2);
+    EasyMock.expect(kerbConfigMock.isEnabled()).andReturn(false).times(2);
+    EasyMock.expect(kerbConfigMock.getJaasFile()).andReturn("jaas_file").times(2);
     
     Capture<CollectionAdminRequest.Create> captureCreateRequest = EasyMock.newCapture(CaptureType.LAST);
     Capture<SolrParams> captureSolrParams = EasyMock.newCapture(CaptureType.LAST);
@@ -67,9 +99,8 @@ public class UserConfigSolrDaoTest {
     mockSolrClient.query(EasyMock.capture(captureSolrParams), EasyMock.capture(captureMethod)); EasyMock.expectLastCall().andReturn(queryResponse);
     mockSolrClient.add(EasyMock.capture(captureSolrInputDocument)); EasyMock.expectLastCall().andReturn(updateResponse);
     EasyMock.expect(mockSolrClient.commit()).andReturn(updateResponse);
-    EasyMock.replay(mockSolrClient);
-    
-    UserConfigSolrDao dao = new UserConfigSolrDao();
+    EasyMock.replay(mockSolrClient, configMock, kerbConfigMock);
+
     dao.postConstructor();
     dao.solrClient = mockSolrClient;
     dao.isZkConnectString = true;
@@ -92,7 +123,7 @@ public class UserConfigSolrDaoTest {
     SolrInputDocument solrInputDocument = captureSolrInputDocument.getValue();
     Assert.assertNotNull(solrInputDocument.getFieldValue("id"));
     Assert.assertEquals(solrInputDocument.getFieldValue("rowtype"), "log_feeder_config");
-    Assert.assertEquals(solrInputDocument.getFieldValue("jsons"), "{\"filter\":{\"test_component2\":{\"label\":\"test_component2\",\"hosts\":[],\"defaultLevels\":[\"FATAL\",\"ERROR\",\"WARN\",\"INFO\",\"DEBUG\",\"TRACE\"],\"overrideLevels\":[]},\"test_component1\":{\"label\":\"test_component1\",\"hosts\":[],\"defaultLevels\":[\"FATAL\",\"ERROR\",\"WARN\",\"INFO\",\"DEBUG\",\"TRACE\"],\"overrideLevels\":[]}},\"id\":\"" + solrInputDocument.getFieldValue("id") + "\"}");
+    Assert.assertEquals(solrInputDocument.getFieldValue("jsons"), "{\"filter\":{\"test_component2\":{\"label\":\"test_component2\",\"hosts\":[],\"defaultLevels\":[\"TRACE\"],\"overrideLevels\":[]},\"test_component1\":{\"label\":\"test_component1\",\"hosts\":[],\"defaultLevels\":[\"TRACE\"],\"overrideLevels\":[]}},\"id\":\"" + solrInputDocument.getFieldValue("id") + "\"}");
     Assert.assertEquals(solrInputDocument.getFieldValue("username"), "log_feeder_config");
     Assert.assertEquals(solrInputDocument.getFieldValue("filtername"), "log_feeder_config");
   }
@@ -107,18 +138,27 @@ public class UserConfigSolrDaoTest {
     header.add("QTime", 1);
     response.add("responseHeader", header);
     updateResponse.setResponse(response);
+
+    EasyMock.expect(configMock.getSolrUrl()).andReturn(null);
+    EasyMock.expect(configMock.getZkConnectString()).andReturn("dummyHost1:2181,dummyHost2:2181");
+    EasyMock.expect(configMock.getConfigName()).andReturn("test_history_logs_config_name");
+    EasyMock.expect(configMock.getCollection()).andReturn("test_history_logs_collection");
+    EasyMock.expect(configMock.getSplitInterval()).andReturn("none");
+    EasyMock.expect(configMock.getNumberOfShards()).andReturn(123);
+    EasyMock.expect(configMock.getReplicationFactor()).andReturn(234);
+    EasyMock.expect(kerbConfigMock.isEnabled()).andReturn(false);
+    EasyMock.expect(kerbConfigMock.getJaasFile()).andReturn("jaas_file");
     
     EasyMock.expect(mockSolrClient.deleteByQuery("id:test_id")).andReturn(updateResponse);
     EasyMock.expect(mockSolrClient.commit()).andReturn(updateResponse);
-    EasyMock.replay(mockSolrClient);
-    
-    UserConfigSolrDao dao = new UserConfigSolrDao();
+    EasyMock.replay(mockSolrClient, configMock, kerbConfigMock);
+
     dao.postConstructor();
     dao.solrClient = mockSolrClient;
     dao.isZkConnectString = true;
     
     dao.deleteUserConfig("test_id");
     
-    EasyMock.verify(mockSolrClient);
+    EasyMock.verify(mockSolrClient, configMock, kerbConfigMock);
   }
 }


[16/50] [abbrv] ambari git commit: AMBARI-18227. Add unit tests for Log Search components and refactor them as needed - Vol 1. (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
index 257f04d..9baccce 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
@@ -36,7 +36,6 @@ import java.util.TimeZone;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CopyOnWriteArrayList;
 
-import javax.servlet.http.HttpServletRequest;
 import javax.ws.rs.core.Response;
 
 import org.apache.ambari.logsearch.common.LogSearchConstants;
@@ -62,6 +61,8 @@ import org.apache.ambari.logsearch.view.VNode;
 import org.apache.ambari.logsearch.view.VNodeList;
 import org.apache.ambari.logsearch.view.VSolrLogList;
 import org.apache.ambari.logsearch.view.VSummary;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.time.DateUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -83,29 +84,24 @@ import com.google.common.collect.Lists;
 
 @Component
 public class LogsMgr extends MgrBase {
-  private static Logger logger = Logger.getLogger(LogsMgr.class);
+  private static final Logger logger = Logger.getLogger(LogsMgr.class);
 
-  public static List<String> cancelByDate = new CopyOnWriteArrayList<String>();
+  private static List<String> cancelByDate = new CopyOnWriteArrayList<String>();
 
-  public static Map<String, String> mapUniqueId = new ConcurrentHashMap<String, String>();
-
-  public static enum CONDITION {
+  private static Map<String, String> mapUniqueId = new ConcurrentHashMap<String, String>();
+  
+  private static enum CONDITION {
     OR, AND
   }
 
   @Autowired
-  ServiceLogsSolrDao serviceLogsSolrDao;
-
+  private ServiceLogsSolrDao serviceLogsSolrDao;
   @Autowired
-  BizUtil bizUtil;
-
+  private BizUtil bizUtil;
   @Autowired
-  FileUtil fileUtil;
-
-
+  private FileUtil fileUtil;
   @Autowired
-  GraphDataGenerator graphDataGenerator;
-
+  private GraphDataGenerator graphDataGenerator;
 
   public String searchLogs(SearchCriteria searchCriteria) {
     String keyword = (String) searchCriteria.getParamValue("keyword");
@@ -113,7 +109,7 @@ public class LogsMgr extends MgrBase {
     String lastPage = (String)  searchCriteria.getParamValue("isLastPage");
     Boolean isLastPage = Boolean.parseBoolean(lastPage);
 
-    if (!stringUtil.isEmpty(keyword)) {
+    if (!StringUtils.isBlank(keyword)) {
       try {
         return getPageByKeyword(searchCriteria);
       } catch (SolrException | SolrServerException e) {
@@ -121,7 +117,7 @@ public class LogsMgr extends MgrBase {
         throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
             .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
       }
-    } else if (!stringUtil.isEmpty(logId)) {
+    } else if (!StringUtils.isBlank(logId)) {
       try {
         return getPageByLogId(searchCriteria);
       } catch (SolrException e) {
@@ -148,11 +144,11 @@ public class LogsMgr extends MgrBase {
     }
   }
 
-  public String getHosts(SearchCriteria searchCriteria) {
-    return getFields(searchCriteria, LogSearchConstants.SOLR_HOST);
+  public String getHosts() {
+    return getFields(LogSearchConstants.SOLR_HOST);
   }
-
-  public String getFields(SearchCriteria searchCriteria,String field){
+  
+  private String getFields(String field){
 
     SolrQuery solrQuery = new SolrQuery();
     VGroupList collection = new VGroupList();
@@ -200,8 +196,8 @@ public class LogsMgr extends MgrBase {
 
   }
 
-  public String getComponents(SearchCriteria searchCriteria) {
-    return getFields(searchCriteria, LogSearchConstants.SOLR_COMPONENT);
+  public String getComponents() {
+    return getFields(LogSearchConstants.SOLR_COMPONENT);
   }
 
   public String getAggregatedInfo(SearchCriteria searchCriteria) {
@@ -255,7 +251,7 @@ public class LogsMgr extends MgrBase {
     return logList;
   }
 
-  public VCountList getFieldCount(SearchCriteria searchCriteria, String field){
+  public VCountList getFieldCount(String field){
     VCountList collection = new VCountList();
     List<VCount> vCounts = new ArrayList<VCount>();
     SolrQuery solrQuery = new SolrQuery();
@@ -297,17 +293,17 @@ public class LogsMgr extends MgrBase {
     collection.setCounts(vCounts);
     return collection;
   }
-
-  public VCountList getLogLevelCount(SearchCriteria searchCriteria) {
-    return getFieldCount(searchCriteria, LogSearchConstants.SOLR_LEVEL);
+  
+  public VCountList getLogLevelCount() {
+    return getFieldCount(LogSearchConstants.SOLR_LEVEL);
   }
 
-  public VCountList getComponentsCount(SearchCriteria searchCriteria) {
-    return getFieldCount(searchCriteria, LogSearchConstants.SOLR_COMPONENT);
+  public VCountList getComponentsCount() {
+    return getFieldCount(LogSearchConstants.SOLR_COMPONENT);
   }
 
-  public VCountList getHostsCount(SearchCriteria searchCriteria) {
-    return getFieldCount(searchCriteria, LogSearchConstants.SOLR_HOST);
+  public VCountList getHostsCount() {
+    return getFieldCount(LogSearchConstants.SOLR_HOST);
   }
 
   public List<VNode> buidTreeData(List<PivotField> pivotFields,
@@ -322,13 +318,13 @@ public class LogsMgr extends MgrBase {
           VNode hostNode = new VNode();
           String name = (pivotHost.getValue() == null ? "" : ""+ pivotHost.getValue());
           String value = "" + pivotHost.getCount();
-          if(!stringUtil.isEmpty(name)){
+          if(!StringUtils.isBlank(name)){
             hostNode.setName(name);
           }
-          if(!stringUtil.isEmpty(value)){
+          if(!StringUtils.isBlank(value)){
             hostNode.setValue(value);
           }
-          if(!stringUtil.isEmpty(firstPriority)){
+          if(!StringUtils.isBlank(firstPriority)){
             hostNode.setType(firstPriority);
           }
 
@@ -336,7 +332,7 @@ public class LogsMgr extends MgrBase {
           hostNode.setRoot(true);
           PivotField hostPivot = null;
           for (PivotField searchHost : pivotFieldHost) {
-            if (!stringUtil.isEmpty(hostNode.getName())
+            if (!StringUtils.isBlank(hostNode.getName())
                 && hostNode.getName().equals(searchHost.getValue())) {
               hostPivot = searchHost;
               break;
@@ -369,7 +365,7 @@ public class LogsMgr extends MgrBase {
                 String compName = (pivotComp.getValue() == null ? "" : ""
                     + pivotComp.getValue());
                 compNode.setName(compName);
-                if (!stringUtil.isEmpty(secondPriority)) {
+                if (!StringUtils.isBlank(secondPriority)) {
                   compNode.setType(secondPriority);
                 }
                 compNode.setValue("" + pivotComp.getCount());
@@ -414,7 +410,7 @@ public class LogsMgr extends MgrBase {
     String hostName = ""
       + ((searchCriteria.getParamValue("hostName") == null) ? ""
       : searchCriteria.getParamValue("hostName"));
-    if (!stringUtil.isEmpty(hostName)){
+    if (!StringUtils.isBlank(hostName)){
       solrQuery.addFilterQuery(LogSearchConstants.SOLR_HOST + ":*"
         + hostName + "*");
     }
@@ -478,7 +474,7 @@ public class LogsMgr extends MgrBase {
     String componentName = ""
       + ((searchCriteria.getParamValue("componentName") == null) ? ""
       : searchCriteria.getParamValue("componentName"));
-    if (!stringUtil.isEmpty(componentName)){
+    if (!StringUtils.isBlank(componentName)){
       solrQuery.addFilterQuery(LogSearchConstants.SOLR_COMPONENT + ":"
         + componentName);
     } else {
@@ -552,7 +548,7 @@ public class LogsMgr extends MgrBase {
       for (String level : LogSearchConstants.SUPPORTED_LOG_LEVEL) {
         VNameValue nameValue = new VNameValue();
         String value = map.get(level);
-        if (stringUtil.isEmpty(value)) {
+        if (StringUtils.isBlank(value)) {
           value = defalutValue;
         }
         nameValue.setName(level);
@@ -590,7 +586,7 @@ public class LogsMgr extends MgrBase {
     String defaultChoice = "0";
 
     String key = (String) searchCriteria.getParamValue("keyword");
-    if(stringUtil.isEmpty(key)){
+    if(StringUtils.isBlank(key)){
       throw restErrorUtil.createRESTException("Keyword was not given",
           MessageEnums.DATA_NOT_FOUND);
     }
@@ -651,7 +647,7 @@ public class LogsMgr extends MgrBase {
         nextPageLogID = ""
           + solrDoc.get(LogSearchConstants.ID);
 
-        if (stringUtil.isEmpty(nextPageLogID)){
+        if (StringUtils.isBlank(nextPageLogID)){
           nextPageLogID = "0";
         }
 
@@ -698,13 +694,13 @@ public class LogsMgr extends MgrBase {
         logTimeThroughRangeQuery.remove("start");
         logTimeThroughRangeQuery.remove("rows");
         logTimeThroughRangeQuery.setRows(1);
-        if (!stringUtil.isEmpty(filterQueryListIds)){
+        if (!StringUtils.isBlank(filterQueryListIds)){
           logTimeThroughRangeQuery.setFilterQueries(filterQueryListIds);
         }
 
         String sortByType = searchCriteria.getSortType();
 
-        if (!stringUtil.isEmpty(sortByType) && sortByType
+        if (!StringUtils.isBlank(sortByType) && sortByType
           .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
 
           queryGenerator.setSingleRangeFilter(logTimeThroughRangeQuery,
@@ -759,16 +755,16 @@ public class LogsMgr extends MgrBase {
         rangeLogQuery.remove("start");
         rangeLogQuery.remove("rows");
 
-        if (!stringUtil.isEmpty(sortByType) && sortByType
+        if (!StringUtils.isBlank(sortByType) && sortByType
           .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
-          keywordLogDate = dateUtil.addMilliSecondsToDate(keywordLogDate, 1);
+          keywordLogDate = DateUtils.addMilliseconds(keywordLogDate, 1);
           String keywordDateTime = dateUtil
             .convertDateWithMillisecondsToSolrDate(keywordLogDate);
           queryGenerator.setSingleRangeFilter(rangeLogQuery,
             LogSearchConstants.LOGTIME, startTime,
             keywordDateTime);
         } else {
-          keywordLogDate = dateUtil.addMilliSecondsToDate(keywordLogDate, -1);
+          keywordLogDate = DateUtils.addMilliseconds(keywordLogDate, -1);
           String keywordDateTime = dateUtil
             .convertDateWithMillisecondsToSolrDate(keywordLogDate);
           queryGenerator.setSingleRangeFilter(rangeLogQuery,
@@ -795,8 +791,8 @@ public class LogsMgr extends MgrBase {
             String id = (String) solrDocumenent
               .getFieldValue(LogSearchConstants.ID);
             countNumberLogs++;
-
-            if (stringUtil.isEmpty(id) && id.equals(keywordId)){
+           
+            if (StringUtils.isBlank(id) && id.equals(keywordId)){
               break;
             }
           }
@@ -910,11 +906,11 @@ public class LogsMgr extends MgrBase {
         logTimeThroughRangeQuery.setRows(1);
         queryGenerator.setSingleExcludeFilter(logTimeThroughRangeQuery,
           LogSearchConstants.ID, lastLogsLogId);
-        if (!stringUtil.isEmpty(filterQueryListIds)){
+        if (!StringUtils.isBlank(filterQueryListIds)){
           logTimeThroughRangeQuery.setFilterQueries(filterQueryListIds);
         }
 
-        if (!stringUtil.isEmpty(sortByType) && sortByType
+        if (!StringUtils.isBlank(sortByType) && sortByType
           .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
 
           logTimeThroughRangeQuery.remove(LogSearchConstants.SORT);
@@ -974,7 +970,7 @@ public class LogsMgr extends MgrBase {
         rangeLogQuery.remove("start");
         rangeLogQuery.remove("rows");
 
-        if (!stringUtil.isEmpty(sortByType) && sortByType
+        if (!StringUtils.isBlank(sortByType) && sortByType
           .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
        //   keywordLogDate = dateUtil.addMilliSecondsToDate(keywordLogDate, 1);
           String keywordDateTime = dateUtil
@@ -1010,7 +1006,7 @@ public class LogsMgr extends MgrBase {
               String id = (String) solrDocumenent
                   .getFieldValue(LogSearchConstants.ID);
               countNumberLogs++;
-              if ( stringUtil.isEmpty(id) && id.equals(keywordId)) {
+              if ( StringUtils.isBlank(id) && id.equals(keywordId)) {
                 break;
               }
             }
@@ -1039,13 +1035,13 @@ public class LogsMgr extends MgrBase {
   private String getPageByLogId(SearchCriteria searchCriteria) {
     VSolrLogList vSolrLogList = new VSolrLogList();
     String endLogTime = (String) searchCriteria.getParamValue("to");
-    if(stringUtil.isEmpty(endLogTime)){
+    if(StringUtils.isBlank(endLogTime)){
       return convertObjToString(vSolrLogList);
     }
     long startIndex = 0l;
 
     String logId = (String) searchCriteria.getParamValue("sourceLogId");
-    if(stringUtil.isEmpty(logId)){
+    if(StringUtils.isBlank(logId)){
       return convertObjToString(vSolrLogList);
     }
     SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
@@ -1078,7 +1074,7 @@ public class LogsMgr extends MgrBase {
 
       if (dateOfLogId != null) {
         logTime = dateUtil.convertDateWithMillisecondsToSolrDate(dateOfLogId);
-        Date endDate = dateUtil.addMilliSecondsToDate(dateOfLogId, 1);
+        Date endDate = DateUtils.addMilliseconds(dateOfLogId, 1);
         endTimeMinusOneMilli = (String) dateUtil
             .convertDateWithMillisecondsToSolrDate(endDate);
       }
@@ -1111,7 +1107,7 @@ public class LogsMgr extends MgrBase {
         String id = (String) solrDocumenent
             .getFieldValue(LogSearchConstants.ID);
         startIndex++;
-        if (!stringUtil.isEmpty(id)) {
+        if (!StringUtils.isBlank(id)) {
           if (id.equals(logId)) {
             break;
           }
@@ -1280,7 +1276,7 @@ public class LogsMgr extends MgrBase {
   }
 
   public String cancelFindRequestByDate(String uniqueId) {
-    if (stringUtil.isEmpty(uniqueId)) {
+    if (StringUtils.isEmpty(uniqueId)) {
       logger.error("Unique id is Empty");
       throw restErrorUtil.createRESTException("Unique id is Empty",
         MessageEnums.DATA_NOT_FOUND);
@@ -1294,7 +1290,7 @@ public class LogsMgr extends MgrBase {
   }
 
   public boolean cancelRequest(String uniqueId) {
-    if (stringUtil.isEmpty(uniqueId)) {
+    if (StringUtils.isBlank(uniqueId)) {
       logger.error("Unique id is Empty");
       throw restErrorUtil.createRESTException("Unique id is Empty",
         MessageEnums.DATA_NOT_FOUND);
@@ -1317,8 +1313,8 @@ public class LogsMgr extends MgrBase {
 
     format = defaultFormat.equalsIgnoreCase(format) && format != null ? ".txt"
         : ".json";
-
-    if(stringUtil.isEmpty(utcOffset)){
+    
+    if(StringUtils.isBlank(utcOffset)){
       utcOffset = "0";
     }
 
@@ -1368,7 +1364,7 @@ public class LogsMgr extends MgrBase {
       vsummary.setTo(to);
 
       String includeString = (String) searchCriteria.getParamValue("iMessage");
-      if (stringUtil.isEmpty(includeString)) {
+      if (StringUtils.isBlank(includeString)) {
         includeString = "";
       }
 
@@ -1378,7 +1374,7 @@ public class LogsMgr extends MgrBase {
         includeString = includeString + ",\"" + inc + "\"";
       }
       includeString = includeString.replaceFirst(",", "");
-      if (!stringUtil.isEmpty(includeString)) {
+      if (!StringUtils.isBlank(includeString)) {
         vsummary.setIncludeString(includeString);
       }
 
@@ -1386,7 +1382,7 @@ public class LogsMgr extends MgrBase {
       boolean isNormalExcluded = false;
 
       excludeString = (String) searchCriteria.getParamValue("eMessage");
-      if (stringUtil.isEmpty(excludeString)) {
+      if (StringUtils.isBlank(excludeString)) {
         excludeString = "";
       }
 
@@ -1396,14 +1392,14 @@ public class LogsMgr extends MgrBase {
       }
 
       excludeString = excludeString.replaceFirst(",", "");
-      if (!stringUtil.isEmpty(excludeString)) {
+      if (!StringUtils.isBlank(excludeString)) {
         vsummary.setExcludeString(excludeString);
         isNormalExcluded = true;
       }
 
       String globalExcludeString = (String) searchCriteria
           .getParamValue("gEMessage");
-      if (stringUtil.isEmpty(globalExcludeString)) {
+      if (StringUtils.isBlank(globalExcludeString)) {
         globalExcludeString = "";
       }
 
@@ -1414,7 +1410,7 @@ public class LogsMgr extends MgrBase {
         excludeString = excludeString + ",\"" + exc + "\"";
       }
 
-      if (!stringUtil.isEmpty(excludeString)) {
+      if (!StringUtils.isBlank(excludeString)) {
         if (!isNormalExcluded) {
           excludeString = excludeString.replaceFirst(",", "");
         }
@@ -1513,7 +1509,7 @@ public class LogsMgr extends MgrBase {
     try {
       String bundelId = (String) searchCriteria
         .getParamValue(LogSearchConstants.BUNDLE_ID);
-      if(stringUtil.isEmpty(bundelId)){
+      if(StringUtils.isBlank(bundelId)){
         bundelId = "";
       }
 
@@ -1709,18 +1705,18 @@ public class LogsMgr extends MgrBase {
     SolrDocumentList docList = null;
     String id = (String) searchCriteria
       .getParamValue(LogSearchConstants.ID);
-    if (stringUtil.isEmpty(id)) {
+    if (StringUtils.isBlank(id)) {
       return convertObjToString(vSolrLogList);
 
     }
     String maxRows = "";
 
     maxRows = (String) searchCriteria.getParamValue("numberRows");
-    if (stringUtil.isEmpty(maxRows)){
+    if (StringUtils.isBlank(maxRows)){
       maxRows = ""+maxRows;
     }
     String scrollType = (String) searchCriteria.getParamValue("scrollType");
-    if(stringUtil.isEmpty(scrollType)){
+    if(StringUtils.isBlank(scrollType)){
       scrollType = "";
     }
 
@@ -1744,7 +1740,7 @@ public class LogsMgr extends MgrBase {
           + docList.get(0).getFieldValue(
           LogSearchConstants.SEQUNCE_ID);
       }
-      if (stringUtil.isEmpty(logTime)) {
+      if (StringUtils.isBlank(logTime)) {
         return convertObjToString(vSolrLogList);
       }
     } catch (SolrServerException | SolrException | IOException e) {
@@ -1874,10 +1870,10 @@ public class LogsMgr extends MgrBase {
     TimeZone gmtTimeZone = TimeZone.getTimeZone("GMT");
     GregorianCalendar utc = new GregorianCalendar(gmtTimeZone);
     utc.setTimeInMillis(new Date().getTime());
-    utc.set(GregorianCalendar.HOUR, 0);
-    utc.set(GregorianCalendar.MINUTE, 0);
-    utc.set(GregorianCalendar.MILLISECOND, 001);
-    utc.set(GregorianCalendar.SECOND, 0);
+    utc.set(Calendar.HOUR, 0);
+    utc.set(Calendar.MINUTE, 0);
+    utc.set(Calendar.MILLISECOND, 001);
+    utc.set(Calendar.SECOND, 0);
     dateUtil.convertDateWithMillisecondsToSolrDate(utc.getTime());
     String from = dateUtil.convertDateWithMillisecondsToSolrDate(utc.getTime());
     utc.set(Calendar.MILLISECOND, 999);
@@ -1889,7 +1885,7 @@ public class LogsMgr extends MgrBase {
         LogSearchConstants.LOGTIME, from,to);
     String level = LogSearchConstants.FATAL+","+LogSearchConstants.ERROR+","+LogSearchConstants.WARN;
     queryGenerator.setFilterClauseWithFieldName(solrQuery, level,
-        LogSearchConstants.SOLR_LEVEL, "", QueryGenerationBase.CONDITION.OR);
+        LogSearchConstants.SOLR_LEVEL, "", QueryGenerationBase.Condition.OR);
     try {
       serviceLogsSolrDao.process(solrQuery);
     } catch (SolrServerException | IOException e) {

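For reference, a minimal standalone sketch (not part of this patch) of the commons-lang helpers the LogsMgr changes above switch to. Unlike the old stringUtil.isEmpty, StringUtils.isBlank also treats whitespace-only strings as empty, and DateUtils.addMilliseconds replaces dateUtil.addMilliSecondsToDate; class and package names below are from commons-lang 2.x, which the diff imports elsewhere as org.apache.commons.lang.StringUtils.

import java.util.Date;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateUtils;

public class CommonsLangSketch {
  public static void main(String[] args) {
    // isEmpty only checks null / zero length; isBlank also catches whitespace-only input
    System.out.println(StringUtils.isEmpty("  "));  // false
    System.out.println(StringUtils.isBlank("  "));  // true

    // DateUtils.addMilliseconds returns a new Date shifted by the given amount
    Date now = new Date();
    Date plusOne = DateUtils.addMilliseconds(now, 1);
    System.out.println(plusOne.getTime() - now.getTime());  // 1
  }
}
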
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
index 99eb9f2..02e2e69 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
@@ -35,8 +35,8 @@ import org.apache.ambari.logsearch.util.DateUtil;
 import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.ambari.logsearch.util.SolrUtil;
-import org.apache.ambari.logsearch.util.StringUtil;
 import org.apache.ambari.logsearch.view.VSolrLogList;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -57,53 +57,46 @@ import com.google.gson.JsonSerializationContext;
 import com.google.gson.JsonSerializer;
 
 public class MgrBase {
-  static private Logger logger = Logger.getLogger(MgrBase.class);
+  private static final Logger logger = Logger.getLogger(MgrBase.class);
 
   @Autowired
-  SolrUtil solrUtil;
+  protected SolrUtil solrUtil;
 
   @Autowired
-  JSONUtil jsonUtil;
+  protected JSONUtil jsonUtil;
 
   @Autowired
-  QueryGeneration queryGenerator;
+  protected QueryGeneration queryGenerator;
 
   @Autowired
-  StringUtil stringUtil;
+  protected RESTErrorUtil restErrorUtil;
 
   @Autowired
-  RESTErrorUtil restErrorUtil;
+  protected DateUtil dateUtil;
 
-  @Autowired
-  DateUtil dateUtil;
-
-  JsonSerializer<Date> jsonDateSerialiazer = null;
-  JsonDeserializer<Date> jsonDateDeserialiazer = null;
+  private JsonSerializer<Date> jsonDateSerialiazer = null;
+  private JsonDeserializer<Date> jsonDateDeserialiazer = null;
 
-  public enum LOG_TYPE {
-    SERVICE {
-      @Override
-      public String getLabel() {
-        return "Service";
-      }
-    },
-    AUDIT {
-      @Override
-      public String getLabel() {
-        return "Audit";
-      }
-    };
-    public abstract String getLabel();
+  public enum LogType {
+    SERVICE("Service"),
+    AUDIT("Audit");
+    
+    private String label;
+    
+    private LogType(String label) {
+      this.label = label;
+    }
+    
+    public String getLabel() {
+      return label;
+    }
   }
 
   public MgrBase() {
     jsonDateSerialiazer = new JsonSerializer<Date>() {
 
       @Override
-      public JsonElement serialize(Date paramT,
-          java.lang.reflect.Type paramType,
-          JsonSerializationContext paramJsonSerializationContext) {
-
+      public JsonElement serialize(Date paramT, java.lang.reflect.Type paramType, JsonSerializationContext paramJsonSerializationContext) {
         return paramT == null ? null : new JsonPrimitive(paramT.getTime());
       }
     };
@@ -111,15 +104,15 @@ public class MgrBase {
     jsonDateDeserialiazer = new JsonDeserializer<Date>() {
 
       @Override
-      public Date deserialize(JsonElement json, java.lang.reflect.Type typeOfT,
-          JsonDeserializationContext context) throws JsonParseException {
+      public Date deserialize(JsonElement json, java.lang.reflect.Type typeOfT, JsonDeserializationContext context)
+          throws JsonParseException {
         return json == null ? null : new Date(json.getAsLong());
       }
 
     };
   }
 
-  public String convertObjToString(Object obj) {
+  protected String convertObjToString(Object obj) {
     if (obj == null) {
       return "";
     }
@@ -136,8 +129,7 @@ public class MgrBase {
 
     // Get file from resources folder
     ClassLoader classLoader = getClass().getClassLoader();
-    File file = new File(classLoader.getResource("HadoopServiceConfig.json")
-        .getFile());
+    File file = new File(classLoader.getResource("HadoopServiceConfig.json").getFile());
 
     try (Scanner scanner = new Scanner(file)) {
 
@@ -150,26 +142,25 @@ public class MgrBase {
 
     } catch (IOException e) {
       logger.error("Unable to read HadoopServiceConfig.json", e);
-      throw restErrorUtil.createRESTException(e.getMessage(),
-          MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(e.getMessage(), MessageEnums.ERROR_SYSTEM);
     }
 
     String hadoopServiceConfig = result.toString();
     if (jsonUtil.isJSONValid(hadoopServiceConfig)) {
       return hadoopServiceConfig;
     }
-    throw restErrorUtil.createRESTException("Improper JSON",
-        MessageEnums.ERROR_SYSTEM);
+    throw restErrorUtil.createRESTException("Improper JSON", MessageEnums.ERROR_SYSTEM);
 
   }
   
-  protected VSolrLogList getLastPage(SearchCriteria searchCriteria,String logTimeField,SolrDaoBase solrDoaBase,SolrQuery lastPageQuery){
+  protected VSolrLogList getLastPage(SearchCriteria searchCriteria, String logTimeField, SolrDaoBase solrDoaBase,
+      SolrQuery lastPageQuery) {
     
     Integer maxRows = searchCriteria.getMaxRows();
     String givenSortType = searchCriteria.getSortType();
     searchCriteria = new SearchCriteria();
     searchCriteria.setSortBy(logTimeField);
-    if(givenSortType == null || givenSortType.equals(LogSearchConstants.DESCENDING_ORDER)){
+    if (givenSortType == null || givenSortType.equals(LogSearchConstants.DESCENDING_ORDER)) {
       lastPageQuery.removeSort(LogSearchConstants.LOGTIME);
       searchCriteria.setSortType(LogSearchConstants.ASCENDING_ORDER);
     } else {
@@ -185,8 +176,7 @@ public class MgrBase {
     try {
       queryGenerator.setStart(lastPageQuery, 0);
       queryGenerator.setRowCount(lastPageQuery, maxRows);
-      collection = getLogAsPaginationProvided(lastPageQuery,
-          solrDoaBase);
+      collection = getLogAsPaginationProvided(lastPageQuery, solrDoaBase);
       totalLogs = countQuery(lastPageQuery,solrDoaBase);
       if(maxRows != null){
         startIndex = Integer.parseInt("" + ((totalLogs/maxRows) * maxRows));
@@ -209,14 +199,12 @@ public class MgrBase {
 
     } catch (SolrException | SolrServerException | IOException | NumberFormatException e) {
       logger.error("Count Query was not executed successfully",e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
     return collection;
   }
 
-  public VSolrLogList getLogAsPaginationProvided(SolrQuery solrQuery,
-      SolrDaoBase solrDaoBase) {
+  protected VSolrLogList getLogAsPaginationProvided(SolrQuery solrQuery, SolrDaoBase solrDaoBase) {
     try {
       QueryResponse response = solrDaoBase.process(solrQuery);
       VSolrLogList collection = new VSolrLogList();
@@ -235,14 +223,11 @@ public class MgrBase {
       return collection;
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
-
   }
   
-  public Long countQuery(SolrQuery query,SolrDaoBase solrDaoBase) throws SolrException,
- SolrServerException, IOException {
+  protected Long countQuery(SolrQuery query,SolrDaoBase solrDaoBase) throws SolrException, SolrServerException, IOException {
     query.setRows(0);
     QueryResponse response = solrDaoBase.process(query);
     if (response == null) {
@@ -256,14 +241,14 @@ public class MgrBase {
   }
 
   protected String getUnit(String unit) {
-    if (stringUtil.isEmpty(unit)) {
+    if (StringUtils.isBlank(unit)) {
       unit = "+1HOUR";
     }
     return unit;
   }
 
   protected String getFrom(String from) {
-    if (stringUtil.isEmpty(from)) {
+    if (StringUtils.isBlank(from)) {
       Date date =  dateUtil.getTodayFromDate();
       try {
         from = dateUtil.convertGivenDateFormatToSolrDateFormat(date);
@@ -275,7 +260,7 @@ public class MgrBase {
   }
 
   protected String getTo(String to) {
-    if (stringUtil.isEmpty(to)) {
+    if (StringUtils.isBlank(to)) {
       to = "NOW";
     }
     return to;

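As a standalone illustration (not part of the patch itself), the MgrBase change above replaces the LOG_TYPE enum, which overrode an abstract getLabel() in each constant, with a constructor-based LogType enum that stores the label in a field. A compact sketch of that pattern:

public class LogTypeSketch {
  public enum LogType {
    SERVICE("Service"),
    AUDIT("Audit");

    private final String label;

    LogType(String label) {
      this.label = label;  // label is fixed per constant at construction time
    }

    public String getLabel() {
      return label;
    }
  }

  public static void main(String[] args) {
    System.out.println(LogType.SERVICE.getLabel());  // prints "Service"
  }
}
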
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicMgr.java
index 0dccb74..398d270 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/PublicMgr.java
@@ -31,7 +31,7 @@ import org.springframework.stereotype.Component;
 @Component
 public class PublicMgr extends MgrBase {
   @Autowired
-  LogsearchSimpleAuthenticationProvider simpleAuthenticationProvider;
+  private LogsearchSimpleAuthenticationProvider simpleAuthenticationProvider;
 
   public String getGeneralConfig() {
     VNameValueList nameValueList = new VNameValueList();

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/SessionMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/SessionMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/SessionMgr.java
index dbc14e1..c6f7dc5 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/SessionMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/SessionMgr.java
@@ -18,11 +18,7 @@
  */
 package org.apache.ambari.logsearch.manager;
 
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.ambari.logsearch.common.UserSessionInfo;
-import org.apache.ambari.logsearch.security.context.LogsearchContextHolder;
-import org.apache.ambari.logsearch.security.context.LogsearchSecurityContext;
+import org.apache.ambari.logsearch.common.LogSearchContext;
 import org.apache.ambari.logsearch.web.model.User;
 import org.apache.log4j.Logger;
 import org.springframework.security.core.Authentication;
@@ -33,38 +29,28 @@ import org.springframework.stereotype.Component;
 @Component
 public class SessionMgr {
 
-  static final Logger logger = Logger.getLogger(SessionMgr.class);
+  private static final Logger logger = Logger.getLogger(SessionMgr.class);
 
   public SessionMgr() {
     logger.debug("SessionManager created");
   }
 
-  public UserSessionInfo processSuccessLogin(int authType, String userAgent) {
-    return processSuccessLogin(authType, userAgent, null);
-  }
-
-  public UserSessionInfo processSuccessLogin(int authType, String userAgent, HttpServletRequest httpRequest) {
+  public User processSuccessLogin() {
     boolean newSessionCreation = true;
-    UserSessionInfo userSession = null;
-    LogsearchSecurityContext context = LogsearchContextHolder.getSecurityContext();
-    if (context != null) {
-      userSession = context.getUserSession();
-    }
     Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
     WebAuthenticationDetails details = (WebAuthenticationDetails) authentication.getDetails();
     String currentLoginId = authentication.getName();
-    if (userSession != null) {
-      if (validateUserSession(userSession, currentLoginId)) {
+    LogSearchContext context = LogSearchContext.getContext();
+    User user = context.getUser();
+    if (user != null) {
+      if (validateUser(user, currentLoginId)) {
         newSessionCreation = false;
       }
     }
     //
     if (newSessionCreation) {
-      // // Need to build the UserSession
-      userSession = new UserSessionInfo();
-      User user = new User();
+      user = new User();
       user.setUsername(currentLoginId);
-      userSession.setUser(user);
       if (details != null) {
         logger.info("Login Success: loginId=" + currentLoginId + ", sessionId=" + details.getSessionId()
           + ", requestId=" + details.getRemoteAddress());
@@ -74,15 +60,15 @@ public class SessionMgr {
 
     }
 
-    return userSession;
+    return user;
   }
 
-  protected boolean validateUserSession(UserSessionInfo userSession, String currentUsername) {
-    if (currentUsername.equalsIgnoreCase(userSession.getUser().getUsername())) {
+  private boolean validateUser(User user, String currentUsername) {
+    if (currentUsername.equalsIgnoreCase(user.getUsername())) {
       return true;
     } else {
       logger.info("loginId doesn't match loginId from HTTPSession. Will create new session. loginId="
-        + currentUsername + ", userSession=" + userSession, new Exception());
+        + currentUsername + ", user=" + user, new Exception());
       return false;
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
index c4fbd9f..28f806c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
@@ -31,10 +31,10 @@ import org.apache.ambari.logsearch.dao.UserConfigSolrDao;
 import org.apache.ambari.logsearch.query.QueryGeneration;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.ambari.logsearch.util.SolrUtil;
-import org.apache.ambari.logsearch.util.StringUtil;
 import org.apache.ambari.logsearch.view.VLogfeederFilterWrapper;
 import org.apache.ambari.logsearch.view.VUserConfig;
 import org.apache.ambari.logsearch.view.VUserConfigList;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -50,35 +50,26 @@ import org.springframework.stereotype.Component;
 @Component
 public class UserConfigMgr extends MgrBase {
 
-  static Logger logger = Logger.getLogger(UserConfigMgr.class);
+  private static final Logger logger = Logger.getLogger(UserConfigMgr.class);
 
   @Autowired
-  UserConfigSolrDao userConfigSolrDao;
-
-  @Autowired
-  SolrUtil solrUtil;
-
+  private UserConfigSolrDao userConfigSolrDao;
   @Autowired
-  RESTErrorUtil restErrorUtil;
-
+  private SolrUtil solrUtil;
   @Autowired
-  QueryGeneration queryGenerator;
-
+  private RESTErrorUtil restErrorUtil;
   @Autowired
-  StringUtil stringUtil;
+  private QueryGeneration queryGenerator;
 
   public String saveUserConfig(VUserConfig vHistory) {
 
     SolrInputDocument solrInputDoc = new SolrInputDocument();
     if (!isValid(vHistory)) {
-      throw restErrorUtil.createRESTException("No FilterName Specified",
-          MessageEnums.INVALID_INPUT_DATA);
+      throw restErrorUtil.createRESTException("No FilterName Specified", MessageEnums.INVALID_INPUT_DATA);
     }
 
     if (isNotUnique(vHistory) && !vHistory.isOverwrite()) {
-      throw restErrorUtil.createRESTException(
-          "Name '" + vHistory.getFiltername() + "' already exists",
-          MessageEnums.INVALID_INPUT_DATA);
+      throw restErrorUtil.createRESTException( "Name '" + vHistory.getFiltername() + "' already exists", MessageEnums.INVALID_INPUT_DATA);
     }
     String loggedInUserName = vHistory.getUserName();
     String filterName = vHistory.getFiltername();
@@ -95,25 +86,20 @@ public class UserConfigMgr extends MgrBase {
     // Check whether the Filter Name exists in solr
     SolrQuery solrQuery = new SolrQuery();
     queryGenerator.setMainQuery(solrQuery, null);
-    queryGenerator.setSingleIncludeFilter(solrQuery,
-        LogSearchConstants.FILTER_NAME, solrUtil.makeSearcableString(filterName));
-    queryGenerator.setSingleIncludeFilter(solrQuery,
-        LogSearchConstants.USER_NAME, loggedInUserName);
+    queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.FILTER_NAME, solrUtil.makeSearcableString(filterName));
+    queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.USER_NAME, loggedInUserName);
     try {
       QueryResponse queryResponse = userConfigSolrDao.process(solrQuery);
       if (queryResponse != null) {
         SolrDocumentList documentList = queryResponse.getResults();
-        if (documentList != null && !documentList.isEmpty()
-            && !vHistory.isOverwrite()) {
+        if (documentList != null && !documentList.isEmpty() && !vHistory.isOverwrite()) {
           logger.error("Filtername is already present");
-          throw restErrorUtil.createRESTException(
-              "Filtername is already present", MessageEnums.INVALID_INPUT_DATA);
+          throw restErrorUtil.createRESTException("Filtername is already present", MessageEnums.INVALID_INPUT_DATA);
         }
       }
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error in checking same filtername config", e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
 
     try {
@@ -121,8 +107,7 @@ public class UserConfigMgr extends MgrBase {
       return convertObjToString(solrInputDoc);
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error saving user config. solrDoc=" + solrInputDoc, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -133,12 +118,10 @@ public class UserConfigMgr extends MgrBase {
     if (filterName != null && rowType != null) {
       SolrQuery solrQuery = new SolrQuery();
       filterName = solrUtil.makeSearcableString(filterName);
-      solrQuery.setQuery(LogSearchConstants.COMPOSITE_KEY + ":" + filterName
-          + "-" + rowType);
+      solrQuery.setQuery(LogSearchConstants.COMPOSITE_KEY + ":" + filterName + "-" + rowType);
       queryGenerator.setRowCount(solrQuery, 0);
       try {
-        Long numFound = userConfigSolrDao.process(solrQuery).getResults()
-            .getNumFound();
+        Long numFound = userConfigSolrDao.process(solrQuery).getResults().getNumFound();
         if (numFound > 0) {
           return true;
         }
@@ -150,18 +133,17 @@ public class UserConfigMgr extends MgrBase {
   }
 
   private boolean isValid(VUserConfig vHistory) {
-    return !stringUtil.isEmpty(vHistory.getFiltername())
-        && !stringUtil.isEmpty(vHistory.getRowType())
-        && !stringUtil.isEmpty(vHistory.getUserName())
-        && !stringUtil.isEmpty(vHistory.getValues());
+    return !StringUtils.isBlank(vHistory.getFiltername())
+        && !StringUtils.isBlank(vHistory.getRowType())
+        && !StringUtils.isBlank(vHistory.getUserName())
+        && !StringUtils.isBlank(vHistory.getValues());
   }
 
   public void deleteUserConfig(String id) {
     try {
       userConfigSolrDao.deleteUserConfig(id);
     } catch (SolrException | SolrServerException | IOException e) {
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -171,40 +153,31 @@ public class UserConfigMgr extends MgrBase {
     SolrDocumentList solrList = new SolrDocumentList();
     VUserConfigList userConfigList = new VUserConfigList();
 
-    String rowType = (String) searchCriteria
-        .getParamValue(LogSearchConstants.ROW_TYPE);
-    if (stringUtil.isEmpty(rowType)) {
-      throw restErrorUtil.createRESTException("row type was not specified",
-          MessageEnums.INVALID_INPUT_DATA);
+    String rowType = (String) searchCriteria.getParamValue(LogSearchConstants.ROW_TYPE);
+    if (StringUtils.isBlank(rowType)) {
+      throw restErrorUtil.createRESTException("row type was not specified", MessageEnums.INVALID_INPUT_DATA);
     }
 
-    String userName = (String) searchCriteria
-        .getParamValue(LogSearchConstants.USER_NAME);
-    if (stringUtil.isEmpty(userName)) {
-      throw restErrorUtil.createRESTException("user name was not specified",
-          MessageEnums.INVALID_INPUT_DATA);
+    String userName = (String) searchCriteria.getParamValue(LogSearchConstants.USER_NAME);
+    if (StringUtils.isBlank(userName)) {
+      throw restErrorUtil.createRESTException("user name was not specified", MessageEnums.INVALID_INPUT_DATA);
     }
-    String filterName = (String) searchCriteria
-        .getParamValue(LogSearchConstants.FILTER_NAME);
-    filterName = stringUtil.isEmpty(filterName) ? "*" : "*" + filterName + "*";
+    String filterName = (String) searchCriteria.getParamValue(LogSearchConstants.FILTER_NAME);
+    filterName = StringUtils.isBlank(filterName) ? "*" : "*" + filterName + "*";
 
     try {
 
       SolrQuery userConfigQuery = new SolrQuery();
       queryGenerator.setMainQuery(userConfigQuery, null);
       queryGenerator.setPagination(userConfigQuery, searchCriteria);
-      queryGenerator.setSingleIncludeFilter(userConfigQuery,
-          LogSearchConstants.ROW_TYPE, rowType);
-      queryGenerator.setSingleORFilter(userConfigQuery,
-          LogSearchConstants.USER_NAME, userName,
-          LogSearchConstants.SHARE_NAME_LIST, userName);
-      queryGenerator.setSingleIncludeFilter(userConfigQuery,
-          LogSearchConstants.FILTER_NAME, solrUtil.makeSearcableString(filterName));
-
-      if (stringUtil.isEmpty(searchCriteria.getSortBy())) {
+      queryGenerator.setSingleIncludeFilter(userConfigQuery, LogSearchConstants.ROW_TYPE, rowType);
+      queryGenerator.setSingleORFilter(userConfigQuery, LogSearchConstants.USER_NAME, userName, LogSearchConstants.SHARE_NAME_LIST, userName);
+      queryGenerator.setSingleIncludeFilter(userConfigQuery, LogSearchConstants.FILTER_NAME, solrUtil.makeSearcableString(filterName));
+
+      if (StringUtils.isBlank(searchCriteria.getSortBy())) {
         searchCriteria.setSortBy(LogSearchConstants.FILTER_NAME);
       }
-      if (stringUtil.isEmpty(searchCriteria.getSortType())) {
+      if (StringUtils.isBlank(searchCriteria.getSortType())) {
         searchCriteria.setSortType("" + SolrQuery.ORDER.asc);
       }
 
@@ -215,14 +188,12 @@ public class UserConfigMgr extends MgrBase {
 
       for (SolrDocument solrDoc : solrList) {
         VUserConfig userConfig = new VUserConfig();
-        userConfig.setFiltername(""
-            + solrDoc.get(LogSearchConstants.FILTER_NAME));
+        userConfig.setFiltername("" + solrDoc.get(LogSearchConstants.FILTER_NAME));
         userConfig.setId("" + solrDoc.get(LogSearchConstants.ID));
         userConfig.setValues("" + solrDoc.get(LogSearchConstants.VALUES));
         userConfig.setRowType("" + solrDoc.get(LogSearchConstants.ROW_TYPE));
         try {
-          List<String> shareNameList = (List<String>) solrDoc
-              .get(LogSearchConstants.SHARE_NAME_LIST);
+          List<String> shareNameList = (List<String>) solrDoc.get(LogSearchConstants.SHARE_NAME_LIST);
           userConfig.setShareNameList(shareNameList);
         } catch (Exception e) {
           // do nothing
@@ -243,8 +214,7 @@ public class UserConfigMgr extends MgrBase {
     } catch (SolrException | SolrServerException | IOException e) {
       // do nothing
       logger.error(e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
 
     return convertObjToString(userConfigList);
@@ -255,43 +225,30 @@ public class UserConfigMgr extends MgrBase {
     return saveUserConfig(vuserConfig);
   }
 
-  // ////////////////////////////LEVEL
-  // FILTER/////////////////////////////////////
+  // ////////////////////////////LEVEL FILTER/////////////////////////////////////
 
-  /**
-   * @return
-   */
   public String getUserFilter() {
     VLogfeederFilterWrapper userFilter;
     try {
       userFilter = userConfigSolrDao.getUserFilter();
     } catch (SolrServerException | IOException e) {
       logger.error(e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
     return convertObjToString(userFilter);
   }
 
-  /**
-   * Creating filter for logfeeder
-   *
-   * @param String
-   * @return
-   */
   public String saveUserFiter(String json) {
-    if (!stringUtil.isEmpty(json)) {
-      VLogfeederFilterWrapper logfeederFilterWrapper = (VLogfeederFilterWrapper) jsonUtil
-          .jsonToObj(json, VLogfeederFilterWrapper.class);
+    if (!StringUtils.isBlank(json)) {
+      VLogfeederFilterWrapper logfeederFilterWrapper = (VLogfeederFilterWrapper) jsonUtil.jsonToObj(json, VLogfeederFilterWrapper.class);
       try {
         if (logfeederFilterWrapper == null) {
           logger.error(json + " is a invalid json");
         }
-        userConfigSolrDao.saveUserFiter(logfeederFilterWrapper);
+        userConfigSolrDao.saveUserFilter(logfeederFilterWrapper);
       } catch (SolrException | SolrServerException | IOException e) {
         logger.error("user config not able to save", e);
-        throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-            .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+        throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
       }
     }
     return getUserFilter();
@@ -307,20 +264,16 @@ public class UserConfigMgr extends MgrBase {
       if (queryResponse == null) {
         return convertObjToString(userList);
       }
-      List<Count> counList = queryResponse.getFacetField(
-          LogSearchConstants.USER_NAME).getValues();
+      List<Count> counList = queryResponse.getFacetField(LogSearchConstants.USER_NAME).getValues();
       for (Count cnt : counList) {
         String userName = cnt.getName();
         userList.add(userName);
       }
     } catch (SolrException | SolrServerException | IOException e) {
       logger.warn("Error getting all users.", e);
-      // do nothing
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
-          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
     return convertObjToString(userList);
-
   }
 
 }

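The UserConfigMgr hunks above compact, among other things, the duplicate-filter-name check in isNotUnique(). A hedged sketch of that query pattern using plain SolrJ is shown here; SolrClient stands in for the project's userConfigSolrDao, and the literal field name "composite_key" is an assumption (the patch refers to it through LogSearchConstants.COMPOSITE_KEY).

import java.io.IOException;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;

public class DuplicateFilterCheckSketch {
  // Returns true if a document with the given filterName/rowType composite key already exists.
  static boolean isNotUnique(SolrClient solr, String filterName, String rowType)
      throws SolrServerException, IOException {
    SolrQuery query = new SolrQuery();
    query.setQuery("composite_key:" + filterName + "-" + rowType);  // assumed field name
    query.setRows(0);  // only the hit count is needed, not the documents
    QueryResponse response = solr.query(query);
    return response.getResults().getNumFound() > 0;
  }
}
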
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
index d934e84..0c8be45 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
@@ -28,7 +28,7 @@ import java.util.regex.Pattern;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
-import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
+import org.apache.ambari.logsearch.manager.MgrBase.LogType;
 import org.apache.ambari.logsearch.util.ConfigUtil;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.commons.lang.StringUtils;
@@ -48,7 +48,7 @@ public class QueryGeneration extends QueryGenerationBase {
   private static Logger logger = Logger.getLogger(QueryGeneration.class);
 
   public SolrQuery commonServiceFilterQuery(SearchCriteria searchCriteria) {
-    LOG_TYPE logType = LOG_TYPE.SERVICE;
+    LogType logType = LogType.SERVICE;
     SolrQuery solrQuery = new SolrQuery();
     String treeParams = (String) searchCriteria.getParamValue("treeParams");
     String givenQuery = (String) searchCriteria.getParamValue("q");
@@ -59,19 +59,15 @@ public class QueryGeneration extends QueryGenerationBase {
     String eMessage = (String) searchCriteria.getParamValue("eMessage");
     String gEmessage = (String) searchCriteria.getParamValue("gEMessage");
     String selectedComp = (String) searchCriteria.getParamValue("selectComp");
-    String bundleId = (String) searchCriteria
-        .getParamValue(LogSearchConstants.BUNDLE_ID);
-    String globalExcludeComp = (String) searchCriteria
-        .getParamValue("gMustNot");
-    String unselectedComp = (String) searchCriteria
-        .getParamValue("unselectComp");
+    String bundleId = (String) searchCriteria.getParamValue(LogSearchConstants.BUNDLE_ID);
+    String globalExcludeComp = (String) searchCriteria.getParamValue("gMustNot");
+    String unselectedComp = (String) searchCriteria.getParamValue("unselectComp");
     String urlHostName = (String) searchCriteria.getParamValue("host_name");
-    String urlComponentName = (String) searchCriteria
-        .getParamValue("component_name");
+    String urlComponentName = (String) searchCriteria.getParamValue("component_name");
     String file_name = (String) searchCriteria.getParamValue("file_name");
     String advQuery = (String) searchCriteria.getParamValue("advanceSearch");
     // build advance query
-    if (!stringUtil.isEmpty(advQuery)) {
+    if (!StringUtils.isBlank(advQuery)) {
       String advQueryParameters[] = advQuery.split(Pattern.quote("}{"));
       SolrQuery advSolrQuery = new SolrQuery();
       for (String queryParam : advQueryParameters) {
@@ -79,25 +75,13 @@ public class QueryGeneration extends QueryGenerationBase {
         if (params != null && params.length > 1)
           advSolrQuery.setParam(params[0], params[1]);
       }
-      // Building and adding levels to filters
-      setFilterClauseWithFieldName(advSolrQuery, level,
-          LogSearchConstants.SOLR_LEVEL, "", CONDITION.OR);
+      setFilterClauseWithFieldName(advSolrQuery, level, LogSearchConstants.SOLR_LEVEL, "", Condition.OR);
+      setSingleRangeFilter(advSolrQuery, LogSearchConstants.LOGTIME, startTime, endTime);
+      setFilterClauseWithFieldName(advSolrQuery, unselectedComp, LogSearchConstants.SOLR_COMPONENT, LogSearchConstants.MINUS_OPERATOR,
+          Condition.AND);
+      setFilterClauseWithFieldName(advSolrQuery, selectedComp, LogSearchConstants.SOLR_COMPONENT, LogSearchConstants.NO_OPERATOR,
+          Condition.OR);
 
-      // Adding Logtime to filters
-      setSingleRangeFilter(advSolrQuery, LogSearchConstants.LOGTIME, startTime,
-          endTime);
-
-      // Building and adding exlcude components to filters
-      setFilterClauseWithFieldName(advSolrQuery, unselectedComp,
-          LogSearchConstants.SOLR_COMPONENT, LogSearchConstants.MINUS_OPERATOR,
-          CONDITION.AND);
-
-      // Building and adding exlcude components to filters
-      setFilterClauseWithFieldName(advSolrQuery, selectedComp,
-          LogSearchConstants.SOLR_COMPONENT, LogSearchConstants.NO_OPERATOR,
-          CONDITION.OR);
-
-      // Set Pagination
       setPagination(advSolrQuery, searchCriteria);
 
       return advSolrQuery;
@@ -105,140 +89,71 @@ public class QueryGeneration extends QueryGenerationBase {
 
     setMainQuery(solrQuery, givenQuery);
 
-    // Adding Logtime to filters
-    setSingleRangeFilter(solrQuery, LogSearchConstants.LOGTIME, startTime,
-        endTime);
-
-    // String mainFilterQuery = buildQueryFromJSONCompHost(jsonHCNames,
-    // selectedComp);
-
-    // if (mainFilterQuery != null && !mainFilterQuery.equals(""))
-    // solrQuery.addFilterQuery(mainFilterQuery);
-
-    // add component filter
-    addFilter(solrQuery, selectedComp, LogSearchConstants.SOLR_COMPONENT,
-        CONDITION.OR);
-
-    // add treeParams filter
-    // hosts comma separated list
-    addFilterQueryFromArray(solrQuery, treeParams,
-        LogSearchConstants.SOLR_HOST, CONDITION.OR);
-
-    // Building and adding levels to filters
-    setFilterClauseWithFieldName(solrQuery, level,
-        LogSearchConstants.SOLR_LEVEL, LogSearchConstants.NO_OPERATOR,
-        CONDITION.OR);
+    setSingleRangeFilter(solrQuery, LogSearchConstants.LOGTIME, startTime, endTime);
+    addFilter(solrQuery, selectedComp, LogSearchConstants.SOLR_COMPONENT, Condition.OR);
+    addFilterQueryFromArray(solrQuery, treeParams, LogSearchConstants.SOLR_HOST, Condition.OR);
 
-    // Building and adding include string to filters
-    setFilterClauseForSolrSearchableString(solrQuery, iMessage, CONDITION.OR,
-        LogSearchConstants.NO_OPERATOR, LogSearchConstants.SOLR_KEY_LOG_MESSAGE);
+    setFilterClauseWithFieldName(solrQuery, level, LogSearchConstants.SOLR_LEVEL, LogSearchConstants.NO_OPERATOR, Condition.OR);
 
-    // Building and adding global exclude string to filters
-    setFilterClauseForSolrSearchableString(solrQuery, gEmessage, CONDITION.AND,
-        LogSearchConstants.MINUS_OPERATOR, LogSearchConstants.SOLR_KEY_LOG_MESSAGE);
+    setFilterClauseForSolrSearchableString(solrQuery, iMessage, Condition.OR, LogSearchConstants.NO_OPERATOR, LogSearchConstants.SOLR_KEY_LOG_MESSAGE);
+    setFilterClauseForSolrSearchableString(solrQuery, gEmessage, Condition.AND, LogSearchConstants.MINUS_OPERATOR, LogSearchConstants.SOLR_KEY_LOG_MESSAGE);
+    setFilterClauseForSolrSearchableString(solrQuery, eMessage, Condition.AND, LogSearchConstants.MINUS_OPERATOR, LogSearchConstants.SOLR_KEY_LOG_MESSAGE);
 
-    // Building and adding exclude string to filter
-    setFilterClauseForSolrSearchableString(solrQuery, eMessage, CONDITION.AND,
-        LogSearchConstants.MINUS_OPERATOR, LogSearchConstants.SOLR_KEY_LOG_MESSAGE);
-
-    // Building and adding logfile to filters
     applyLogFileFilter(solrQuery, searchCriteria);
 
-    // Building and adding exclude components to filters
-    setFilterClauseWithFieldName(solrQuery, globalExcludeComp,
-        LogSearchConstants.SOLR_COMPONENT, LogSearchConstants.MINUS_OPERATOR,
-        CONDITION.AND);
-
-    // Building and adding exlcude components to filters
-    setFilterClauseWithFieldName(solrQuery, unselectedComp,
-        LogSearchConstants.SOLR_COMPONENT, LogSearchConstants.MINUS_OPERATOR,
-        CONDITION.AND);
+    setFilterClauseWithFieldName(solrQuery, globalExcludeComp, LogSearchConstants.SOLR_COMPONENT, LogSearchConstants.MINUS_OPERATOR, Condition.AND);
+    setFilterClauseWithFieldName(solrQuery, unselectedComp, LogSearchConstants.SOLR_COMPONENT, LogSearchConstants.MINUS_OPERATOR, Condition.AND);
 
-    // Building and adding host names given url
-    // setFilterClauseWithFieldName(solrQuery, urlHostName,
-    // LogSearchConstants.SOLR_HOST,
-    // "", "OR");
     urlHostName = solrUtil.escapeQueryChars(urlHostName);
     setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_HOST, urlHostName);
-    //
-    // //Building and addding component names given url
-    // setFilterClauseWithFieldName(solrQuery, urlComponents,
-    // LogSearchConstants.SOLR_COMPONENT,
-    // "", "OR");
     urlComponentName = solrUtil.escapeQueryChars(urlComponentName);
-    setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_COMPONENT,
-        urlComponentName);
+    setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_COMPONENT, urlComponentName);
 
-    // Set Pagination
     setPagination(solrQuery, searchCriteria);
-
-    // SetSort type (by default Descending)
     setSortOrderDefaultServiceLog(solrQuery, searchCriteria);
-
-    // Set Bundle Id
     setSingleIncludeFilter(solrQuery, LogSearchConstants.BUNDLE_ID, bundleId);
-
-    // Set filename
     file_name = solrUtil.escapeQueryChars(file_name);
     setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_PATH, file_name);
-    // include query
-    this.setUserSpecificFilter(searchCriteria, solrQuery,
-        LogSearchConstants.INCLUDE_QUERY, LogSearchConstants.INCLUDE_QUERY,
-        logType);
-    // exclude query
-    this.setUserSpecificFilter(searchCriteria, solrQuery,
-        LogSearchConstants.EXCLUDE_QUERY, LogSearchConstants.EXCLUDE_QUERY,
-        logType);
+    setUserSpecificFilter(searchCriteria, solrQuery, LogSearchConstants.INCLUDE_QUERY, LogSearchConstants.INCLUDE_QUERY, logType);
+    setUserSpecificFilter(searchCriteria, solrQuery, LogSearchConstants.EXCLUDE_QUERY, LogSearchConstants.EXCLUDE_QUERY, logType);
+    
     return solrQuery;
   }
 
-  public void applyLogFileFilter(SolrQuery solrQuery,
-      SearchCriteria searchCriteria) {
+  public void applyLogFileFilter(SolrQuery solrQuery, SearchCriteria searchCriteria) {
     String hostLogFile = (String) searchCriteria.getParamValue("hostLogFile");
     String compLogFile = (String) searchCriteria.getParamValue("compLogFile");
     String givenQuery = (String) searchCriteria.getParamValue("q");
     String logfileQuery = "";
-    if (!stringUtil.isEmpty(hostLogFile) && !stringUtil.isEmpty(compLogFile)) {
-      logfileQuery = LogSearchConstants.SOLR_HOST + ":" + hostLogFile + " "
-          + CONDITION.AND + " " + LogSearchConstants.SOLR_COMPONENT + ":"
-          + compLogFile;
-      if (!stringUtil.isEmpty(givenQuery)) {
-        logfileQuery = "(" + givenQuery + ") " + CONDITION.AND + " ("
-            + logfileQuery + ")";
+    if (!StringUtils.isBlank(hostLogFile) && !StringUtils.isBlank(compLogFile)) {
+      logfileQuery = LogSearchConstants.SOLR_HOST + ":" + hostLogFile + " " + Condition.AND + " " +
+          LogSearchConstants.SOLR_COMPONENT + ":" + compLogFile;
+      if (!StringUtils.isBlank(givenQuery)) {
+        logfileQuery = "(" + givenQuery + ") " + Condition.AND + " (" + logfileQuery + ")";
       }
-      if (!stringUtil.isEmpty(logfileQuery)) {
+      if (!StringUtils.isBlank(logfileQuery)) {
         solrQuery.addFilterQuery(logfileQuery);
       }
     }
   }
 
-  public void setUserSpecificFilter(SearchCriteria searchCriteria,
-      SolrQuery solrQuery, String paramName, String operation, LOG_TYPE logType) {
+  private void setUserSpecificFilter(SearchCriteria searchCriteria, SolrQuery solrQuery, String paramName, String operation,
+      LogType logType) {
     String queryString = (String) searchCriteria.getParamValue(paramName);
-    String columnQuery = (String) searchCriteria
-        .getParamValue(LogSearchConstants.COLUMN_QUERY);
-    if (stringUtil.isEmpty(queryString)) {
+    String columnQuery = (String) searchCriteria.getParamValue(LogSearchConstants.COLUMN_QUERY);
+    if (StringUtils.isBlank(queryString)) {
       queryString = null;
     }
-    // if (!stringUtil.isEmpty(queryString) && "[]".equals(queryString)) {
-    // queryString = null;
-    // }
-    if (!stringUtil.isEmpty(columnQuery) && stringUtil.isEmpty(queryString)
-        && !paramName.equals(LogSearchConstants.EXCLUDE_QUERY)) {
+    if (!StringUtils.isBlank(columnQuery) && StringUtils.isBlank(queryString) && !paramName.equals(LogSearchConstants.EXCLUDE_QUERY)) {
       queryString = columnQuery;
     }
     List<String> conditionQuries = new ArrayList<String>();
     List<String> referalConditionQuries = new ArrayList<String>();
     List<String> elments = new ArrayList<String>();
-    // convert json to list of hashmap
-    List<HashMap<String, Object>> queryList = jsonUtil
-        .jsonToMapObjectList(queryString);
-    // null and size check
+    List<HashMap<String, Object>> queryList = jsonUtil.jsonToMapObjectList(queryString);
     if (queryList != null && queryList.size() > 0) {
-      if (!stringUtil.isEmpty(columnQuery) && !columnQuery.equals(queryString)
-          && !paramName.equals(LogSearchConstants.EXCLUDE_QUERY)) {
-        List<HashMap<String, Object>> columnQueryList = jsonUtil
-            .jsonToMapObjectList(columnQuery);
+      if (!StringUtils.isBlank(columnQuery) && !columnQuery.equals(queryString) && !paramName.equals(LogSearchConstants.EXCLUDE_QUERY)) {
+        List<HashMap<String, Object>> columnQueryList = jsonUtil.jsonToMapObjectList(columnQuery);
         if (columnQueryList != null && columnQueryList.size() > 0) {
           queryList.addAll(columnQueryList);
         }
@@ -248,12 +163,11 @@ public class QueryGeneration extends QueryGenerationBase {
         StringBuilder field = new StringBuilder();
         if (columnListMap != null) {
           for (String key : columnListMap.keySet()) {
-            if (!stringUtil.isEmpty(key)) {
+            if (!StringUtils.isBlank(key)) {
               String originalKey = getOriginalKey(key, logType);
-              String value = getOriginalValue(originalKey,
-                  "" + columnListMap.get(key));
+              String value = getOriginalValue(originalKey, "" + columnListMap.get(key));
               orQuery = putWildCardByType(value, originalKey, logType);
-              if (stringUtil.isEmpty(orQuery)) {
+              if (StringUtils.isBlank(orQuery)) {
                 logger.debug("Removing invalid filter for key :"+originalKey +" and value :" +value );
                 continue;
               }
@@ -264,8 +178,7 @@ public class QueryGeneration extends QueryGenerationBase {
               if (isSame && !operation.equals(LogSearchConstants.EXCLUDE_QUERY)) {
                 for (String tempCondition : conditionQuries) {
                   if (tempCondition.contains(originalKey)) {
-                    String newCondtion = tempCondition + " "
-                        + CONDITION.OR.name() + " " + orQuery;
+                    String newCondtion = tempCondition + " " + Condition.OR.name() + " " + orQuery;
                     referalConditionQuries.remove(tempCondition);
                     referalConditionQuries.add(newCondtion);
                   }
@@ -283,17 +196,16 @@ public class QueryGeneration extends QueryGenerationBase {
         }
       }
     }
-    if (!referalConditionQuries.isEmpty() && !stringUtil.isEmpty(operation)) {
-      if (operation.equals(LogSearchConstants.INCLUDE_QUERY)
-          || operation.equals(LogSearchConstants.COLUMN_QUERY)) {
+    if (!referalConditionQuries.isEmpty() && !StringUtils.isBlank(operation)) {
+      if (operation.equals(LogSearchConstants.INCLUDE_QUERY) || operation.equals(LogSearchConstants.COLUMN_QUERY)) {
         for (String filter : referalConditionQuries) {
-          if (!stringUtil.isEmpty(filter)) {
+          if (!StringUtils.isBlank(filter)) {
             solrQuery.addFilterQuery(filter);
           }
         }
       } else if (operation.equals(LogSearchConstants.EXCLUDE_QUERY)) {
         for (String filter : referalConditionQuries) {
-          if (!stringUtil.isEmpty(filter)) {
+          if (!StringUtils.isBlank(filter)) {
             filter = LogSearchConstants.MINUS_OPERATOR + filter;
             solrQuery.addFilterQuery(filter);
           }
@@ -303,40 +215,23 @@ public class QueryGeneration extends QueryGenerationBase {
   }
 
   public SolrQuery commonAuditFilterQuery(SearchCriteria searchCriteria) {
-    LOG_TYPE logType = LOG_TYPE.AUDIT;
+    LogType logType = LogType.AUDIT;
     SolrQuery solrQuery = new SolrQuery();
     solrQuery.setQuery("*:*");
     String startTime = (String) searchCriteria.getParamValue("startTime");
     String endTime = (String) searchCriteria.getParamValue("endTime");
-    String selectedComp = (String) searchCriteria
-        .getParamValue("includeString");
-    this.setFilterClauseWithFieldName(solrQuery, selectedComp,
-        LogSearchConstants.AUDIT_COMPONENT, LogSearchConstants.NO_OPERATOR,
-        CONDITION.OR);
-    String globalExcludeComp = (String) searchCriteria
-        .getParamValue("gMustNot");
-    this.setUserSpecificFilter(searchCriteria, solrQuery,
-        LogSearchConstants.INCLUDE_QUERY, LogSearchConstants.INCLUDE_QUERY,
-        logType);
-    this.setUserSpecificFilter(searchCriteria, solrQuery,
-        LogSearchConstants.EXCLUDE_QUERY, LogSearchConstants.EXCLUDE_QUERY,
-        logType);
-    String unselectedComp = (String) searchCriteria
-        .getParamValue("unselectComp");
-    this.setFilterClauseWithFieldName(solrQuery, globalExcludeComp,
-        LogSearchConstants.AUDIT_COMPONENT, LogSearchConstants.MINUS_OPERATOR,
-        CONDITION.AND);
-    // Building and adding exlcude components to filters
-    this.setFilterClauseWithFieldName(solrQuery, unselectedComp,
-        LogSearchConstants.AUDIT_COMPONENT, LogSearchConstants.MINUS_OPERATOR,
-        CONDITION.AND);
-    // Adding Logtime to filters
-    this.setSingleRangeFilter(solrQuery, LogSearchConstants.AUDIT_EVTTIME,
-        startTime, endTime);
-    this.setPagination(solrQuery, searchCriteria);
+    String selectedComp = (String) searchCriteria.getParamValue("includeString");
+    setFilterClauseWithFieldName(solrQuery, selectedComp, LogSearchConstants.AUDIT_COMPONENT, LogSearchConstants.NO_OPERATOR, Condition.OR);
+    String globalExcludeComp = (String) searchCriteria.getParamValue("gMustNot");
+    setUserSpecificFilter(searchCriteria, solrQuery, LogSearchConstants.INCLUDE_QUERY, LogSearchConstants.INCLUDE_QUERY, logType);
+    setUserSpecificFilter(searchCriteria, solrQuery, LogSearchConstants.EXCLUDE_QUERY, LogSearchConstants.EXCLUDE_QUERY, logType);
+    String unselectedComp = (String) searchCriteria.getParamValue("unselectComp");
+    setFilterClauseWithFieldName(solrQuery, globalExcludeComp, LogSearchConstants.AUDIT_COMPONENT, LogSearchConstants.MINUS_OPERATOR, Condition.AND);
+    setFilterClauseWithFieldName(solrQuery, unselectedComp, LogSearchConstants.AUDIT_COMPONENT, LogSearchConstants.MINUS_OPERATOR, Condition.AND);
+    setSingleRangeFilter(solrQuery, LogSearchConstants.AUDIT_EVTTIME, startTime, endTime);
+    setPagination(solrQuery, searchCriteria);
     try {
-      if (searchCriteria.getSortBy() == null
-          || searchCriteria.getSortBy().isEmpty()) {
+      if (searchCriteria.getSortBy() == null || searchCriteria.getSortBy().isEmpty()) {
         searchCriteria.setSortBy(LogSearchConstants.AUDIT_EVTTIME);
         searchCriteria.setSortType(SolrQuery.ORDER.desc.toString());
       }
@@ -344,11 +239,11 @@ public class QueryGeneration extends QueryGenerationBase {
       searchCriteria.setSortBy(LogSearchConstants.AUDIT_EVTTIME);
       searchCriteria.setSortType(SolrQuery.ORDER.desc.toString());
     }
-    this.setSortOrderDefaultServiceLog(solrQuery, searchCriteria);
+    setSortOrderDefaultServiceLog(solrQuery, searchCriteria);
     return solrQuery;
   }
 
-  private String putWildCardByType(String str, String key, LOG_TYPE logType) {
+  private String putWildCardByType(String str, String key, LogType logType) {
     String fieldType;
     SolrDaoBase solrDaoBase = null;
     switch (logType) {
@@ -368,10 +263,10 @@ public class QueryGeneration extends QueryGenerationBase {
       logger.error("Invalid logtype :" + logType);
       fieldType = null;
     }
-    if (!stringUtil.isEmpty(fieldType)) {
+    if (!StringUtils.isBlank(fieldType)) {
       if (solrUtil.isSolrFieldNumber(fieldType, solrDaoBase)) {
         String value = putEscapeCharacterForNumber(str, fieldType,solrDaoBase);
-        if (!stringUtil.isEmpty(value)) {
+        if (!StringUtils.isBlank(value)) {
           return key + ":" + value;
         } else {
           return null;
@@ -388,7 +283,7 @@ public class QueryGeneration extends QueryGenerationBase {
   }
 
   private String putEscapeCharacterForNumber(String str,String fieldType,SolrDaoBase solrDaoBase) {
-    if (!stringUtil.isEmpty(str)) {
+    if (!StringUtils.isBlank(str)) {
       str = str.replace("*", "");
     }
     String escapeCharSting = parseInputValueAsPerFieldType(str,fieldType,solrDaoBase);
@@ -420,28 +315,25 @@ public class QueryGeneration extends QueryGenerationBase {
 
   private String getOriginalValue(String name, String value) {
     String solrValue = PropertiesUtil.getProperty(name);
-    if (stringUtil.isEmpty(solrValue)) {
+    if (StringUtils.isBlank(solrValue)) {
       return value;
     }
     try {
-      String propertyFieldMappings[] = solrValue
-          .split(LogSearchConstants.LIST_SEPARATOR);
+      String propertyFieldMappings[] = solrValue.split(LogSearchConstants.LIST_SEPARATOR);
       if (propertyFieldMappings != null && propertyFieldMappings.length > 0) {
         HashMap<String, String> propertyFieldValue = new HashMap<String, String>();
         for (String temp : propertyFieldMappings) {
-          if (!stringUtil.isEmpty(temp)) {
+          if (!StringUtils.isBlank(temp)) {
             String arrayValue[] = temp.split(":");
             if (arrayValue.length > 1) {
-              propertyFieldValue.put(arrayValue[0].toLowerCase(Locale.ENGLISH),
-                  arrayValue[1].toLowerCase(Locale.ENGLISH));
+              propertyFieldValue.put(arrayValue[0].toLowerCase(Locale.ENGLISH), arrayValue[1].toLowerCase(Locale.ENGLISH));
             } else {
               logger.warn("array length is less than required length 1");
             }
           }
         }
-        String originalValue = propertyFieldValue.get(value
-            .toLowerCase(Locale.ENGLISH));
-        if (!stringUtil.isEmpty(originalValue)) {
+        String originalValue = propertyFieldValue.get(value.toLowerCase(Locale.ENGLISH));
+        if (!StringUtils.isBlank(originalValue)) {
           return originalValue;
         }
       }
@@ -451,35 +343,29 @@ public class QueryGeneration extends QueryGenerationBase {
     return value;
   }
 
-  private String getOriginalKey(String key, LOG_TYPE logType) {
+  private String getOriginalKey(String key, LogType logType) {
     String originalKey;
     switch (logType) {
     case AUDIT:
-      originalKey = ConfigUtil.auditLogsColumnMapping.get(key
-          + LogSearchConstants.UI_SUFFIX);
+      originalKey = ConfigUtil.auditLogsColumnMapping.get(key + LogSearchConstants.UI_SUFFIX);
       break;
     case SERVICE:
-      originalKey = ConfigUtil.serviceLogsColumnMapping.get(key
-          + LogSearchConstants.UI_SUFFIX);
+      originalKey = ConfigUtil.serviceLogsColumnMapping.get(key + LogSearchConstants.UI_SUFFIX);
       break;
     default:
       originalKey = null;
-      // set as null
     }
-    if (stringUtil.isEmpty(originalKey)) {
-      // return default values
+    if (StringUtils.isBlank(originalKey)) {
       return key;
     }
     return originalKey;
   }
   
-  public boolean checkTokenizer(String fieldType,Class tokenizerFactoryClass,SolrDaoBase solrDaoBase) {
+  private boolean checkTokenizer(String fieldType, Class tokenizerFactoryClass, SolrDaoBase solrDaoBase) {
     HashMap<String, Object> fieldTypeMap = solrUtil.getFieldTypeInfoMap(fieldType,solrDaoBase);
-    HashMap<String, Object> analyzer = (HashMap<String, Object>) fieldTypeMap
-        .get("analyzer");
+    HashMap<String, Object> analyzer = (HashMap<String, Object>) fieldTypeMap.get("analyzer");
     if (analyzer != null) {
-      HashMap<String, Object> tokenizerMap = (HashMap<String, Object>) analyzer
-          .get("tokenizer");
+      HashMap<String, Object> tokenizerMap = (HashMap<String, Object>) analyzer.get("tokenizer");
       if (tokenizerMap != null) {
         String tokenizerClass = (String) tokenizerMap.get("class");
         if (!StringUtils.isEmpty(tokenizerClass)) {
@@ -493,4 +379,4 @@ public class QueryGeneration extends QueryGenerationBase {
     }
     return false;
   }
-}
\ No newline at end of file
+}
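
For readers skimming the hunk above: the refactoring standardizes on Apache Commons Lang's StringUtils.isBlank in place of the project-local stringUtil.isEmpty helper. A minimal standalone illustration of the difference, assuming the commons-lang 2.x API already on the classpath (this demo class is not part of the patch):

    import org.apache.commons.lang.StringUtils;

    public class BlankVsEmptyDemo {
        public static void main(String[] args) {
            // isEmpty() rejects only null and "", while isBlank() also rejects
            // whitespace-only input, the safer check before a value becomes a Solr filter.
            System.out.println(StringUtils.isEmpty("   "));  // false
            System.out.println(StringUtils.isBlank("   "));  // true
            System.out.println(StringUtils.isBlank(null));   // true
        }
    }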


[11/50] [abbrv] ambari git commit: AMBARI-18225. Fixed search issue for number field in Logsearch portal (Hayat Behlim via oleewere)

Posted by ol...@apache.org.
AMBARI-18225. Fixed search issue for number field in Logsearch portal (Hayat Behlim via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4562dcd6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4562dcd6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4562dcd6

Branch: refs/heads/branch-dev-logsearch
Commit: 4562dcd6d9db5ae550b0ba9cb5b3215ccaf4fbec
Parents: 29c66f7
Author: oleewere <ol...@gmail.com>
Authored: Mon Aug 22 15:30:47 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:33:58 2016 +0200

----------------------------------------------------------------------
 .../ambari-logsearch-portal/pom.xml             |  21 ++++
 .../logsearch/common/LogSearchConstants.java    |   2 -
 .../ambari/logsearch/dao/SolrDaoBase.java       |  44 ++++----
 .../apache/ambari/logsearch/dao/UserDao.java    |   3 +
 .../logsearch/graph/GraphDataGenerator.java     |  22 ++--
 .../ambari/logsearch/manager/AuditMgr.java      |   6 +-
 .../ambari/logsearch/manager/LogsMgr.java       |   5 +-
 .../ambari/logsearch/query/QueryGeneration.java | 103 ++++++++++++++-----
 .../logsearch/query/QueryGenerationBase.java    |  11 ++
 .../ambari/logsearch/util/ConfigUtil.java       |  70 +++++++------
 .../apache/ambari/logsearch/util/SolrUtil.java  |  54 ++++++++++
 11 files changed, 243 insertions(+), 98 deletions(-)
----------------------------------------------------------------------
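
Taken together, the hunks below fix numeric-field search by reading each collection's live /schema response and deciding how to parse and escape a search value from the field's declared type, rather than treating every non-text value as an integer. A compact, self-contained sketch of the per-type parsing step; the class names follow Solr's Trie* field types, the method name is illustrative, and the production logic lives in QueryGeneration.parseInputValueAsPerFieldType further down:

    public class NumericFieldParseSketch {
        // Returns a normalized numeric literal for the filter query, or null when the
        // input is not valid for the field type (the caller then drops the filter).
        static String parseForFieldType(String input, String fieldTypeClass) {
            try {
                if ("TrieDoubleField".equalsIgnoreCase(fieldTypeClass)) {
                    return String.valueOf(Double.parseDouble(input));
                } else if ("TrieFloatField".equalsIgnoreCase(fieldTypeClass)) {
                    return String.valueOf(Float.parseFloat(input));
                } else if ("TrieLongField".equalsIgnoreCase(fieldTypeClass)) {
                    return String.valueOf(Long.parseLong(input));
                } else {
                    return String.valueOf(Integer.parseInt(input));
                }
            } catch (NumberFormatException e) {
                return null;
            }
        }

        public static void main(String[] args) {
            System.out.println(parseForFieldType("42", "TrieLongField"));    // 42
            System.out.println(parseForFieldType("4.5", "TrieDoubleField")); // 4.5
            System.out.println(parseForFieldType("abc", "TrieLongField"));   // null
        }
    }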


http://git-wip-us.apache.org/repos/asf/ambari/blob/4562dcd6/ambari-logsearch/ambari-logsearch-portal/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/pom.xml b/ambari-logsearch/ambari-logsearch-portal/pom.xml
index 349531e..a886b37 100755
--- a/ambari-logsearch/ambari-logsearch-portal/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/pom.xml
@@ -535,6 +535,27 @@
       <artifactId>solr-solrj</artifactId>
       <version>${solr.version}</version>
     </dependency>
+    <dependency>
+    <groupId>org.apache.solr</groupId>
+    <artifactId>solr-core</artifactId>
+    <version>${solr.version}</version>
+    <exclusions>
+      <exclusion>
+        <groupId>*</groupId>
+        <artifactId>*</artifactId>
+      </exclusion>
+    </exclusions>
+  </dependency>
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-core</artifactId>
+      <version>${solr.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-analyzers-common</artifactId>
+      <version>${solr.version}</version>
+    </dependency>
     <!-- Hadoop -->
     <dependency>
       <groupId>org.apache.hadoop</groupId>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4562dcd6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
index 43a7eb7..5dcdac1 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
@@ -91,8 +91,6 @@ public class LogSearchConstants {
   public static final String UI_SUFFIX = "@UI@";
   public static final String SOLR_SUFFIX = "@Solr@";
   public static final String NGRAM_SUFFIX = "ngram_";
-  public static final String DEFAULT_SERVICE_COLUMN_SUFFIX = "service"; 
-  public static final String DEFAULT_AUDIT_COLUMN_SUFFIX = "audit";
 
   //Date Format for SOLR
   public static final String SOLR_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss,SSS";

http://git-wip-us.apache.org/repos/asf/ambari/blob/4562dcd6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
index 91c4a26..cda5e26 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
@@ -22,10 +22,10 @@ package org.apache.ambari.logsearch.dao;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
-
 import org.apache.ambari.logsearch.common.LogsearchContextUtil;
 import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
@@ -60,7 +60,10 @@ import org.springframework.beans.factory.annotation.Autowired;
 
 public abstract class SolrDaoBase {
   static private Logger logger = Logger.getLogger(SolrDaoBase.class);
-
+  
+  public HashMap<String, String> schemaFieldsNameMap = new HashMap<String, String>();
+  public HashMap<String, String> schemaFieldTypeMap = new HashMap<String, String>();
+  
   private static Logger logPerformance = Logger
     .getLogger("org.apache.ambari.logsearch.performance");
 
@@ -95,6 +98,7 @@ public abstract class SolrDaoBase {
   private boolean populateFieldsThreadActive = false;
 
   int SETUP_RETRY_SECOND = 30;
+  int SETUP_UPDATE_SECOND = 10*60; //10 min
   int ALIAS_SETUP_RETRY_SECOND = 30*60; //30 minutes
   
   private boolean isZkConnectString=false;//by default its false
@@ -599,17 +603,16 @@ public abstract class SolrDaoBase {
   }
 
   private void populateSchemaFields() {
-    boolean result = _populateSchemaFields();
-    if (!result && !populateFieldsThreadActive) {
+    if (!populateFieldsThreadActive) {
       populateFieldsThreadActive = true;
       logger.info("Creating thread to populated fields for collection="
-        + collectionName);
+          + collectionName);
       Thread fieldPopulationThread = new Thread("populated_fields_"
-        + collectionName) {
+          + collectionName) {
         @Override
         public void run() {
           logger.info("Started thread to get fields for collection="
-            + collectionName);
+              + collectionName);
           int retryCount = 0;
           while (true) {
             try {
@@ -617,27 +620,25 @@ public abstract class SolrDaoBase {
               retryCount++;
               boolean _result = _populateSchemaFields();
               if (_result) {
-                logger.info("Populate fields for collection "
-                  + collectionName + " is success");
-                break;
+                logger.info("Populate fields for collection " + collectionName
+                    + " is success, Update it after " + SETUP_UPDATE_SECOND
+                    + " sec");
+                Thread.sleep(SETUP_UPDATE_SECOND * 1000);
               }
             } catch (InterruptedException sleepInterrupted) {
-              logger.info("Sleep interrupted while populating fields for collection "
-                + collectionName);
+              logger
+                  .info("Sleep interrupted while populating fields for collection "
+                      + collectionName);
               break;
             } catch (Exception ex) {
               logger.error("Error while populating fields for collection "
-                + collectionName
-                + ", retryCount="
-                + retryCount);
-            } finally {
-              populateFieldsThreadActive = false;
+                  + collectionName + ", retryCount=" + retryCount);
             }
           }
+          populateFieldsThreadActive = false;
           logger.info("Exiting thread for populating fields. collection="
-            + collectionName);
+              + collectionName);
         }
-
       };
       fieldPopulationThread.setDaemon(true);
       fieldPopulationThread.start();
@@ -650,7 +651,7 @@ public abstract class SolrDaoBase {
   private boolean _populateSchemaFields() {
     SolrRequest<SchemaResponse> request = new SchemaRequest();
     request.setMethod(METHOD.GET);
-    request.setPath("/schema/fields");
+    request.setPath("/schema");
     String historyCollection = PropertiesUtil.getProperty("logsearch.solr.collection.history","history");
     if (solrClient != null && !collectionName.equals(historyCollection)) {
       NamedList<Object> namedList = null;
@@ -659,14 +660,13 @@ public abstract class SolrDaoBase {
         logger.info("populateSchemaFields() collection="
           + collectionName + ", fields=" + namedList);
       } catch (SolrException | SolrServerException | IOException e) {
-        
         logger.error(
           "Error occured while popuplating field. collection="
             + collectionName, e);
       }
       if (namedList != null) {
         ConfigUtil.extractSchemaFieldsName(namedList.toString(),
-          collectionName);
+          schemaFieldsNameMap,schemaFieldTypeMap);
         return true;
       }
     }
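
The populateSchemaFields change above turns the one-shot field lookup into a periodic refresh: the daemon thread now re-reads the schema every SETUP_UPDATE_SECOND (10 minutes) so fields added to the collection later become searchable without a restart, and it queries /schema (fields plus fieldTypes) instead of /schema/fields. A stripped-down sketch of that loop shape, with the SolrJ SchemaRequest replaced by a placeholder and only the sleep intervals taken from the patch:

    public class SchemaRefreshLoopSketch {
        static final int SETUP_RETRY_SECOND = 30;
        static final int SETUP_UPDATE_SECOND = 10 * 60;

        public static void main(String[] args) {
            Thread refresher = new Thread("populated_fields_demo") {
                @Override
                public void run() {
                    while (true) {
                        try {
                            Thread.sleep(SETUP_RETRY_SECOND * 1000L);
                            if (fetchSchemaOnce()) {
                                // On success, wait the longer refresh interval and read again,
                                // instead of breaking out of the loop as the old code did.
                                Thread.sleep(SETUP_UPDATE_SECOND * 1000L);
                            }
                        } catch (InterruptedException e) {
                            break; // shutdown requested
                        }
                    }
                }
            };
            refresher.setDaemon(true);
            refresher.start();
        }

        static boolean fetchSchemaOnce() {
            return true; // stand-in for the /schema request against the collection
        }
    }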

http://git-wip-us.apache.org/repos/asf/ambari/blob/4562dcd6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
index 091c40b..6b2f049 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
@@ -192,6 +192,9 @@ public class UserDao {
    * @return
    */
   public String encryptPassword(String username, String password) {
+    if (!stringUtil.isEmpty(username)) {
+      username = username.toLowerCase();
+    }
     String saltEncodedpasswd = md5Encoder
       .encodePassword(password, username);
     return saltEncodedpasswd;
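
The UserDao hunk lower-cases the username before it is handed to the password encoder as the salt, making the stored hash independent of how the username was capitalized at login. A self-contained illustration of the effect with java.security.MessageDigest; the real code delegates to the configured md5Encoder, whose exact salt format may differ from the password{salt} layout assumed here:

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class SaltNormalizationSketch {
        static String md5(String password, String usernameSalt) throws NoSuchAlgorithmException {
            String salt = usernameSalt == null ? "" : usernameSalt.toLowerCase();
            byte[] digest = MessageDigest.getInstance("MD5")
                    .digest((password + "{" + salt + "}").getBytes(StandardCharsets.UTF_8));
            StringBuilder hex = new StringBuilder();
            for (byte b : digest) {
                hex.append(String.format("%02x", b));
            }
            return hex.toString();
        }

        public static void main(String[] args) throws NoSuchAlgorithmException {
            // With the salt normalized, "Admin" and "admin" yield the same hash.
            System.out.println(md5("secret", "Admin"));
            System.out.println(md5("secret", "admin"));
        }
    }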

http://git-wip-us.apache.org/repos/asf/ambari/blob/4562dcd6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
index 361f8e3..d3975b3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
@@ -32,6 +32,7 @@ import org.apache.ambari.logsearch.query.QueryGeneration;
 import org.apache.ambari.logsearch.util.ConfigUtil;
 import org.apache.ambari.logsearch.util.DateUtil;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.ambari.logsearch.util.SolrUtil;
 import org.apache.ambari.logsearch.util.StringUtil;
 import org.apache.ambari.logsearch.view.VBarDataList;
 import org.apache.ambari.logsearch.view.VBarGraphData;
@@ -45,9 +46,12 @@ import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.client.solrj.response.RangeFacet;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.SimpleOrderedMap;
+import org.apache.solr.schema.TextField;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
+
+
 @Component
 public class GraphDataGenerator extends GraphDataGeneratorBase {
 
@@ -62,6 +66,9 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
 
   @Autowired
   DateUtil dateUtil;
+  
+  @Autowired
+  SolrUtil solrUtil;
 
   private static Logger logger = Logger.getLogger(GraphDataGenerator.class);
 
@@ -80,8 +87,7 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
     String from = (String) searchCriteria.getParamValue("from");
     String to = (String) searchCriteria.getParamValue("to");
     String unit = (String) searchCriteria.getParamValue("unit");
-    String suffix = (String) searchCriteria.getParamValue("suffix");
-    String typeXAxis = ConfigUtil.schemaFieldsName.get(xAxisField + suffix);
+    String typeXAxis = solrDaoBase.schemaFieldsNameMap.get(xAxisField);
     typeXAxis = (stringUtil.isEmpty(typeXAxis)) ? "string" : typeXAxis;
 
     // Y axis credentials
@@ -102,8 +108,7 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
       return nonRangeStackGraph(xAxisField, yAxisField, stackField, from, to,
           solrDaoBase, typeXAxis, fieldTime, solrQuery);
     case RANGE_STACK_GRAPH:
-      return rangeStackGraph(xAxisField, yAxisField, stackField, from, to,
-          unit, solrDaoBase, typeXAxis, fieldTime, solrQuery);
+      return rangeStackGraph(xAxisField, stackField, from, to, unit, solrDaoBase, solrQuery);
     default:
       logger.warn("Invalid graph type :" + garphType.name());
       return null;
@@ -255,7 +260,7 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
     queryGenerator.setMainQuery(solrQuery, mainQuery);
     queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
     String jsonQuery = "";
-    if (isTypeNumber(typeXAxis)) {
+    if (solrUtil.isSolrFieldNumber(typeXAxis,solrDaoBase)) {
       String function = (yAxisField.contains("count")) ? "sum" : yAxisField;
       jsonQuery = queryGenerator.buidlJSONFacetRangeQueryForNumber(stackField,
           xAxisField, function);
@@ -323,7 +328,7 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
     VBarGraphData vBarGraphData = new VBarGraphData();
     Collection<VNameValue> vNameValues = new ArrayList<VNameValue>();
     queryGenerator.setMainQuery(solrQuery, null);
-    if (isTypeNumber(typeXAxis)) {
+    if (solrUtil.isSolrFieldNumber(typeXAxis,solrDaoBase)) {
       queryGenerator.setSingleRangeFilter(solrQuery, fieldTime, from, to);
       return normalGraph(xAxisField, yAxisField, from, to, solrDaoBase,
           typeXAxis, fieldTime, solrQuery);
@@ -363,9 +368,8 @@ public class GraphDataGenerator extends GraphDataGeneratorBase {
   }
 
   @SuppressWarnings("unchecked")
-  private VBarDataList rangeStackGraph(String xAxisField, String yAxisField,
-      String stackField, String from, String to, String unit,
-      SolrDaoBase solrDaoBase, String typeXAxis, String fieldTime,
+  private VBarDataList rangeStackGraph(String xAxisField, String stackField,
+      String from, String to, String unit, SolrDaoBase solrDaoBase,
       SolrQuery solrQuery) {
     VBarDataList dataList = new VBarDataList();
     List<VBarGraphData> histogramData = new ArrayList<VBarGraphData>();

http://git-wip-us.apache.org/repos/asf/ambari/blob/4562dcd6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
index 58c3a4d..ab287bc 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/AuditMgr.java
@@ -468,12 +468,11 @@ public class AuditMgr extends MgrBase {
   }
 
   public String getAuditLogsSchemaFieldsName() {
-    String suffix = PropertiesUtil.getProperty("logsearch.solr.collection.audit.logs",LogSearchConstants.DEFAULT_AUDIT_COLUMN_SUFFIX);
     String excludeArray[] = PropertiesUtil
         .getPropertyStringList("logsearch.solr.audit.logs.exclude.columnlist");
     List<String> fieldNames = new ArrayList<String>();
     HashMap<String, String> uiFieldColumnMapping = new HashMap<String, String>();
-    ConfigUtil.getSchemaFieldsName(suffix, excludeArray, fieldNames);
+    ConfigUtil.getSchemaFieldsName(excludeArray, fieldNames,auditSolrDao);
 
     for (String fieldName : fieldNames) {
       String uiField = ConfigUtil.auditLogsColumnMapping.get(fieldName
@@ -492,15 +491,12 @@ public class AuditMgr extends MgrBase {
 
   public String getAnyGraphData(SearchCriteria searchCriteria) {
     searchCriteria.addParam("fieldTime", LogSearchConstants.AUDIT_EVTTIME);
-    String suffix = PropertiesUtil.getProperty("logsearch.solr.collection.audit.logs",LogSearchConstants.DEFAULT_AUDIT_COLUMN_SUFFIX);
-    searchCriteria.addParam("suffix", suffix);
     SolrQuery solrQuery = queryGenerator.commonAuditFilterQuery(searchCriteria);
     VBarDataList result = graphDataGenerator.getAnyGraphData(searchCriteria,
         auditSolrDao, solrQuery);
     if (result == null) {
       result = new VBarDataList();
     }
-
     return convertObjToString(result);
 
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4562dcd6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
index 748d2f9..257f04d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
@@ -1626,12 +1626,11 @@ public class LogsMgr extends MgrBase {
   public String getServiceLogsSchemaFieldsName() {
 
     List<String> fieldNames = new ArrayList<String>();
-    String suffix = PropertiesUtil.getProperty("logsearch.solr.collection.service.logs",LogSearchConstants.DEFAULT_SERVICE_COLUMN_SUFFIX);
     String excludeArray[] = PropertiesUtil
         .getPropertyStringList("logsearch.solr.service.logs.exclude.columnlist");
 
     HashMap<String, String> uiFieldColumnMapping = new LinkedHashMap<String, String>();
-    ConfigUtil.getSchemaFieldsName(suffix, excludeArray, fieldNames);
+    ConfigUtil.getSchemaFieldsName(excludeArray, fieldNames,serviceLogsSolrDao);
 
     for (String fieldName : fieldNames) {
       String uiField = ConfigUtil.serviceLogsColumnMapping.get(fieldName
@@ -1695,8 +1694,6 @@ public class LogsMgr extends MgrBase {
 
   public String getAnyGraphData(SearchCriteria searchCriteria) {
     searchCriteria.addParam("fieldTime", LogSearchConstants.LOGTIME);
-    String suffix = PropertiesUtil.getProperty("logsearch.solr.collection.service.logs",LogSearchConstants.DEFAULT_SERVICE_COLUMN_SUFFIX);
-    searchCriteria.addParam("suffix", suffix);
     SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
     VBarDataList result = graphDataGenerator.getAnyGraphData(searchCriteria,
         serviceLogsSolrDao, solrQuery);

http://git-wip-us.apache.org/repos/asf/ambari/blob/4562dcd6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
index 646abd6..d934e84 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
@@ -27,11 +27,19 @@ import java.util.regex.Pattern;
 
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.SearchCriteria;
+import org.apache.ambari.logsearch.dao.SolrDaoBase;
 import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
 import org.apache.ambari.logsearch.util.ConfigUtil;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
+import org.apache.lucene.analysis.core.KeywordTokenizerFactory;
+import org.apache.lucene.analysis.path.PathHierarchyTokenizerFactory;
+import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
 import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.schema.TrieDoubleField;
+import org.apache.solr.schema.TrieFloatField;
+import org.apache.solr.schema.TrieLongField;
 import org.springframework.stereotype.Component;
 
 @Component
@@ -245,6 +253,10 @@ public class QueryGeneration extends QueryGenerationBase {
               String value = getOriginalValue(originalKey,
                   "" + columnListMap.get(key));
               orQuery = putWildCardByType(value, originalKey, logType);
+              if (stringUtil.isEmpty(orQuery)) {
+                logger.debug("Removing invalid filter for key :"+originalKey +" and value :" +value );
+                continue;
+              }
               boolean isSame = false;
               if (elments.contains(key)) {
                 isSame = true;
@@ -337,48 +349,72 @@ public class QueryGeneration extends QueryGenerationBase {
   }
 
   private String putWildCardByType(String str, String key, LOG_TYPE logType) {
-    String type;
+    String fieldType;
+    SolrDaoBase solrDaoBase = null;
     switch (logType) {
     case AUDIT:
-      String auditSuffix = PropertiesUtil
-          .getProperty("logsearch.solr.collection.audit.logs",LogSearchConstants.DEFAULT_AUDIT_COLUMN_SUFFIX);
-      type = ConfigUtil.schemaFieldsName.get(key + auditSuffix);
+      fieldType = auditSolrDao.schemaFieldsNameMap.get(key);
+      solrDaoBase = auditSolrDao;
       break;
     case SERVICE:
-      String serviceLogs = PropertiesUtil.getProperty("logsearch.solr.collection.service.logs",LogSearchConstants.DEFAULT_SERVICE_COLUMN_SUFFIX);
-      type = ConfigUtil.schemaFieldsName.get(key + serviceLogs);
+      fieldType = serviceLogsSolrDao.schemaFieldsNameMap.get(key);
+      solrDaoBase = serviceLogsSolrDao;
       if (key.equalsIgnoreCase(LogSearchConstants.SOLR_LOG_MESSAGE)) {
         return solrUtil.escapeForLogMessage(key, str);
       }
       break;
     default:
       // set as null
-      type = null;
+      logger.error("Invalid logtype :" + logType);
+      fieldType = null;
     }
-    if (type == null) {
-      return key + ":" + "*" + str + "*";
-    } else if ("text_std_token_lower_case".equalsIgnoreCase(type)) {
-      return key + ":" + solrUtil.escapeForStandardTokenizer(str);
-    } else if ("key_lower_case".equalsIgnoreCase(type)
-        || "string".equalsIgnoreCase(type)) {
-      return key + ":" + solrUtil.makeSolrSearchStringWithoutAsterisk(str);
-    } else if ("ip_address".equalsIgnoreCase(type)) {
-      return key + ":" + str;
+    if (!stringUtil.isEmpty(fieldType)) {
+      if (solrUtil.isSolrFieldNumber(fieldType, solrDaoBase)) {
+        String value = putEscapeCharacterForNumber(str, fieldType,solrDaoBase);
+        if (!stringUtil.isEmpty(value)) {
+          return key + ":" + value;
+        } else {
+          return null;
+        }
+      } else if (checkTokenizer(fieldType, StandardTokenizerFactory.class,solrDaoBase)) {
+        return key + ":" + solrUtil.escapeForStandardTokenizer(str);
+      } else if (checkTokenizer(fieldType, KeywordTokenizerFactory.class,solrDaoBase)|| "string".equalsIgnoreCase(fieldType)) {
+        return key + ":" + solrUtil.makeSolrSearchStringWithoutAsterisk(str);
+      } else if (checkTokenizer(fieldType, PathHierarchyTokenizerFactory.class,solrDaoBase)) {
+        return key + ":" + str;
+      }
     }
-    return key + ":" + putEscapeCharacterForNumber(str);
+   return key + ":" + "*" + str + "*";
   }
 
-  private String putEscapeCharacterForNumber(String str) {
-    String escapeCharSting = "" + returnDefaultIfValueNotNumber(str);
-    escapeCharSting = str.replace("-", "\\-");
+  private String putEscapeCharacterForNumber(String str,String fieldType,SolrDaoBase solrDaoBase) {
+    if (!stringUtil.isEmpty(str)) {
+      str = str.replace("*", "");
+    }
+    String escapeCharSting = parseInputValueAsPerFieldType(str,fieldType,solrDaoBase);
+    if (escapeCharSting == null || escapeCharSting.isEmpty()) {
+      return null;
+    }
+    escapeCharSting = escapeCharSting.replace("-", "\\-");
     return escapeCharSting;
   }
 
-  private String returnDefaultIfValueNotNumber(String str) {
+  private String parseInputValueAsPerFieldType(String str,String fieldType,SolrDaoBase solrDaoBase ) {
     try {
-      return "" + Integer.parseInt(str);
+      HashMap<String, Object> fieldTypeInfoMap= solrUtil.getFieldTypeInfoMap(fieldType,solrDaoBase);
+      String className = (String) fieldTypeInfoMap.get("class");
+      if( className.equalsIgnoreCase(TrieDoubleField.class.getSimpleName())){
+        return ""+ Double.parseDouble(str);
+      }else if(className.equalsIgnoreCase(TrieFloatField.class.getSimpleName())){
+        return ""+ Float.parseFloat(str);
+      }else if(className.equalsIgnoreCase(TrieLongField.class.getSimpleName())){
+        return ""+ Long.parseLong(str);
+      }else {
+        return "" + Integer.parseInt(str);
+      }
     } catch (Exception e) {
-      return "0";
+      logger.debug("Invaid input str: " + str + " For fieldType :" + fieldType);
+      return null;
     }
   }
 
@@ -436,4 +472,25 @@ public class QueryGeneration extends QueryGenerationBase {
     }
     return originalKey;
   }
+  
+  public boolean checkTokenizer(String fieldType,Class tokenizerFactoryClass,SolrDaoBase solrDaoBase) {
+    HashMap<String, Object> fieldTypeMap = solrUtil.getFieldTypeInfoMap(fieldType,solrDaoBase);
+    HashMap<String, Object> analyzer = (HashMap<String, Object>) fieldTypeMap
+        .get("analyzer");
+    if (analyzer != null) {
+      HashMap<String, Object> tokenizerMap = (HashMap<String, Object>) analyzer
+          .get("tokenizer");
+      if (tokenizerMap != null) {
+        String tokenizerClass = (String) tokenizerMap.get("class");
+        if (!StringUtils.isEmpty(tokenizerClass)) {
+          tokenizerClass =tokenizerClass.replace("solr.", "");
+          if (tokenizerClass.equalsIgnoreCase(tokenizerFactoryClass
+              .getSimpleName())) {
+            return true;
+          }
+        }
+      }
+    }
+    return false;
+  }
 }
\ No newline at end of file
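
For text fields, the rewritten putWildCardByType above stops matching on hard-coded field-type names and instead inspects the tokenizer class declared in the field type's analyzer (StandardTokenizerFactory, KeywordTokenizerFactory, PathHierarchyTokenizerFactory) to pick an escaping strategy. A standalone sketch of that dispatch; the escaping bodies are simplified placeholders for solrUtil's helpers, not their actual implementations:

    public class TokenizerDispatchSketch {
        // tokenizerClass is the simple name taken from the schema, e.g. "StandardTokenizerFactory"
        // after the "solr." prefix has been stripped.
        static String buildClause(String key, String value, String tokenizerClass) {
            if ("StandardTokenizerFactory".equalsIgnoreCase(tokenizerClass)) {
                return key + ":" + value.trim().replace(" ", "\\ ");  // word-tokenized text
            }
            if ("KeywordTokenizerFactory".equalsIgnoreCase(tokenizerClass)) {
                return key + ":\"" + value + "\"";                    // whole value is one token
            }
            if ("PathHierarchyTokenizerFactory".equalsIgnoreCase(tokenizerClass)) {
                return key + ":" + value;                             // hierarchical values such as paths
            }
            return key + ":*" + value + "*";                          // unknown analyzer: substring match
        }

        public static void main(String[] args) {
            System.out.println(buildClause("path", "/var/log/ambari", "PathHierarchyTokenizerFactory"));
            System.out.println(buildClause("host", "c6401.ambari.apache.org", "KeywordTokenizerFactory"));
        }
    }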

http://git-wip-us.apache.org/repos/asf/ambari/blob/4562dcd6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
index a49107d..a128098 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
@@ -24,6 +24,8 @@ import java.util.List;
 
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.SearchCriteria;
+import org.apache.ambari.logsearch.dao.AuditSolrDao;
+import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao;
 import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.QueryBase;
 import org.apache.ambari.logsearch.util.SolrUtil;
@@ -34,6 +36,7 @@ import org.apache.solr.client.solrj.SolrQuery.ORDER;
 import org.springframework.beans.factory.annotation.Autowired;
 
 import com.google.gson.Gson;
+import com.sun.jersey.api.spring.Autowire;
 
 public abstract class QueryGenerationBase extends QueryBase {
 
@@ -47,6 +50,14 @@ public abstract class QueryGenerationBase extends QueryBase {
 
   @Autowired
   JSONUtil jsonUtil;
+  
+  @Autowired
+  AuditSolrDao auditSolrDao;
+  
+  @Autowired
+  ServiceLogsSolrDao serviceLogsSolrDao;
+  
+  
 
   public static enum CONDITION {
     OR, AND

http://git-wip-us.apache.org/repos/asf/ambari/blob/4562dcd6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
index bdd304f..2661150 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
@@ -23,9 +23,9 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.MessageEnums;
+import org.apache.ambari.logsearch.dao.SolrDaoBase;
 import org.apache.ambari.logsearch.manager.MgrBase;
 import org.apache.log4j.Logger;
 import org.codehaus.jettison.json.JSONArray;
@@ -38,7 +38,6 @@ public class ConfigUtil {
 
   public static HashMap<String, String> auditLogsColumnMapping = new HashMap<String, String>();
 
-  public static HashMap<String, String> schemaFieldsName = new HashMap<String, String>();
 
   public static void initializeApplicationConfig() {
     initializeColumnMapping();
@@ -78,57 +77,63 @@ public class ConfigUtil {
       auditLogsColumnMapping);
   }
 
+  
   public static void extractSchemaFieldsName(String responseString,
-                                             String suffix) {
+      HashMap<String, String> schemaFieldsNameMap,
+      HashMap<String, String> schemaFieldTypeMap) {
     try {
       JSONObject jsonObject = new JSONObject(responseString);
-      JSONArray jsonArrayList = jsonObject.getJSONArray("fields");
-      
-      if(jsonArrayList == null){
+      JSONObject schemajsonObject = jsonObject.getJSONObject("schema");
+      JSONArray jsonArrayList = schemajsonObject.getJSONArray("fields");
+      JSONArray fieldTypeJsonArray = schemajsonObject
+          .getJSONArray("fieldTypes");
+      if (jsonArrayList == null) {
+        return;
+      }
+      if (fieldTypeJsonArray == null) {
         return;
       }
+      HashMap<String, String> _schemaFieldTypeMap = new HashMap<String, String>();
+      HashMap<String, String> _schemaFieldsNameMap = new HashMap<String, String>();
+      for (int i = 0; i < fieldTypeJsonArray.length(); i++) {
+        JSONObject typeObject = fieldTypeJsonArray.getJSONObject(i);
+        String name = typeObject.getString("name");
+        String fieldTypeJson = typeObject.toString();
+        _schemaFieldTypeMap.put(name, fieldTypeJson);
+      }
 
       for (int i = 0; i < jsonArrayList.length(); i++) {
         JSONObject explrObject = jsonArrayList.getJSONObject(i);
         String name = explrObject.getString("name");
         String type = explrObject.getString("type");
-
         if (!name.contains("@") && !name.startsWith("_")
-          && !name.contains("_md5") && !name.contains("_ms")
-          && !name.contains(LogSearchConstants.NGRAM_SUFFIX)
-          && !name.contains("tags") && !name.contains("_str")) {
-          schemaFieldsName.put(name + suffix, type);
+            && !name.contains("_md5") && !name.contains("_ms")
+            && !name.contains(LogSearchConstants.NGRAM_SUFFIX)
+            && !name.contains("tags") && !name.contains("_str")) {
+          _schemaFieldsNameMap.put(name, type);
         }
       }
-
+      schemaFieldsNameMap.clear();
+      schemaFieldTypeMap.clear();
+      schemaFieldsNameMap.putAll(_schemaFieldsNameMap);
+      schemaFieldTypeMap.putAll(_schemaFieldTypeMap);
     } catch (Exception e) {
-
       logger.error(e + "Credentials not specified in logsearch.properties "
-        + MessageEnums.ERROR_SYSTEM);
-
+          + MessageEnums.ERROR_SYSTEM);
     }
-
   }
 
   @SuppressWarnings("rawtypes")
-  public static void getSchemaFieldsName(String suffix, String excludeArray[],
-                                         List<String> fieldNames) {
-    if (!schemaFieldsName.isEmpty()) {
-      Iterator iteratorSechmaFieldsName = schemaFieldsName.entrySet()
-        .iterator();
-
+  public static void getSchemaFieldsName(String excludeArray[],
+      List<String> fieldNames, SolrDaoBase solrDaoBase) {
+    if (!solrDaoBase.schemaFieldsNameMap.isEmpty()) {
+      Iterator iteratorSechmaFieldsName = solrDaoBase.schemaFieldsNameMap
+          .entrySet().iterator();
       while (iteratorSechmaFieldsName.hasNext()) {
-
-        Map.Entry fieldName = (Map.Entry) iteratorSechmaFieldsName
-          .next();
+        Map.Entry fieldName = (Map.Entry) iteratorSechmaFieldsName.next();
         String field = "" + fieldName.getKey();
-
-        if (field.contains(suffix)) {
-          field = field.replace(suffix, "");
-          if (!isExclude(field, excludeArray)) {
-            fieldNames.add(field);
-          }
-
+        if (!isExclude(field, excludeArray)) {
+          fieldNames.add(field);
         }
       }
     }
@@ -144,5 +149,4 @@ public class ConfigUtil {
     }
     return false;
   }
-
 }
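
extractSchemaFieldsName now expects the full /schema payload and fills two per-collection maps: field name to field-type name, and field-type name to the raw type definition that is later mined for the analyzer's tokenizer class. A trimmed, self-contained example of that parsing with Jettison, using a tiny hand-written schema string and omitting the name filtering (_md5, _str, ngram_, tags, and so on) done in the real method:

    import java.util.HashMap;

    import org.codehaus.jettison.json.JSONArray;
    import org.codehaus.jettison.json.JSONObject;

    public class SchemaParseSketch {
        public static void main(String[] args) throws Exception {
            String response = "{\"schema\":{"
                + "\"fieldTypes\":[{\"name\":\"tlong\",\"class\":\"solr.TrieLongField\"}],"
                + "\"fields\":[{\"name\":\"bytes\",\"type\":\"tlong\"}]}}";
            HashMap<String, String> schemaFieldsNameMap = new HashMap<String, String>();
            HashMap<String, String> schemaFieldTypeMap = new HashMap<String, String>();

            JSONObject schema = new JSONObject(response).getJSONObject("schema");
            JSONArray fieldTypes = schema.getJSONArray("fieldTypes");
            for (int i = 0; i < fieldTypes.length(); i++) {
                JSONObject type = fieldTypes.getJSONObject(i);
                schemaFieldTypeMap.put(type.getString("name"), type.toString());
            }
            JSONArray fields = schema.getJSONArray("fields");
            for (int i = 0; i < fields.length(); i++) {
                JSONObject field = fields.getJSONObject(i);
                schemaFieldsNameMap.put(field.getString("name"), field.getString("type"));
            }
            System.out.println(schemaFieldsNameMap); // {bytes=tlong}
            System.out.println(schemaFieldTypeMap);  // tlong -> the raw type JSON (key order may vary)
        }
    }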

http://git-wip-us.apache.org/repos/asf/ambari/blob/4562dcd6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
index f68891f..ee706bf 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
@@ -20,10 +20,16 @@
 package org.apache.ambari.logsearch.util;
 
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.Locale;
 
 import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.dao.SolrDaoBase;
 import org.apache.log4j.Logger;
+import org.apache.solr.schema.TrieDoubleField;
+import org.apache.solr.schema.TrieFloatField;
+import org.apache.solr.schema.TrieIntField;
+import org.apache.solr.schema.TrieLongField;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
@@ -34,6 +40,9 @@ public class SolrUtil {
   @Autowired
   StringUtil stringUtil;
 
+  @Autowired
+  JSONUtil jsonUtil;
+
   public String setField(String fieldName, String value) {
     if (value == null || value.trim().length() == 0) {
       return "";
@@ -296,4 +305,49 @@ public class SolrUtil {
 
     return newSearch.replace(" ", "\\ ");
   }
+  
+
+  public boolean isSolrFieldNumber(String fieldType,SolrDaoBase solrDaoBase) {
+    if (stringUtil.isEmpty(fieldType)) {
+      return false;
+    } else {
+      HashMap<String, Object> typeInfoMap = getFieldTypeInfoMap(fieldType,solrDaoBase);
+      if (typeInfoMap == null || typeInfoMap.isEmpty()) {
+        return false;
+      }
+      String fieldTypeClassName = (String) typeInfoMap.get("class");
+      if (fieldTypeClassName.equalsIgnoreCase(TrieIntField.class
+          .getSimpleName())) {
+        return true;
+      }
+      if (fieldTypeClassName.equalsIgnoreCase(TrieDoubleField.class
+          .getSimpleName())) {
+        return true;
+      }
+      if (fieldTypeClassName.equalsIgnoreCase(TrieFloatField.class
+          .getSimpleName())) {
+        return true;
+      }
+      if (fieldTypeClassName.equalsIgnoreCase(TrieLongField.class
+          .getSimpleName())) {
+        return true;
+      }
+      return false;
+    }
+  }
+  
+  public HashMap<String, Object> getFieldTypeInfoMap(String fieldType,SolrDaoBase solrDaoBase) {
+    String fieldTypeMetaData = solrDaoBase.schemaFieldTypeMap.get(fieldType);
+    HashMap<String, Object> fieldTypeMap = jsonUtil
+        .jsonToMapObject(fieldTypeMetaData);
+    if (fieldTypeMap == null) {
+      return new HashMap<String, Object>();
+    }
+    String classname = (String) fieldTypeMap.get("class");
+    if (!stringUtil.isEmpty(classname)) {
+      classname = classname.replace("solr.", "");
+      fieldTypeMap.put("class", classname);
+    }
+    return fieldTypeMap;
+  }
 }
\ No newline at end of file
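
The new SolrUtil helpers normalize the field type's class attribute by stripping the "solr." prefix and then treat the Trie int/long/float/double classes as numeric. Outside of Spring and SolrJ that check boils down to roughly the following sketch, where set membership stands in for the chain of equalsIgnoreCase calls above:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class NumericTypeCheckSketch {
        static final Set<String> NUMERIC_CLASSES = new HashSet<String>(Arrays.asList(
                "trieintfield", "trielongfield", "triefloatfield", "triedoublefield"));

        static boolean isNumeric(String fieldTypeClass) {
            if (fieldTypeClass == null || fieldTypeClass.trim().isEmpty()) {
                return false;
            }
            String simple = fieldTypeClass.replace("solr.", "").toLowerCase();
            return NUMERIC_CLASSES.contains(simple);
        }

        public static void main(String[] args) {
            System.out.println(isNumeric("solr.TrieLongField")); // true
            System.out.println(isNumeric("solr.TextField"));     // false
        }
    }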


[15/50] [abbrv] ambari git commit: AMBARI-18227. Add unit tests for Log Search components and refactor them as needed - Vol 1. (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
index a128098..ca6df65 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
@@ -29,56 +29,47 @@ import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao;
 import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.QueryBase;
 import org.apache.ambari.logsearch.util.SolrUtil;
-import org.apache.ambari.logsearch.util.StringUtil;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrQuery.ORDER;
 import org.springframework.beans.factory.annotation.Autowired;
 
 import com.google.gson.Gson;
-import com.sun.jersey.api.spring.Autowire;
 
 public abstract class QueryGenerationBase extends QueryBase {
 
-  static Logger logger = Logger.getLogger(QueryGenerationBase.class);
+  private static final Logger logger = Logger.getLogger(QueryGenerationBase.class);
 
   @Autowired
-  SolrUtil solrUtil;
-
-  @Autowired
-  StringUtil stringUtil;
-
-  @Autowired
-  JSONUtil jsonUtil;
+  protected SolrUtil solrUtil;
   
   @Autowired
-  AuditSolrDao auditSolrDao;
+  protected AuditSolrDao auditSolrDao;
   
   @Autowired
-  ServiceLogsSolrDao serviceLogsSolrDao;
-  
+  protected ServiceLogsSolrDao serviceLogsSolrDao;
   
+  @Autowired
+  protected JSONUtil jsonUtil;
 
-  public static enum CONDITION {
+  public static enum Condition {
     OR, AND
   }
 
   // SetMethods to apply to the query
-  public void setFilterClauseForSolrSearchableString(SolrQuery solrQuery,
-      String commaSepratedString, CONDITION condition, String operator,
-      String messageField) {
+  protected void setFilterClauseForSolrSearchableString(SolrQuery solrQuery, String commaSepratedString, Condition condition,
+      String operator, String messageField) {
     String filterQuery = "";
-    if (!stringUtil.isEmpty(commaSepratedString)) {
+    if (!StringUtils.isBlank(commaSepratedString)) {
       StringBuilder queryMsg = new StringBuilder();
       operator = (operator == null ? LogSearchConstants.NO_OPERATOR : operator);
-      String[] msgList = commaSepratedString
-          .split(LogSearchConstants.I_E_SEPRATOR);
+      String[] msgList = commaSepratedString.split(LogSearchConstants.I_E_SEPRATOR);
       int count = 0;
       for (String temp : msgList) {
         count += 1;
         if (LogSearchConstants.SOLR_LOG_MESSAGE.equalsIgnoreCase(messageField)) {
-          queryMsg.append(" " + operator
-              + solrUtil.escapeForLogMessage(messageField, temp));
+          queryMsg.append(" " + operator + solrUtil.escapeForLogMessage(messageField, temp));
         } else {
           temp = solrUtil.escapeForStandardTokenizer(temp);
           if(temp.startsWith("\"") && temp.endsWith("\"")){
@@ -86,8 +77,7 @@ public abstract class QueryGenerationBase extends QueryBase {
             temp = temp.substring(0, temp.length()-1);
           }
           temp = "*" + temp + "*";
-          queryMsg.append(" " + operator + messageField + ":"
-              + temp);
+          queryMsg.append(" " + operator + messageField + ":" + temp);
         }
         if (msgList.length > count){
           queryMsg.append(" " + condition.name() + " ");
@@ -99,21 +89,20 @@ public abstract class QueryGenerationBase extends QueryBase {
     }
   }
 
-  public void setFilterClauseWithFieldName(SolrQuery solrQuery,
-      String commaSepratedString, String field, String operator,
-      CONDITION condition) {
-    if (!stringUtil.isEmpty(commaSepratedString)) {
+  public void setFilterClauseWithFieldName(SolrQuery solrQuery, String commaSepratedString, String field, String operator,
+      Condition condition) {
+    if (!StringUtils.isBlank(commaSepratedString)) {
       String[] arrayOfSepratedString = commaSepratedString.split(LogSearchConstants.LIST_SEPARATOR);
       String filterQuery = null;
-      if (CONDITION.OR.equals(condition)) {
+      if (Condition.OR.equals(condition)) {
         filterQuery = solrUtil.orList(operator + field, arrayOfSepratedString,"");
-      } else if (CONDITION.AND.equals(condition)) {
+      } else if (Condition.AND.equals(condition)) {
         filterQuery = solrUtil.andList(operator + field, arrayOfSepratedString,"");
       }else{
         logger.warn("Not a valid condition :" + condition.name());
       }
       //add
-      if(!stringUtil.isEmpty(filterQuery)){
+      if (!StringUtils.isBlank(filterQuery)){
         solrQuery.addFilterQuery(filterQuery);
         logger.debug("Filter added :- " + filterQuery);
       }
@@ -121,58 +110,42 @@ public abstract class QueryGenerationBase extends QueryBase {
     }
   }
 
-  public void setSortOrderDefaultServiceLog(SolrQuery solrQuery,
-      SearchCriteria searchCriteria) {
+  public void setSortOrderDefaultServiceLog(SolrQuery solrQuery, SearchCriteria searchCriteria) {
     List<SolrQuery.SortClause> defaultSort = new ArrayList<SolrQuery.SortClause>();
-    if (searchCriteria.getSortBy() != null
-        && (!searchCriteria.getSortBy().isEmpty())) {
+    if (!StringUtils.isBlank(searchCriteria.getSortBy())) {
       ORDER order = SolrQuery.ORDER.asc;
-      if (searchCriteria.getSortType() != null
-          && (!searchCriteria.getSortType().isEmpty())
-          && !searchCriteria.getSortType().equalsIgnoreCase(order.toString())) {
+      if (!order.toString().equalsIgnoreCase(searchCriteria.getSortType())) {
         order = SolrQuery.ORDER.desc;
       }
-      SolrQuery.SortClause logtimeSortClause = SolrQuery.SortClause.create(
-          searchCriteria.getSortBy(), order);
+      SolrQuery.SortClause logtimeSortClause = SolrQuery.SortClause.create(searchCriteria.getSortBy(), order);
       defaultSort.add(logtimeSortClause);
     } else {
-      // by default sorting by logtime and sequence number in
-      // Descending order
-      SolrQuery.SortClause logtimeSortClause = SolrQuery.SortClause.create(
-          LogSearchConstants.LOGTIME, SolrQuery.ORDER.desc);
+      // by default sorting by logtime and sequence number in Descending order
+      SolrQuery.SortClause logtimeSortClause = SolrQuery.SortClause.create(LogSearchConstants.LOGTIME, SolrQuery.ORDER.desc);
       defaultSort.add(logtimeSortClause);
 
     }
-    SolrQuery.SortClause sequenceNumberSortClause = SolrQuery.SortClause
-        .create(LogSearchConstants.SEQUNCE_ID, SolrQuery.ORDER.desc);
+    SolrQuery.SortClause sequenceNumberSortClause = SolrQuery.SortClause.create(LogSearchConstants.SEQUNCE_ID, SolrQuery.ORDER.desc);
     defaultSort.add(sequenceNumberSortClause);
     solrQuery.setSorts(defaultSort);
     logger.debug("Sort Order :-" + defaultSort);
   }
 
-  public void setFilterFacetSort(SolrQuery solrQuery,
-                                 SearchCriteria searchCriteria) {
-    if (searchCriteria.getSortBy() != null
-      && (!searchCriteria.getSortBy().isEmpty())) {
+  public void setFilterFacetSort(SolrQuery solrQuery, SearchCriteria searchCriteria) {
+    if (!StringUtils.isBlank(searchCriteria.getSortBy())) {
       solrQuery.setFacetSort(searchCriteria.getSortBy());
       logger.info("Sorted By :- " + searchCriteria.getSortBy());
     }
   }
 
-  public void setSingleSortOrder(SolrQuery solrQuery,
-                                 SearchCriteria searchCriteria) {
+  public void setSingleSortOrder(SolrQuery solrQuery, SearchCriteria searchCriteria) {
     List<SolrQuery.SortClause> sort = new ArrayList<SolrQuery.SortClause>();
-    if (searchCriteria.getSortBy() != null
-      && (!searchCriteria.getSortBy().isEmpty())) {
+    if (!StringUtils.isBlank(searchCriteria.getSortBy())) {
       ORDER order = SolrQuery.ORDER.asc;
-      if (searchCriteria.getSortType() != null
-        && (!searchCriteria.getSortType().isEmpty())
-        && !searchCriteria.getSortType().equalsIgnoreCase(
-        order.toString())) {
+      if (!order.toString().equalsIgnoreCase(searchCriteria.getSortType())) {
         order = SolrQuery.ORDER.desc;
       }
-      SolrQuery.SortClause sortOrder = SolrQuery.SortClause.create(
-        searchCriteria.getSortBy(), order);
+      SolrQuery.SortClause sortOrder = SolrQuery.SortClause.create(searchCriteria.getSortBy(), order);
       sort.add(sortOrder);
       solrQuery.setSorts(sort);
       logger.debug("Sort Order :-" + sort);
@@ -182,23 +155,19 @@ public abstract class QueryGenerationBase extends QueryBase {
   // Search Criteria has parameter "sort" from it can get list of Sort Order
   // Example of list can be [logtime desc,seq_num desc]
   @SuppressWarnings("unchecked")
-  public void setMultipleSortOrder(SolrQuery solrQuery,
-      SearchCriteria searchCriteria) {
+  public void setMultipleSortOrder(SolrQuery solrQuery, SearchCriteria searchCriteria) {
     List<SolrQuery.SortClause> sort = new ArrayList<SolrQuery.SortClause>();
     List<String> sortList = (List<String>) searchCriteria.getParamValue("sort");
     if (sortList != null) {
       for (String sortOrder : sortList) {
-        if (!stringUtil.isEmpty(sortOrder)) {
+        if (!StringUtils.isBlank(sortOrder)) {
           String sortByAndOrder[] = sortOrder.split(" ");
           if (sortByAndOrder.length > 1) {
-            ORDER order = sortByAndOrder[1].contains("asc") ? SolrQuery.ORDER.asc
-                : SolrQuery.ORDER.desc;
-            SolrQuery.SortClause solrSortClause = SolrQuery.SortClause.create(
-                sortByAndOrder[0], order);
+            ORDER order = sortByAndOrder[1].contains("asc") ? SolrQuery.ORDER.asc : SolrQuery.ORDER.desc;
+            SolrQuery.SortClause solrSortClause = SolrQuery.SortClause.create(sortByAndOrder[0], order);
             sort.add(solrSortClause);
             logger.debug("Sort Order :-" + sort);
           } else {
-            // log warn
             logger.warn("Not a valid sort Clause " + sortOrder);
           }
         }
@@ -207,34 +176,26 @@ public abstract class QueryGenerationBase extends QueryBase {
     }
   }
 
-  public void setSingleIncludeFilter(SolrQuery solrQuery, String filterType,
-                                     String filterValue) {
-    if (filterType != null && !filterType.isEmpty() && filterValue != null
-      && !filterValue.isEmpty()) {
+  public void setSingleIncludeFilter(SolrQuery solrQuery, String filterType, String filterValue) {
+    if (!StringUtils.isBlank(filterType) && !StringUtils.isBlank(filterValue)) {
       String filterQuery = buildFilterQuery(filterType, filterValue);
       solrQuery.addFilterQuery(filterQuery);
       logger.debug("Filter added :- " + filterQuery);
     }
   }
 
-  public void setSingleExcludeFilter(SolrQuery solrQuery, String filterType,
-      String filterValue) {
-    if (!stringUtil.isEmpty(filterValue) && !stringUtil.isEmpty(filterType)) {
-      String filterQuery = LogSearchConstants.MINUS_OPERATOR
-          + buildFilterQuery(filterType, filterValue);
+  public void setSingleExcludeFilter(SolrQuery solrQuery, String filterType, String filterValue) {
+    if (!StringUtils.isBlank(filterValue) && !StringUtils.isBlank(filterType)) {
+      String filterQuery = LogSearchConstants.MINUS_OPERATOR + buildFilterQuery(filterType, filterValue);
       solrQuery.addFilterQuery(filterQuery);
       logger.debug("Filter added :- " + filterQuery);
     }
   }
 
-  public void setSingleRangeFilter(SolrQuery solrQuery, String filterType,
-      String filterFromValue, String filterToValue) {
-    if (!stringUtil.isEmpty(filterToValue)
-        && !stringUtil.isEmpty(filterType)
-        && !stringUtil.isEmpty(filterFromValue)) {
-      String filterQuery = buildInclusiveRangeFilterQuery(filterType,
-          filterFromValue, filterToValue);
-      if (!stringUtil.isEmpty(filterQuery)) {
+  public void setSingleRangeFilter(SolrQuery solrQuery, String filterType, String filterFromValue, String filterToValue) {
+    if (!StringUtils.isBlank(filterToValue) && !StringUtils.isBlank(filterType) && !StringUtils.isBlank(filterFromValue)) {
+      String filterQuery = buildInclusiveRangeFilterQuery(filterType, filterFromValue, filterToValue);
+      if (!StringUtils.isBlank(filterQuery)) {
         solrQuery.addFilterQuery(filterQuery);
         logger.debug("Filter added :- " + filterQuery);
       }
@@ -258,30 +219,17 @@ public abstract class QueryGenerationBase extends QueryBase {
     }
 
     if (startIndex != null && maxRows != null)
-      logger.info("Pagination was set from " + startIndex.intValue()
-        + " to " + maxRows.intValue());
+      logger.info("Pagination was set from " + startIndex.intValue() + " to " + maxRows.intValue());
   }
 
-  public void setSingleORFilter(SolrQuery solrQuery, String filterName1,
-      String value1, String filterName2, String value2) {
-    String filterQuery = filterName1 + ":" + value1 + " " + CONDITION.OR.name()
-        + " " + filterName2 + ":" + value2;
+  public void setSingleORFilter(SolrQuery solrQuery, String filterName1, String value1, String filterName2, String value2) {
+    String filterQuery = filterName1 + ":" + value1 + " " + Condition.OR.name() + " " + filterName2 + ":" + value2;
     solrQuery.setFilterQueries(filterQuery);
   }
 
   // BuildMethods to prepare a particular format as required for solr
-  public String buildInclusiveRangeFilterQuery(String filterType,
-                                               String filterFromValue, String filterToValue) {
-    String filterQuery = filterType + ":[" + filterFromValue + " TO "
-      + filterToValue + "]";
-    logger.info("Build Filter was :- " + filterQuery);
-    return filterQuery;
-  }
-
-  public String buildExclusiveRangeFilterQuery(String filterType,
-                                               String filterFromValue, String filterToValue) {
-    String filterQuery = filterType + ":{" + filterFromValue + " TO "
-      + filterToValue + "}";
+  public String buildInclusiveRangeFilterQuery(String filterType, String filterFromValue, String filterToValue) {
+    String filterQuery = filterType + ":[" + filterFromValue + " TO " + filterToValue + "]";
     logger.info("Build Filter was :- " + filterQuery);
     return filterQuery;
   }
@@ -292,139 +240,32 @@ public abstract class QueryGenerationBase extends QueryBase {
     return filterQuery;
   }
 
-//  public String buildQueryFromJSONCompHost(String jsonHCNames,
-//      String selectedComponent) {
-//    String queryHostComponent = "";
-//    // Building and adding exclude string to filters
-//    String selectedCompQuery = "";
-//    if (!stringUtil.isEmpty(selectedComponent)) {
-//      String[] selectedComponents = selectedComponent
-//          .split(LogSearchConstants.LIST_SEPARATOR);
-//      selectedCompQuery = solrUtil.orList(LogSearchConstants.SOLR_COMPONENT,
-//          selectedComponents);
-//    }
-//
-//    // Building Query of Host and Components from given json
-//    if (jsonHCNames != null && !jsonHCNames.equals("")
-//        && !jsonHCNames.equals("[]")) {
-//
-//      try {
-//        JSONArray jarray = new JSONArray(jsonHCNames);
-//        int flagHost = 0;
-//        int flagComp;
-//        int count;
-//        for (int i = 0; i < jarray.length(); i++) {
-//          if (flagHost == 1)
-//            queryHostComponent = queryHostComponent + " OR ";
-//          JSONObject jsonObject = jarray.getJSONObject(i);
-//          String host = jsonObject.getString("h");
-//          queryHostComponent = queryHostComponent + "( host:" + host;
-//          List<String> components = JSONUtil.JSONToList(jsonObject
-//              .getJSONArray("c"));
-//          if (!components.isEmpty())
-//            queryHostComponent = queryHostComponent + " AND ";
-//
-//          flagComp = 0;
-//          count = 0;
-//          for (String comp : components) {
-//            if (flagComp == 0)
-//              queryHostComponent = queryHostComponent + " ( ";
-//            count += 1;
-//            queryHostComponent = queryHostComponent + " " + " type:" + comp;
-//            if (components.size() <= count)
-//              queryHostComponent = queryHostComponent + " ) ";
-//            else
-//              queryHostComponent = queryHostComponent + " OR ";
-//            flagComp = 1;
-//          }
-//          queryHostComponent = queryHostComponent + " ) ";
-//          flagHost = 1;
-//        }
-//      } catch (JSONException e) {
-//        logger.error(e);
-//      }
-//    }
-//    if (selectedCompQuery != null && !selectedCompQuery.equals("")) {
-//      if (queryHostComponent == null || queryHostComponent.equals(""))
-//        queryHostComponent = selectedCompQuery;
-//      else
-//        queryHostComponent = queryHostComponent + " OR " + selectedCompQuery;
-//    }
-//    return queryHostComponent;
-//  }
-
-  // JSON BuildMethods
-
-  /**
-   * @param function , xAxisField
-   * @return jsonString
-   */
-  public String buildJSONFacetAggregatedFuncitonQuery(String function,
-                                                      String xAxisField) {
+  public String buildJSONFacetAggregatedFuncitonQuery(String function, String xAxisField) {
     return "{x:'" + function + "(" + xAxisField + ")'}";
   }
 
-  /**
-   * @param fieldName , fieldTime, from, to, unit
-   * @return jsonString
-   * @hierarchy Term, Time Range
-   */
-  public String buildJSONFacetTermTimeRangeQuery(String fieldName,
-                                                 String fieldTime, String from, String to, String unit) {
+  public String buildJSONFacetTermTimeRangeQuery(String fieldName, String fieldTime, String from, String to, String unit) {
     String query = "{";
-    query += "x" + ":{type:terms,field:" + fieldName
-      + ",facet:{y:{type:range,field:" + fieldTime + ",start:\""
-      + from + "\",end:\"" + to + "\",gap:\"" + unit + "\"}}}";
+    query += "x" + ":{type:terms,field:" + fieldName + ",facet:{y:{type:range,field:" + fieldTime + ",start:\"" + from + "\",end:\"" + to + "\",gap:\"" + unit + "\"}}}";
     query += "}";
     logger.info("Build JSONQuery is :- " + query);
     return query;
   }
 
-  /**
-   * @param stackField , xAxisField
-   * @return jsonString
-   * @hierarchy Term, Range
-   */
-  public String buildJsonFacetTermsRangeQuery(String stackField,
-                                              String xAxisField) {
-    String jsonQuery = "{ " + stackField + ": { type: terms,field:"
-      + stackField + "," + "facet: {   x: { type: terms, field:"
-      + xAxisField + ",mincount:0,sort:{index:asc}}}}}";
-    logger.info("Build JSONQuery is :- " + jsonQuery);
-    return jsonQuery;
-  }
-
-  /**
-   * @param stackField , xAxisField, function
-   * @return
-   * @hierarchy Term, Range
-   */
-  public String buidlJSONFacetRangeQueryForNumber(String stackField,
-                                                  String xAxisField, String function) {
-    String jsonQuery = "{ " + stackField + ": { type: terms,field:"
-      + stackField + "," + "facet: {   x:'" + function + "("
-      + xAxisField + ")'}}}}";
+  public String buildJsonFacetTermsRangeQuery(String stackField, String xAxisField) {
+    String jsonQuery = "{ " + stackField + ": { type: terms,field:" + stackField + "," + "facet: {   x: { type: terms, field:" + xAxisField + ",mincount:0,sort:{index:asc}}}}}";
     logger.info("Build JSONQuery is :- " + jsonQuery);
     return jsonQuery;
   }
 
-  /**
-   * @param stackField , xAxisField, function
-   * @return
-   * @hierarchy Query, T
-   */
-  public String buidlJSONFacetRangeQueryForSuggestion(
-    String originalFieldName, String valueToSuggest) {
-    String jsonQuery = "{y:{type:query,query:\"" + originalFieldName + ":"
-      + valueToSuggest + "\",facet:{x:{type:terms,field:"
-      + originalFieldName + "}}}}";
+  public String buidlJSONFacetRangeQueryForNumber(String stackField, String xAxisField, String function) {
+    String jsonQuery = "{ " + stackField + ": { type: terms,field:" + stackField + "," + "facet: {   x:'" + function + "(" + xAxisField + ")'}}}}";
     logger.info("Build JSONQuery is :- " + jsonQuery);
     return jsonQuery;
   }
 
-  public String buildListQuery(String paramValue, String solrFieldName,
-      CONDITION condition) {
-    if (!stringUtil.isEmpty(paramValue)) {
+  private String buildListQuery(String paramValue, String solrFieldName, Condition condition) {
+    if (!StringUtils.isBlank(paramValue)) {
       String[] values = paramValue.split(LogSearchConstants.LIST_SEPARATOR);
       switch (condition) {
       case OR:
@@ -438,10 +279,8 @@ public abstract class QueryGenerationBase extends QueryBase {
     return "";
   }
 
-
-  public void addFilterQueryFromArray(SolrQuery solrQuery, String jsonArrStr,
-      String solrFieldName, CONDITION condition) {
-    if (!stringUtil.isEmpty(jsonArrStr) && condition != null && solrQuery!=null) {
+  protected void addFilterQueryFromArray(SolrQuery solrQuery, String jsonArrStr, String solrFieldName, Condition condition) {
+    if (!StringUtils.isBlank(jsonArrStr) && condition != null && solrQuery != null) {
       Gson gson = new Gson();
       String[] arr = null;
       try {
@@ -462,16 +301,15 @@ public abstract class QueryGenerationBase extends QueryBase {
         query=null;
         logger.error("Invalid condition :" + condition.name());
       }
-      if (!stringUtil.isEmpty(query)) {
+      if (!StringUtils.isBlank(query)) {
         solrQuery.addFilterQuery(query);
       }
     }
   }
 
-  public void addFilter(SolrQuery solrQuery, String paramValue,
-      String solrFieldName, CONDITION condition) {
+  protected void addFilter(SolrQuery solrQuery, String paramValue, String solrFieldName, Condition condition) {
     String filterQuery = buildListQuery(paramValue, solrFieldName, condition);
-    if (!stringUtil.isEmpty(filterQuery)) {
+    if (!StringUtils.isBlank(filterQuery)) {
       if (solrQuery != null) {
         solrQuery.addFilterQuery(filterQuery);
       }

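The QueryGenerationBase hunks above are a mechanical cleanup: wrapped statements are joined onto single lines, the hand-rolled stringUtil.isEmpty checks become commons-lang StringUtils.isBlank, and the CONDITION enum is now referenced as Condition. A minimal, self-contained sketch of the resulting call pattern follows; it assumes SolrJ and commons-lang on the classpath, and the field names "level" and "logtime" are placeholders rather than the Log Search constants.

    // Minimal, self-contained sketch of the query-building style above
    // (not the Ambari class itself; "level" and "logtime" are placeholders).
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.commons.lang.StringUtils;
    import org.apache.solr.client.solrj.SolrQuery;

    public class QuerySketch {
      public static void main(String[] args) {
        SolrQuery solrQuery = new SolrQuery("*:*");

        // Include filter: only added when both field and value are non-blank,
        // mirroring setSingleIncludeFilter() after the StringUtils.isBlank change.
        String filterType = "level";
        String filterValue = "ERROR";
        if (!StringUtils.isBlank(filterType) && !StringUtils.isBlank(filterValue)) {
          solrQuery.addFilterQuery(filterType + ":" + filterValue);
        }

        // Single sort order: ascending only when the caller asks for "asc",
        // otherwise descending, the same shape as setSingleSortOrder().
        String sortBy = "logtime";
        String sortType = "desc";
        if (!StringUtils.isBlank(sortBy)) {
          SolrQuery.ORDER order = SolrQuery.ORDER.asc.toString().equalsIgnoreCase(sortType)
              ? SolrQuery.ORDER.asc : SolrQuery.ORDER.desc;
          List<SolrQuery.SortClause> sort = new ArrayList<SolrQuery.SortClause>();
          sort.add(SolrQuery.SortClause.create(sortBy, order));
          solrQuery.setSorts(sort);
        }

        // Prints the assembled query parameters (URL-encoded form).
        System.out.println(solrQuery);
      }
    }
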
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java
index 373f7ff..a906ceb 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java
@@ -113,7 +113,7 @@ public class ServiceLogsREST {
   public String getHosts(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addParam("q", request.getParameter("q"));
-    return logMgr.getHosts(searchCriteria);
+    return logMgr.getHosts();
   }
 
   @GET
@@ -126,7 +126,7 @@ public class ServiceLogsREST {
   public String getComponents(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addParam("q", request.getParameter("q"));
-    return logMgr.getComponents(searchCriteria);
+    return logMgr.getComponents();
   }
 
   @GET
@@ -174,7 +174,7 @@ public class ServiceLogsREST {
     searchCriteria
       .addParam("startDate", request.getParameter("start_time"));
     searchCriteria.addParam("endDate", request.getParameter("end_time"));
-    return logMgr.getLogLevelCount(searchCriteria);
+    return logMgr.getLogLevelCount();
   }
 
   @GET
@@ -192,7 +192,7 @@ public class ServiceLogsREST {
     searchCriteria
       .addParam("startDate", request.getParameter("start_time"));
     searchCriteria.addParam("endDate", request.getParameter("end_time"));
-    return logMgr.getComponentsCount(searchCriteria);
+    return logMgr.getComponentsCount();
   }
 
   @GET
@@ -216,7 +216,7 @@ public class ServiceLogsREST {
       .unescapeXml(request.getParameter("excludeQuery")));
     searchCriteria.addParam("includeQuery", StringEscapeUtils
       .unescapeXml(request.getParameter("includeQuery")));
-    return logMgr.getHostsCount(searchCriteria);
+    return logMgr.getHostsCount();
   }
 
   @GET

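The ServiceLogsREST hunks switch several endpoints to the new parameterless manager methods, so the request parameters copied into SearchCriteria are no longer passed along on those paths. A hedged sketch of the simplified endpoint shape is below; the class, path, and manager names are placeholders and this is illustrative only, not part of the commit.

    // Hedged sketch of the endpoint shape after this change; class, path and
    // manager names are placeholders, not the Ambari classes.
    import javax.servlet.http.HttpServletRequest;
    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.Produces;
    import javax.ws.rs.core.Context;

    @Path("/service/logs")
    public class HostsEndpointSketch {

      /** Placeholder for the manager dependency. */
      public interface HostListProvider {
        String getHosts();
      }

      private final HostListProvider logMgr;

      public HostsEndpointSketch(HostListProvider logMgr) {
        this.logMgr = logMgr;
      }

      @GET
      @Path("/hosts")
      @Produces("application/json")
      public String getHosts(@Context HttpServletRequest request) {
        // The real resource still copies "q" into a SearchCriteria, but the
        // manager no longer receives it, so this sketch drops it entirely.
        return logMgr.getHosts();
      }
    }
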
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/security/context/LogsearchContextHolder.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/security/context/LogsearchContextHolder.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/security/context/LogsearchContextHolder.java
deleted file mode 100644
index fb23cde..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/security/context/LogsearchContextHolder.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.security.context;
-
-public class LogsearchContextHolder {
-
-  private static final ThreadLocal<LogsearchSecurityContext> securityContextThreadLocal = new ThreadLocal<LogsearchSecurityContext>();
-
-  private LogsearchContextHolder() {
-
-  }
-
-  public static LogsearchSecurityContext getSecurityContext() {
-    return securityContextThreadLocal.get();
-  }
-
-  public static void setSecurityContext(LogsearchSecurityContext context) {
-    securityContextThreadLocal.set(context);
-  }
-
-  public static void resetSecurityContext() {
-    securityContextThreadLocal.remove();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/security/context/LogsearchSecurityContext.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/security/context/LogsearchSecurityContext.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/security/context/LogsearchSecurityContext.java
deleted file mode 100644
index 4a79525..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/security/context/LogsearchSecurityContext.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.security.context;
-
-import java.io.Serializable;
-
-import org.apache.ambari.logsearch.common.RequestContext;
-import org.apache.ambari.logsearch.common.UserSessionInfo;
-
-
-public class LogsearchSecurityContext implements Serializable{
-    private static final long serialVersionUID = 1L;
-    private UserSessionInfo userSession;
-    private RequestContext requestContext;
-
-    public UserSessionInfo getUserSession() {
-        return userSession;
-    }
-
-    public void setUserSession(UserSessionInfo userSession) {
-        this.userSession = userSession;
-    }
-
-    /**
-     * @return the requestContext
-     */
-    public RequestContext getRequestContext() {
-        return requestContext;
-    }
-
-    /**
-     * @param requestContext the requestContext to set
-     */
-    public void setRequestContext(RequestContext requestContext) {
-        this.requestContext = requestContext;
-    }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
index 8dd0fb9..814b8ee 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
@@ -37,27 +37,22 @@ import org.apache.ambari.logsearch.view.VBarGraphData;
 import org.apache.ambari.logsearch.view.VHost;
 import org.apache.ambari.logsearch.view.VNameValue;
 import org.apache.ambari.logsearch.view.VSummary;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
 @Component
 public class BizUtil {
-  static Logger logger = Logger.getLogger(BizUtil.class);
-
-  @Autowired
-  RESTErrorUtil restErrorUtil;
-  
-  @Autowired
-  StringUtil stringUtil;
+  private static final Logger logger = Logger.getLogger(BizUtil.class);
 
   public String convertObjectToNormalText(SolrDocumentList docList) {
     String textToSave = "";
     HashMap<String, String> blankFieldsMap = new HashMap<String, String>();
-    if(docList == null){
+    if (docList == null){
       return "no data";
     }
     if (docList.isEmpty()) {
@@ -67,66 +62,50 @@ public class BizUtil {
     if(docForBlankCaculation == null){
       return "no data";
     }
-    Collection<String> fieldsForBlankCaculation = docForBlankCaculation
-      .getFieldNames();
+    Collection<String> fieldsForBlankCaculation = docForBlankCaculation.getFieldNames();
 
     int maxLengthOfField = 0;
-    if(fieldsForBlankCaculation == null){
+    if (fieldsForBlankCaculation == null) {
       return "no data";
     }
-    for (String field : fieldsForBlankCaculation) {  
-      if (!stringUtil.isEmpty(field) && field.length() > maxLengthOfField){
+    for (String field : fieldsForBlankCaculation) {
+      if (!StringUtils.isBlank(field) && field.length() > maxLengthOfField) {
         maxLengthOfField = field.length();
       }
     }
 
     for (String field : fieldsForBlankCaculation) {
-      if(!stringUtil.isEmpty(field)){
-      blankFieldsMap
-        .put(field,
-          addBlanksToString(
-            maxLengthOfField - field.length(), field));
+      if (!StringUtils.isBlank(field)) {
+      blankFieldsMap.put(field, addBlanksToString(maxLengthOfField - field.length(), field));
       }
-    }   
+    }
 
     for (SolrDocument doc : docList) {
       if (doc != null) {
         StringBuffer textTowrite = new StringBuffer();
 
         if (doc.getFieldValue(LogSearchConstants.LOGTIME) != null) {
-          textTowrite.append(doc.getFieldValue(LogSearchConstants.LOGTIME)
-              .toString() + " ");
+          textTowrite.append(doc.getFieldValue(LogSearchConstants.LOGTIME).toString() + " ");
         }
         if (doc.getFieldValue(LogSearchConstants.SOLR_LEVEL) != null) {
-          textTowrite.append(
-              doc.getFieldValue(LogSearchConstants.SOLR_LEVEL).toString())
-              .append(" ");
+          textTowrite.append(doc.getFieldValue(LogSearchConstants.SOLR_LEVEL).toString()).append(" ");
         }
         if (doc.getFieldValue(LogSearchConstants.SOLR_THREAD_NAME) != null) {
-          textTowrite.append(
-              doc.getFieldValue(LogSearchConstants.SOLR_THREAD_NAME).toString()
-                  .trim()).append(" ");
+          textTowrite.append(doc.getFieldValue(LogSearchConstants.SOLR_THREAD_NAME).toString().trim()).append(" ");
         }
         if (doc.getFieldValue(LogSearchConstants.SOLR_LOGGER_NAME) != null) {
-          textTowrite.append(
-              doc.getFieldValue(LogSearchConstants.SOLR_LOGGER_NAME).toString()
-                  .trim()).append(" ");
+          textTowrite.append(doc.getFieldValue(LogSearchConstants.SOLR_LOGGER_NAME).toString().trim()).append(" ");
         }
-        if (doc.getFieldValue(LogSearchConstants.SOLR_FILE) != null
-            && doc.getFieldValue(LogSearchConstants.SOLR_LINE_NUMBER) != null) {
+        if (doc.getFieldValue(LogSearchConstants.SOLR_FILE) != null && doc.getFieldValue(LogSearchConstants.SOLR_LINE_NUMBER) != null) {
           textTowrite
-              .append(
-                  doc.getFieldValue(LogSearchConstants.SOLR_FILE).toString())
+              .append(doc.getFieldValue(LogSearchConstants.SOLR_FILE).toString())
               .append(":")
-              .append(
-                  doc.getFieldValue(LogSearchConstants.SOLR_LINE_NUMBER)
-                      .toString()).append(" ");
+              .append(doc.getFieldValue(LogSearchConstants.SOLR_LINE_NUMBER).toString())
+              .append(" ");
         }
         if (doc.getFieldValue(LogSearchConstants.SOLR_LOG_MESSAGE) != null) {
           textTowrite.append("- ")
-              .append(
-                  doc.getFieldValue(LogSearchConstants.SOLR_LOG_MESSAGE)
-                      .toString());
+              .append(doc.getFieldValue(LogSearchConstants.SOLR_LOG_MESSAGE).toString());
         }
         textTowrite.append("\n");
         textToSave += textTowrite.toString();
@@ -137,7 +116,7 @@ public class BizUtil {
 
   public VSummary buildSummaryForLogFile(SolrDocumentList docList) {
     VSummary vsummary = new VSummary();
-    if(docList == null || docList.isEmpty()){
+    if (CollectionUtils.isEmpty(docList)) {
       return vsummary;
     }
     int numLogs = 0;
@@ -150,14 +129,14 @@ public class BizUtil {
         String hostname = (String) doc.getFieldValue("host");
         String comp = (String) doc.getFieldValue("type");
         String level = (String) doc.getFieldValue("level");
-        if (stringUtil.isEmpty(level)) {
+        if (StringUtils.isBlank(level)) {
           level = "";
         }
         boolean newHost = true;
         for (VHost host : vHosts) {
           if (host != null && host.getName().equals(hostname)) {
             newHost = false;
-            if (stringUtil.isEmpty(comp)) {
+            if (StringUtils.isBlank(comp)) {
               Set<String> compList = host.getComponents();
               if (compList != null) {
                 compList.add(comp);
@@ -168,11 +147,11 @@ public class BizUtil {
         }
         if (newHost) {
           VHost vHost = new VHost();
-          if (!stringUtil.isEmpty(hostname)) {
+          if (!StringUtils.isBlank(hostname)) {
             vHost.setName(hostname);
           }
           Set<String> component = new LinkedHashSet<String>();
-          if (stringUtil.isEmpty(comp)) {
+          if (StringUtils.isBlank(comp)) {
             component.add(comp);
           }
           vHost.setComponents(component);
@@ -191,8 +170,8 @@ public class BizUtil {
     return vsummary;
   }
 
-  public String addBlanksToString(int count, String field) {
-    if (stringUtil.isEmpty(field)) {
+  private String addBlanksToString(int count, String field) {
+    if (StringUtils.isBlank(field)) {
       return field;
     }
     if (count > 0) {
@@ -203,8 +182,7 @@ public class BizUtil {
   }
 
   @SuppressWarnings({"unchecked", "rawtypes"})
-  public VBarDataList buildSummaryForTopCounts(
-    SimpleOrderedMap<Object> jsonFacetResponse,String innerJsonKey,String outerJsonKey) {
+  public VBarDataList buildSummaryForTopCounts(SimpleOrderedMap<Object> jsonFacetResponse,String innerJsonKey,String outerJsonKey) {
 
     VBarDataList vBarDataList = new VBarDataList();
 
@@ -217,14 +195,12 @@ public class BizUtil {
     if (userList.isEmpty()) {
       return vBarDataList;
     }
-    SimpleOrderedMap<Map<String, Object>> userMap = (SimpleOrderedMap<Map<String, Object>>) userList
-      .get(0);
+    SimpleOrderedMap<Map<String, Object>> userMap = (SimpleOrderedMap<Map<String, Object>>) userList.get(0);
     if (userMap == null) {
       logger.info("No top user details found");
       return vBarDataList;
     }
-    List<SimpleOrderedMap> userUsageList = (List<SimpleOrderedMap>) userMap
-      .get("buckets");
+    List<SimpleOrderedMap> userUsageList = (List<SimpleOrderedMap>) userMap.get("buckets");
     if(userUsageList == null){
       return vBarDataList;
     }
@@ -232,13 +208,12 @@ public class BizUtil {
       if (usageMap != null) {
         VBarGraphData vBarGraphData = new VBarGraphData();
         String userName = (String) usageMap.get("val");
-        if (!stringUtil.isEmpty(userName)) {
+        if (!StringUtils.isBlank(userName)) {
           vBarGraphData.setName(userName);
         }
         SimpleOrderedMap repoMap = (SimpleOrderedMap) usageMap.get(innerJsonKey);
         List<VNameValue> componetCountList = new ArrayList<VNameValue>();
-        List<SimpleOrderedMap> repoUsageList = (List<SimpleOrderedMap>) repoMap
-            .get("buckets");
+        List<SimpleOrderedMap> repoUsageList = (List<SimpleOrderedMap>) repoMap.get("buckets");
         if (repoMap != null) {
           for (SimpleOrderedMap repoUsageMap : repoUsageList) {
             VNameValue componetCount = new VNameValue();
@@ -265,9 +240,8 @@ public class BizUtil {
     return vBarDataList;
   }
   
-  public HashMap<String, String> sortHashMapByValues(
-    HashMap<String, String> passedMap) {
-    if(passedMap == null ){
+  public HashMap<String, String> sortHashMapByValues(HashMap<String, String> passedMap) {
+    if (passedMap == null ) {
       return passedMap;
     }
     HashMap<String, String> sortedMap = new LinkedHashMap<String, String>();

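BizUtil keeps its behaviour but drops the injected StringUtil and RESTErrorUtil in favour of commons-lang and commons-collections helpers. The text export still aligns columns by padding each field name to the width of the longest one; the sketch below shows that idea, using StringUtils.rightPad as a shortcut for the manual blank-padding in addBlanksToString (the field names are illustrative).

    // Sketch of the column alignment behind blankFieldsMap/addBlanksToString():
    // pad every field name to the longest one so the exported text lines up.
    // StringUtils.rightPad is used here as a shortcut for the manual padding.
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.commons.lang.StringUtils;

    public class PaddingSketch {
      public static void main(String[] args) {
        String[] fields = {"logtime", "level", "thread_name"};

        int max = 0;
        for (String f : fields) {
          if (!StringUtils.isBlank(f) && f.length() > max) {
            max = f.length();
          }
        }

        Map<String, String> padded = new HashMap<String, String>();
        for (String f : fields) {
          padded.put(f, StringUtils.rightPad(f, max));
        }

        for (Map.Entry<String, String> e : padded.entrySet()) {
          System.out.println("[" + e.getValue() + "]");  // equal-width names
        }
      }
    }
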
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/CommonUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/CommonUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/CommonUtil.java
index 320e589..8ec04f8 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/CommonUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/CommonUtil.java
@@ -18,49 +18,20 @@
  */
 package org.apache.ambari.logsearch.util;
 
-import java.io.Serializable;
 import java.security.SecureRandom;
 
-public class CommonUtil implements Serializable {
-  /**
-   *
-   */
-  private static final long serialVersionUID = -7284237762948427019L;
+import org.springframework.security.authentication.encoding.Md5PasswordEncoder;
 
-  static SecureRandom secureRandom = new SecureRandom();
-  static int counter = 0;
+public class CommonUtil {
+  private static SecureRandom secureRandom = new SecureRandom();
+  private static int counter = 0;
 
-  static public String genGUI() {
-    return System.currentTimeMillis() + "_" + secureRandom.nextInt(1000)
-      + "_" + counter++;
+  public static String genGUI() {
+    return System.currentTimeMillis() + "_" + secureRandom.nextInt(1000) + "_" + counter++;
   }
-
-  static public String genGUI(int length) {
-    String str = "";
-    for (int i = 0; i < length; i++) {
-      int ascii = genInteger(65, 90);
-      str += (char) ascii;
-    }
-    return str;
-  }
-
-  static public int genInteger() {
-    return secureRandom.nextInt();
-  }
-
-  static public int genInteger(int min, int max) {
-    int value = secureRandom.nextInt(max - min);
-    return value + min;
-  }
-
-  /**
-   * @return
-   */
-  public static long genLong() {
-    return secureRandom.nextLong();
-  }
-
-  static public int genInteger(int n) {
-    return secureRandom.nextInt();
+  
+  private static final Md5PasswordEncoder md5Encoder = new Md5PasswordEncoder();
+  public static String encryptPassword(String username, String password) {
+    return md5Encoder.encodePassword(password, username);
   }
 }

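CommonUtil is trimmed to the helpers that are actually used and gains encryptPassword, built on Spring Security's legacy Md5PasswordEncoder with the username acting as the salt. A small sketch of that contract, assuming spring-security-core 3.x on the classpath:

    // Sketch of the new encryptPassword() contract: Spring Security's legacy
    // Md5PasswordEncoder hashes the raw password with the username as salt.
    import org.springframework.security.authentication.encoding.Md5PasswordEncoder;

    public class PasswordHashSketch {
      public static void main(String[] args) {
        Md5PasswordEncoder encoder = new Md5PasswordEncoder();

        // Same call shape as CommonUtil.encryptPassword("admin", "secret"):
        // raw password first, username second (as the salt).
        String stored = encoder.encodePassword("secret", "admin");
        System.out.println(stored);

        // Verification goes through the same encoder.
        System.out.println(encoder.isPasswordValid(stored, "secret", "admin")); // true
      }
    }
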
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
index 2661150..88f92a2 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
@@ -27,12 +27,13 @@ import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
 import org.apache.ambari.logsearch.manager.MgrBase;
+import org.apache.commons.lang.ArrayUtils;
 import org.apache.log4j.Logger;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 
 public class ConfigUtil {
-  static Logger logger = Logger.getLogger(MgrBase.class);
+  private static final Logger logger = Logger.getLogger(MgrBase.class);
 
   public static HashMap<String, String> serviceLogsColumnMapping = new HashMap<String, String>();
 
@@ -43,9 +44,7 @@ public class ConfigUtil {
     initializeColumnMapping();
   }
 
-  private static void intializeUISolrColumnMapping(
-    String columnMappingArray[],
-    HashMap<String, String> columnMappingMap) {
+  private static void intializeUISolrColumnMapping(String columnMappingArray[], HashMap<String, String> columnMappingMap) {
 
     if (columnMappingArray != null && columnMappingArray.length > 0) {
       for (String columnMapping : columnMappingArray) {
@@ -54,32 +53,24 @@ public class ConfigUtil {
           String solrField = mapping[0];
           String uiField = mapping[1];
           
-          columnMappingMap.put(solrField + LogSearchConstants.SOLR_SUFFIX,
-              uiField);
-          columnMappingMap.put(uiField + LogSearchConstants.UI_SUFFIX,
-              solrField);
+          columnMappingMap.put(solrField + LogSearchConstants.SOLR_SUFFIX, uiField);
+          columnMappingMap.put(uiField + LogSearchConstants.UI_SUFFIX, solrField);
         }
       }
     }
   }
   private static void initializeColumnMapping() {
-    String serviceLogsColumnMappingArray[] = PropertiesUtil
-      .getPropertyStringList("logsearch.solr.service.logs.column.mapping");
-    String auditLogsColumnMappingArray[] = PropertiesUtil
-      .getPropertyStringList("logsearch.solr.audit.logs.column.mapping");
+    String serviceLogsColumnMappingArray[] = PropertiesUtil.getPropertyStringList("logsearch.solr.service.logs.column.mapping");
+    String auditLogsColumnMappingArray[] = PropertiesUtil.getPropertyStringList("logsearch.solr.audit.logs.column.mapping");
 
     // Initializing column mapping for Service Logs
-    intializeUISolrColumnMapping(serviceLogsColumnMappingArray,
-      serviceLogsColumnMapping);
+    intializeUISolrColumnMapping(serviceLogsColumnMappingArray, serviceLogsColumnMapping);
 
     // Initializing column mapping for Audit Logs
-    intializeUISolrColumnMapping(auditLogsColumnMappingArray,
-      auditLogsColumnMapping);
+    intializeUISolrColumnMapping(auditLogsColumnMappingArray, auditLogsColumnMapping);
   }
 
-  
-  public static void extractSchemaFieldsName(String responseString,
-      HashMap<String, String> schemaFieldsNameMap,
+  public static void extractSchemaFieldsName(String responseString, HashMap<String, String> schemaFieldsNameMap,
       HashMap<String, String> schemaFieldTypeMap) {
     try {
       JSONObject jsonObject = new JSONObject(responseString);
@@ -118,17 +109,14 @@ public class ConfigUtil {
       schemaFieldsNameMap.putAll(_schemaFieldsNameMap);
       schemaFieldTypeMap.putAll(_schemaFieldTypeMap);
     } catch (Exception e) {
-      logger.error(e + "Credentials not specified in logsearch.properties "
-          + MessageEnums.ERROR_SYSTEM);
+      logger.error(e + "Credentials not specified in logsearch.properties " + MessageEnums.ERROR_SYSTEM);
     }
   }
 
   @SuppressWarnings("rawtypes")
-  public static void getSchemaFieldsName(String excludeArray[],
-      List<String> fieldNames, SolrDaoBase solrDaoBase) {
+  public static void getSchemaFieldsName(String excludeArray[], List<String> fieldNames, SolrDaoBase solrDaoBase) {
     if (!solrDaoBase.schemaFieldsNameMap.isEmpty()) {
-      Iterator iteratorSechmaFieldsName = solrDaoBase.schemaFieldsNameMap
-          .entrySet().iterator();
+      Iterator iteratorSechmaFieldsName = solrDaoBase.schemaFieldsNameMap.entrySet().iterator();
       while (iteratorSechmaFieldsName.hasNext()) {
         Map.Entry fieldName = (Map.Entry) iteratorSechmaFieldsName.next();
         String field = "" + fieldName.getKey();
@@ -140,7 +128,7 @@ public class ConfigUtil {
   }
 
   private static boolean isExclude(String name, String excludeArray[]) {
-    if (excludeArray != null && excludeArray.length > 0) {
+    if (!ArrayUtils.isEmpty(excludeArray)) {
       for (String exclude : excludeArray) {
         if (name.equals(exclude)){
           return true;

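ConfigUtil's column mapping is unchanged in substance: each "solrField:uiField" pair from the logsearch.solr.*.column.mapping properties is stored twice, once per lookup direction, keyed with a suffix. The sketch below reproduces that shape; the suffix strings are assumed placeholders standing in for LogSearchConstants.SOLR_SUFFIX and UI_SUFFIX.

    // Sketch of the bidirectional mapping built by intializeUISolrColumnMapping():
    // each "solrField:uiField" pair is stored twice, once per lookup direction,
    // keyed with a suffix. The suffix values below are assumed placeholders for
    // LogSearchConstants.SOLR_SUFFIX and LogSearchConstants.UI_SUFFIX.
    import java.util.HashMap;
    import java.util.Map;

    public class ColumnMappingSketch {
      private static final String SOLR_SUFFIX = "._solr";  // assumed placeholder
      private static final String UI_SUFFIX = "._ui";      // assumed placeholder

      public static void main(String[] args) {
        String[] pairs = {"log_message:Message", "level:Level"};
        Map<String, String> mapping = new HashMap<String, String>();

        for (String pair : pairs) {
          String[] m = pair.split(":");
          if (m.length > 1) {
            mapping.put(m[0] + SOLR_SUFFIX, m[1]);  // solr field -> UI label
            mapping.put(m[1] + UI_SUFFIX, m[0]);    // UI label  -> solr field
          }
        }

        System.out.println(mapping.get("log_message" + SOLR_SUFFIX)); // Message
        System.out.println(mapping.get("Level" + UI_SUFFIX));         // level
      }
    }
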
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java
index 7425aaa..516d828 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java
@@ -30,35 +30,30 @@ import java.util.TimeZone;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
 @Component
 public class DateUtil {
 
-  static Logger logger = Logger.getLogger(DateUtil.class);
-
-  @Autowired
-  StringUtil stringUtil;
+  private static final Logger logger = Logger.getLogger(DateUtil.class);
 
   private DateUtil() {
 
   }
 
   public String addOffsetToDate(String date, Long utcOffset, String dateFormat) {
-    if (stringUtil.isEmpty(date)) {
+    if (StringUtils.isBlank(date)) {
       logger.debug("input date is empty or null.");
       return null;
     }
     if (utcOffset == null) {
-      logger
-          .debug("Utc offset is null, Return input date without adding offset.");
+      logger.debug("Utc offset is null, Return input date without adding offset.");
       return date;
     }
-    if (stringUtil.isEmpty(dateFormat)) {
-      logger
-          .debug("dateFormat is null or empty, Return input date without adding offset.");
+    if (StringUtils.isBlank(dateFormat)) {
+      logger.debug("dateFormat is null or empty, Return input date without adding offset.");
       return date;
     }
     String retDate = "";
@@ -67,10 +62,9 @@ public class DateUtil {
       if (date.contains(".")) {
         modifiedDate = date.replace(".", ",");
       }
-      SimpleDateFormat formatter = new SimpleDateFormat(dateFormat,
-          Locale.ENGLISH);
+      SimpleDateFormat formatter = new SimpleDateFormat(dateFormat, Locale.ENGLISH);
       Date startDate = formatter.parse(modifiedDate);
-      long toWithOffset = getTimeWithOffset(startDate, utcOffset, dateFormat);
+      long toWithOffset = getTimeWithOffset(startDate, utcOffset);
       Calendar calendar = Calendar.getInstance();
       calendar.setTimeInMillis(toWithOffset);
       retDate = formatter.format(calendar.getTime());
@@ -80,98 +74,28 @@ public class DateUtil {
     return retDate;
   }
 
-  public long getTimeWithOffset(Date date, Long utcOffset, String dateFormate) {
+  private long getTimeWithOffset(Date date, Long utcOffset) {
     return date.getTime() + TimeUnit.MINUTES.toMillis(utcOffset);
   }
 
-  public Date getUTCDate(long epoh) {
-    if (epoh == 0) {
-      return null;
-    }
-    try {
-      TimeZone gmtTimeZone = TimeZone.getTimeZone("GMT+0");
-      Calendar local = Calendar.getInstance();
-      int offset = local.getTimeZone().getOffset(epoh);
-      GregorianCalendar utc = new GregorianCalendar(gmtTimeZone);
-      utc.setTimeInMillis(epoh);
-      utc.add(Calendar.MILLISECOND, -offset);
-      return utc.getTime();
-    } catch (Exception ex) {
-      return null;
-    }
-  }
-
-  public String dateToString(Date date, String dateFormat) {
-    if (date == null || dateFormat == null || dateFormat.isEmpty()) {
-      return "";
-    }
-    SimpleDateFormat formatter = new SimpleDateFormat(dateFormat, Locale.ENGLISH);
-    TimeZone timeZone = TimeZone.getTimeZone("GMT");
-    formatter.setTimeZone(timeZone);
-    return formatter.format(date);
-  }
-
   public String getCurrentDateInString() {
     DateFormat df = new SimpleDateFormat("MM-dd-yyyy HH:mm:ss", Locale.ENGLISH);
     Date today = Calendar.getInstance().getTime();
     return df.format(today);
   }
 
-  public String getTimeInSolrFormat(String timeString) {
-    String time;
-    if (stringUtil.isEmpty(timeString)) {
-      return null;
-    }
-    time = timeString.replace(" ", "T");
-    time = time.replace(",", ".");
-    time = time + "Z";
-
-    return time;
-  }
-  
   public Date getTodayFromDate() {
     Calendar c = new GregorianCalendar();
-    c.set(Calendar.HOUR_OF_DAY, 0); 
+    c.set(Calendar.HOUR_OF_DAY, 0);
     c.set(Calendar.MINUTE, 0);
     c.set(Calendar.SECOND, 0);
     return c.getTime();
   }
 
-  public Date addHoursToDate(Date date, int hours) {
-    GregorianCalendar greorianCalendar = new GregorianCalendar();
-    greorianCalendar.setTime(date);
-    greorianCalendar.add(GregorianCalendar.HOUR_OF_DAY, hours);
-    return greorianCalendar.getTime();
-  }
-
-  public Date addMinsToDate(Date date, int mins) {
-    GregorianCalendar greorianCalendar = new GregorianCalendar();
-    greorianCalendar.setTime(date);
-    greorianCalendar.add(GregorianCalendar.MINUTE, mins);
-    return greorianCalendar.getTime();
-  }
-
-  public Date addSecondsToDate(Date date, int secs) {
-    GregorianCalendar greorianCalendar = new GregorianCalendar();
-    greorianCalendar.setTime(date);
-    greorianCalendar.add(GregorianCalendar.SECOND, secs);
-    return greorianCalendar.getTime();
-  }
-
-  public Date addMilliSecondsToDate(Date date, int secs) {
-    GregorianCalendar greorianCalendar = new GregorianCalendar();
-    greorianCalendar.setTime(date);
-    greorianCalendar.add(GregorianCalendar.MILLISECOND, secs);
-    return greorianCalendar.getTime();
-  }
-
-  public String convertGivenDateFormatToSolrDateFormat(Date date)
-    throws ParseException {
+  public String convertGivenDateFormatToSolrDateFormat(Date date) throws ParseException {
     String time = date.toString();
-    SimpleDateFormat input = new SimpleDateFormat(
-      "EEE MMM dd HH:mm:ss zzz yyyy", Locale.ENGLISH);
-    SimpleDateFormat output = new SimpleDateFormat(
-      LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z, Locale.ENGLISH);
+    SimpleDateFormat input = new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy", Locale.ENGLISH);
+    SimpleDateFormat output = new SimpleDateFormat(LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z, Locale.ENGLISH);
     Date d = input.parse(time);
     TimeZone timeZone = TimeZone.getTimeZone("UTC");
     output.setTimeZone(timeZone);
@@ -183,53 +107,36 @@ public class DateUtil {
     if (date == null) {
       return "";
     }
-    SimpleDateFormat formatter = new SimpleDateFormat(
-      LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z, Locale.ENGLISH);
+    SimpleDateFormat formatter = new SimpleDateFormat(LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z, Locale.ENGLISH);
     TimeZone timeZone = TimeZone.getTimeZone("GMT");
     formatter.setTimeZone(timeZone);
 
     return formatter.format(date);
   }
 
-  public String convertSolrDateToNormalDateFormat(long d, long utcOffset)
-    throws ParseException {
+  public String convertSolrDateToNormalDateFormat(long d, long utcOffset) throws ParseException {
     Date date = new Date(d);
-    SimpleDateFormat formatter = new SimpleDateFormat(
-      LogSearchConstants.SOLR_DATE_FORMAT, Locale.ENGLISH);
+    SimpleDateFormat formatter = new SimpleDateFormat(LogSearchConstants.SOLR_DATE_FORMAT, Locale.ENGLISH);
     TimeZone timeZone = TimeZone.getTimeZone("GMT");
     formatter.setTimeZone(timeZone);
     String stringDate = formatter.format(date);
-    return addOffsetToDate(stringDate, Long.parseLong("" + utcOffset),
-      LogSearchConstants.SOLR_DATE_FORMAT);
+    return addOffsetToDate(stringDate, Long.parseLong("" + utcOffset), LogSearchConstants.SOLR_DATE_FORMAT);
 
   }
 
-  public Date convertStringToDate(String dateString) {
-    SimpleDateFormat formatter = new SimpleDateFormat(
-      LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z, Locale.ENGLISH);
-    TimeZone timeZone = TimeZone.getTimeZone("GMT");
-    formatter.setTimeZone(timeZone);
-    try {
-      return formatter.parse(dateString);
-    } catch (ParseException e) {
-      //do nothing
-    }
-    return null;
-  }
-  
   public boolean isDateValid(String value) {
-    if(stringUtil.isEmpty(value)){
+    if (StringUtils.isBlank(value)) {
       return false;
     }
     Date date = null;
     try {
-        SimpleDateFormat sdf = new SimpleDateFormat(LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z);
-        date = sdf.parse(value);
-        if (!value.equals(sdf.format(date))) {
-            date = null;
-        }
+      SimpleDateFormat sdf = new SimpleDateFormat(LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z);
+      date = sdf.parse(value);
+      if (!value.equals(sdf.format(date))) {
+        date = null;
+      }
     } catch (Exception ex) {
-      //do nothing
+      // do nothing
     }
     return date != null;
   }

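DateUtil loses its unused helpers and its StringUtil dependency; what remains is GMT-based Solr date formatting plus addOffsetToDate, which shifts a parsed date by a UTC offset given in minutes. A runnable sketch of both follows; the date pattern is an assumed stand-in for LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z, not the constant's actual value.

    // Sketch of the date handling that remains in DateUtil: format a Date for
    // Solr in GMT, then shift a parsed date by a UTC offset given in minutes,
    // as addOffsetToDate() does. The pattern string is an assumed stand-in for
    // LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z.
    import java.text.SimpleDateFormat;
    import java.util.Calendar;
    import java.util.Date;
    import java.util.Locale;
    import java.util.TimeZone;
    import java.util.concurrent.TimeUnit;

    public class DateSketch {
      public static void main(String[] args) throws Exception {
        SimpleDateFormat solrFormat =
            new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.ENGLISH);
        solrFormat.setTimeZone(TimeZone.getTimeZone("GMT"));

        Date now = new Date();
        String solrDate = solrFormat.format(now);
        System.out.println(solrDate);

        // Parse, add the offset in minutes, format again with the same formatter.
        long utcOffsetMinutes = 330; // e.g. IST
        Date parsed = solrFormat.parse(solrDate);
        Calendar cal = Calendar.getInstance();
        cal.setTimeInMillis(parsed.getTime() + TimeUnit.MINUTES.toMillis(utcOffsetMinutes));
        System.out.println(solrFormat.format(cal.getTime()));
      }
    }
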
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
index 658635c..e3c2063 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/FileUtil.java
@@ -39,24 +39,19 @@ import org.springframework.stereotype.Component;
 @Component
 public class FileUtil {
 
-  static Logger logger = Logger.getLogger(FileUtil.class);
+  private static final Logger logger = Logger.getLogger(FileUtil.class);
 
   @Autowired
-  RESTErrorUtil restErrorUtil;
+  private RESTErrorUtil restErrorUtil;
 
-  @SuppressWarnings("resource")
   public Response saveToFile(String text, String fileName, VSummary vsummary) {
     String mainExportedFile = "";
     FileOutputStream fis = null;
     try {
-      mainExportedFile = mainExportedFile
-        + "**********************Summary**********************\n";
-      mainExportedFile = mainExportedFile + "Number of Logs : "
-        + vsummary.getNumberLogs() + "\n";
-      mainExportedFile = mainExportedFile + "From           : "
-        + vsummary.getFrom() + "\n";
-      mainExportedFile = mainExportedFile + "To             : "
-        + vsummary.getTo() + "\n";
+      mainExportedFile = mainExportedFile + "**********************Summary**********************\n";
+      mainExportedFile = mainExportedFile + "Number of Logs : " + vsummary.getNumberLogs() + "\n";
+      mainExportedFile = mainExportedFile + "From           : " + vsummary.getFrom() + "\n";
+      mainExportedFile = mainExportedFile + "To             : " + vsummary.getTo() + "\n";
 
       List<VHost> hosts = vsummary.getHosts();
       String blankCharacterForHost = String.format("%-8s", "");
@@ -89,42 +84,30 @@ public class FileUtil {
           blankCharacterForHost = String.format("%-3s", blankCharacterForHost);
         }
         if (numberHost == 1) {
-          mainExportedFile = mainExportedFile + "Host"
-            + blankCharacterForHost + "   : " + h + " [" + c
-            + "] " + "\n";
+          mainExportedFile = mainExportedFile + "Host" + blankCharacterForHost + "   : " + h + " [" + c + "] " + "\n";
         } else if (numberHost > 1) {
-          mainExportedFile = mainExportedFile + "Host_" + numberHost
-            + blankCharacterForHost + " : " + h + " [" + c
-            + "] " + "\n";
+          mainExportedFile = mainExportedFile + "Host_" + numberHost + blankCharacterForHost + " : " + h + " [" + c + "] " + "\n";
         }
 
       }
-      mainExportedFile = mainExportedFile + "Levels"+String.format("%-9s", blankCharacterForHost)+": "
-        + vsummary.getLevels() + "\n";
-      mainExportedFile = mainExportedFile + "Format"+String.format("%-9s", blankCharacterForHost)+": "
-        + vsummary.getFormat() + "\n";
+      mainExportedFile = mainExportedFile + "Levels"+String.format("%-9s", blankCharacterForHost)+": " + vsummary.getLevels() + "\n";
+      mainExportedFile = mainExportedFile + "Format"+String.format("%-9s", blankCharacterForHost)+": " + vsummary.getFormat() + "\n";
       mainExportedFile = mainExportedFile + "\n";
 
-      mainExportedFile = mainExportedFile + "Included String: ["
-        + vsummary.getIncludeString() + "]\n\n";
-      mainExportedFile = mainExportedFile + "Excluded String: ["
-        + vsummary.getExcludeString() + "]\n\n";
-      mainExportedFile = mainExportedFile
-        + "************************Logs***********************"
-        + "\n";
+      mainExportedFile = mainExportedFile + "Included String: [" + vsummary.getIncludeString() + "]\n\n";
+      mainExportedFile = mainExportedFile + "Excluded String: [" + vsummary.getExcludeString() + "]\n\n";
+      mainExportedFile = mainExportedFile + "************************Logs***********************" + "\n";
       mainExportedFile = mainExportedFile + text + "\n";
       File file = File.createTempFile(fileName, vsummary.getFormat());
       fis = new FileOutputStream(file);
       fis.write(mainExportedFile.getBytes());
       return Response
         .ok(file, MediaType.APPLICATION_OCTET_STREAM)
-        .header("Content-Disposition",
-          "attachment;filename=" + fileName
-            + vsummary.getFormat()).build();
+        .header("Content-Disposition", "attachment;filename=" + fileName + vsummary.getFormat())
+        .build();
     } catch (Exception e) {
       logger.error(e.getMessage());
-      throw restErrorUtil.createRESTException(e.getMessage(),
-        MessageEnums.ERROR_SYSTEM);
+      throw restErrorUtil.createRESTException(e.getMessage(), MessageEnums.ERROR_SYSTEM);
     } finally {
       if (fis != null) {
         try {
@@ -135,13 +118,8 @@ public class FileUtil {
     }
   }
 
-  /**
-   * @param filename
-   * @return
-   */
   public File getFileFromClasspath(String filename) {
-    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader()
-      .getResource(filename);
+    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader().getResource(filename);
     logger.debug("File Complete URI :" + fileCompleteUrl);
     File file = null;
     try {

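FileUtil.saveToFile still assembles the exported summary by repeated String concatenation; the reformatting above only joins the wrapped lines. As a design note, a StringBuilder would produce the same text without the intermediate copies; a minimal sketch with placeholder summary values:

    // Design note: saveToFile() still grows the export text with repeated String
    // concatenation. A StringBuilder produces the same output without the
    // intermediate copies; the summary values below are placeholders.
    public class ExportHeaderSketch {
      public static void main(String[] args) {
        StringBuilder out = new StringBuilder();
        out.append("**********************Summary**********************\n");
        out.append("Number of Logs : ").append(42).append('\n');
        out.append("From           : ").append("2016-08-18T00:00:00.000Z").append('\n');
        out.append("To             : ").append("2016-08-18T23:59:59.999Z").append('\n');
        out.append("************************Logs***********************\n");
        System.out.print(out);
      }
    }
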
http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java
index 4703c58..5332d18 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/JSONUtil.java
@@ -22,15 +22,14 @@ package org.apache.ambari.logsearch.util;
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.ambari.logsearch.common.MessageEnums;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
-import org.codehaus.jackson.JsonGenerationException;
 import org.codehaus.jackson.JsonParseException;
 import org.codehaus.jackson.map.JsonMappingException;
 import org.codehaus.jackson.map.ObjectMapper;
@@ -47,112 +46,51 @@ import com.google.gson.GsonBuilder;
 @Component
 public class JSONUtil {
 
-  static Logger logger = Logger.getLogger(JSONUtil.class);
+  private static final Logger logger = Logger.getLogger(JSONUtil.class);
 
   @Autowired
-  RESTErrorUtil restErrorUtil;
+  private RESTErrorUtil restErrorUtil;
 
-  @Autowired
-  StringUtil stringUtil;
-
-  public final static String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";
-  Gson gson = new GsonBuilder().setDateFormat(DATE_FORMAT).create();
-
-  // Conversion from JSONArray to List<String>
-  public static List<String> JSONToList(JSONArray jarray) throws JSONException {
-    ArrayList<String> list = new ArrayList<String>();
-    JSONArray jsonArray = jarray;
-    if (jsonArray != null) {
-      int len = jsonArray.length();
-      for (int i = 0; i < len; i++) {
-        list.add(jsonArray.get(i).toString());
-      }
-    }
-    return list;
-  }
-
-  @SuppressWarnings("unchecked")
-  public HashMap<String, String> jsonToMap(String jsonStr) {
-    if (stringUtil.isEmpty(jsonStr)) {
-      logger.info("jsonString is empty, cannot conver to map");
-      return null;
-    }
-    ObjectMapper mapper = new ObjectMapper();
-    try {
-      Object tempObject = mapper.readValue(jsonStr,
-          new TypeReference<HashMap<String, String>>() {
-          });
-      return (HashMap<String, String>) tempObject;
-
-    } catch (JsonParseException e) {
-      throw restErrorUtil.createRESTException(
-          "Invalid input data: " + e.getMessage(),
-          MessageEnums.INVALID_INPUT_DATA);
-    } catch (JsonMappingException e) {
-      throw restErrorUtil.createRESTException(
-          "Invalid input data: " + e.getMessage(),
-          MessageEnums.INVALID_INPUT_DATA);
-    } catch (IOException e) {
-      throw restErrorUtil.createRESTException(
-          "Invalid input data: " + e.getMessage(),
-          MessageEnums.INVALID_INPUT_DATA);
-    }
-
-  }
+  private static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";
+  private Gson gson = new GsonBuilder().setDateFormat(DATE_FORMAT).create();
 
   @SuppressWarnings("unchecked")
   public HashMap<String, Object> jsonToMapObject(String jsonStr) {
-    if (stringUtil.isEmpty(jsonStr)) {
+    if (StringUtils.isBlank(jsonStr)) {
       logger.info("jsonString is empty, cannot conver to map");
       return null;
     }
     ObjectMapper mapper = new ObjectMapper();
     try {
-      Object tempObject = mapper.readValue(jsonStr,
-          new TypeReference<HashMap<String, Object>>() {
-          });
+      Object tempObject = mapper.readValue(jsonStr, new TypeReference<HashMap<String, Object>>() {});
       return (HashMap<String, Object>) tempObject;
 
     } catch (JsonParseException e) {
-      throw restErrorUtil.createRESTException(
-          "Invalid input data: " + e.getMessage(),
-          MessageEnums.INVALID_INPUT_DATA);
+      throw restErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
     } catch (JsonMappingException e) {
-      throw restErrorUtil.createRESTException(
-          "Invalid input data: " + e.getMessage(),
-          MessageEnums.INVALID_INPUT_DATA);
+      throw restErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
     } catch (IOException e) {
-      throw restErrorUtil.createRESTException(
-          "Invalid input data: " + e.getMessage(),
-          MessageEnums.INVALID_INPUT_DATA);
+      throw restErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
     }
 
   }
 
   @SuppressWarnings("unchecked")
   public List<HashMap<String, Object>> jsonToMapObjectList(String jsonStr) {
-    if (stringUtil.isEmpty(jsonStr)) {
+    if (StringUtils.isBlank(jsonStr)) {
       return null;
     }
     ObjectMapper mapper = new ObjectMapper();
     try {
-      Object tempObject = mapper.readValue(jsonStr,
-          new TypeReference<List<HashMap<String, Object>>>() {
-          });
+      Object tempObject = mapper.readValue(jsonStr, new TypeReference<List<HashMap<String, Object>>>() {});
       return (List<HashMap<String, Object>>) tempObject;
 
     } catch (JsonParseException e) {
-      throw restErrorUtil.createRESTException(
-          "Invalid input data: " + e.getMessage(),
-          MessageEnums.INVALID_INPUT_DATA);
+      throw restErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
     } catch (JsonMappingException e) {
-      throw restErrorUtil.createRESTException(
-          "Invalid input data: " + e.getMessage(),
-          MessageEnums.INVALID_INPUT_DATA);
+      throw restErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
     } catch (IOException e) {
-      throw restErrorUtil.createRESTException(
-          "Invalid input data: " + e.getMessage(),
-          MessageEnums.INVALID_INPUT_DATA);
+      throw restErrorUtil.createRESTException("Invalid input data: " + e.getMessage(), MessageEnums.INVALID_INPUT_DATA);
     }
 
   }
@@ -170,21 +108,11 @@ public class JSONUtil {
     return true;
   }
 
-  /**
-   * @param fileName
-   * @return
-   */
   public HashMap<String, Object> readJsonFromFile(File jsonFile) {
     ObjectMapper mapper = new ObjectMapper();
     try {
-      HashMap<String, Object> jsonmap = mapper.readValue(jsonFile,
-          new TypeReference<HashMap<String, Object>>() {
-          });
+      HashMap<String, Object> jsonmap = mapper.readValue(jsonFile, new TypeReference<HashMap<String, Object>>() {});
       return jsonmap;
-    } catch (JsonParseException e) {
-      logger.error(e, e.getCause());
-    } catch (JsonMappingException e) {
-      logger.error(e, e.getCause());
     } catch (IOException e) {
       logger.error(e, e.getCause());
     }
@@ -197,10 +125,6 @@ public class JSONUtil {
       String json = om.writeValueAsString(map);
 
       return json;
-    } catch (JsonGenerationException e) {
-      logger.error(e, e.getCause());
-    } catch (JsonMappingException e) {
-      logger.error(e, e.getCause());
     } catch (IOException e) {
       logger.error(e, e.getCause());
     }
@@ -209,13 +133,8 @@ public class JSONUtil {
 
   /**
    * WRITE JOSN IN FILE ( Delete existing file and create new file)
-   * 
-   * @param jsonStr
-   * @param outputFile
-   * @param beautify
    */
-  public synchronized void writeJSONInFile(String jsonStr, File outputFile,
-      boolean beautify) {
+  public synchronized void writeJSONInFile(String jsonStr, File outputFile, boolean beautify) {
     FileWriter fileWriter = null;
     if (outputFile == null) {
       logger.error("user_pass json file can't be null.");
@@ -231,14 +150,11 @@ public class JSONUtil {
         if (beautify) {
           ObjectMapper mapper = new ObjectMapper();
           Object json = mapper.readValue(jsonStr, Object.class);
-          jsonStr = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(
-              json);
+          jsonStr = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(json);
         }
         fileWriter.write(jsonStr);
       } else {
-        logger
-            .error("Applcation does not have permission to update file to write enc_password. file="
-                + outputFile.getAbsolutePath());
+        logger.error("Applcation does not have permission to update file to write enc_password. file="+ outputFile.getAbsolutePath());
       }
     } catch (IOException e) {
       logger.error("Error writing to password file.", e.getCause());
@@ -248,7 +164,6 @@ public class JSONUtil {
           fileWriter.flush();
           fileWriter.close();
         } catch (Exception exception) {
-          // ignore
           logger.error(exception);
         }
       }
@@ -265,14 +180,9 @@ public class JSONUtil {
 
   /**
    * GET VALUES FROM JSON BY GIVING KEY RECURSIVELY
-   * 
-   * @param jsonStr
-   * @param keyName
-   * @return
    */
   @SuppressWarnings("rawtypes")
-  public static String getValuesOfKey(String jsonStr, String keyName,
-      List<String> values) {
+  public static String getValuesOfKey(String jsonStr, String keyName, List<String> values) {
     if (values == null) {
       return null;
     }
@@ -318,13 +228,11 @@ public class JSONUtil {
           // ignore
         }
 
-      } else if ((((JSONObject) jsonObj).optJSONArray(key) != null)
-          || (((JSONObject) jsonObj).optJSONObject(key) != null)) {
+      } else if ((((JSONObject) jsonObj).optJSONArray(key) != null) || (((JSONObject) jsonObj).optJSONObject(key) != null)) {
 
         String str = null;
         try {
-          str = getValuesOfKey("" + ((JSONObject) jsonObj).getString(key),
-              keyName, values);
+          str = getValuesOfKey("" + ((JSONObject) jsonObj).getString(key), keyName, values);
         } catch (Exception e) {
           // ignore
         }
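
For readers skimming the refactor above: jsonToMapObject keeps the same Jackson ObjectMapper + TypeReference idiom, only collapsed onto single lines. Below is a minimal, hedged sketch of that idiom in isolation; it assumes Jackson 1.x (org.codehaus.jackson), which is what other classes in this patch import, and the sample JSON values are illustrative only.

import java.util.HashMap;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;

public class JsonToMapSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Same pattern as jsonToMapObject: parse a JSON object into a HashMap<String, Object>
    HashMap<String, Object> map = mapper.readValue(
        "{\"level\":\"WARN\",\"host\":\"c6401.ambari.apache.org\"}",
        new TypeReference<HashMap<String, Object>>() {});
    System.out.println(map.get("level")); // prints WARN
  }
}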

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/LogsearchPropertiesConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/LogsearchPropertiesConfiguration.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/LogsearchPropertiesConfiguration.java
deleted file mode 100644
index c3ef20f..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/LogsearchPropertiesConfiguration.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.util;
-
-import java.io.File;
-import java.net.URL;
-import java.util.HashMap;
-import java.util.Iterator;
-
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.log4j.Logger;
-
-public class LogsearchPropertiesConfiguration extends PropertiesConfiguration {
-
-  Logger logger = Logger.getLogger(LogsearchPropertiesConfiguration.class);
-
-  public LogsearchPropertiesConfiguration() {
-    super();
-  }
-
-
-  public static LogsearchPropertiesConfiguration getInstance() {
-    return new LogsearchPropertiesConfiguration();
-  }
-
-  public void load(File file) {
-    if (!file.exists()) {
-      logger.error("File :" + file.getAbsolutePath() + " not exists");
-      return;
-    }
-    try {
-      super.load(file);
-    } catch (ConfigurationException e) {
-      logger.error(e);
-    }
-  }
-
-  public void load(String fileAbsolutePath) {
-    File file = new File(fileAbsolutePath);
-    load(file);
-  }
-
-  /**
-   * Load from classPath
-   *
-   * @param fileName
-   */
-  public void loadFromClasspath(String fileName) {
-    logger.debug("loading config properties : " + fileName);
-    // load file from classpath
-    try {
-      URL fileCompleteUrl = Thread.currentThread()
-        .getContextClassLoader().getResource(fileName);
-      logger.debug("File Complete URI :" + fileCompleteUrl);
-      File file = new File(fileCompleteUrl.toURI());
-      load(file);
-    } catch (Exception e) {
-      logger.error(e);
-    }
-  }
-
-  public HashMap<String, Object> getPropertyMap() {
-    HashMap<String, Object> propertyMap = new HashMap<String, Object>();
-    Iterator<String> keys = this.getKeys();
-    while (keys.hasNext()) {
-      String key = keys.next();
-      propertyMap.put(key, this.getProperty(key));
-    }
-    return propertyMap;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/PropertiesUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/PropertiesUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/PropertiesUtil.java
index 16ebae2..59f0296 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/PropertiesUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/PropertiesUtil.java
@@ -34,9 +34,11 @@ import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
 import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
 
 public class PropertiesUtil extends PropertyPlaceholderConfigurer {
-  static Logger logger = Logger.getLogger(PropertiesUtil.class);
-  private static Map<String, String> propertiesMap;
+  private static final Logger logger = Logger.getLogger(PropertiesUtil.class);
+  
   private static final String LOGSEARCH_PROP_FILE="logsearch.properties";
+  
+  private static Map<String, String> propertiesMap;
 
   private PropertiesUtil() {
 
@@ -45,8 +47,7 @@ public class PropertiesUtil extends PropertyPlaceholderConfigurer {
  static {
     propertiesMap = new HashMap<String, String>();
     Properties properties = new Properties();
-    URL fileCompleteUrl = Thread.currentThread()
-        .getContextClassLoader().getResource(LOGSEARCH_PROP_FILE);
+    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader().getResource(LOGSEARCH_PROP_FILE);
     FileInputStream fileInputStream = null;
     try {
       File file = new File(fileCompleteUrl.toURI());
@@ -69,9 +70,7 @@ public class PropertiesUtil extends PropertyPlaceholderConfigurer {
   }
 
   @Override
-  protected void processProperties(
-    ConfigurableListableBeanFactory beanFactory, Properties props)
-    throws BeansException {
+  protected void processProperties(ConfigurableListableBeanFactory beanFactory, Properties props) throws BeansException {
     super.processProperties(beanFactory, props);
 
     propertiesMap = new HashMap<String, String>();
@@ -80,8 +79,7 @@ public class PropertiesUtil extends PropertyPlaceholderConfigurer {
     Set<Object> keySet = System.getProperties().keySet();
     for (Object key : keySet) {
       String keyStr = key.toString();
-      propertiesMap.put(keyStr, System.getProperties()
-        .getProperty(keyStr).trim());
+      propertiesMap.put(keyStr, System.getProperties().getProperty(keyStr).trim());
     }
 
     // add our properties now
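
The static block above boils down to "find logsearch.properties on the classpath, load it, then let system properties win". A rough, hedged sketch of the classpath-loading step follows (standard JDK only; the file name is the one referenced above and must be on the classpath for the lookup to return anything — the real code resolves it via getResource() and a FileInputStream instead of a stream).

import java.io.InputStream;
import java.util.Properties;

public class ClasspathPropertiesSketch {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    // Locate and load the properties file from the context classloader
    try (InputStream in = Thread.currentThread().getContextClassLoader()
        .getResourceAsStream("logsearch.properties")) {
      if (in != null) {
        props.load(in);
      }
    }
    System.out.println("loaded " + props.size() + " properties");
  }
}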

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/QueryBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/QueryBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/QueryBase.java
index d1e0a86..5961cff 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/QueryBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/QueryBase.java
@@ -20,13 +20,10 @@
 package org.apache.ambari.logsearch.util;
 
 import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.commons.lang.StringUtils;
 import org.apache.solr.client.solrj.SolrQuery;
-import org.springframework.beans.factory.annotation.Autowired;
 
 public class QueryBase {
-  
-  @Autowired
-  StringUtil stringUtil;
 
   //Solr Facet Methods
   public void setFacetField(SolrQuery solrQuery, String facetField) {
@@ -48,8 +45,7 @@ public class QueryBase {
     solrQuery.setFacetSort(sortType);
   }
 
-  public void setFacetPivot(SolrQuery solrQuery, int mincount,
-                            String... hirarchy) {
+  public void setFacetPivot(SolrQuery solrQuery, int mincount, String... hirarchy) {
     solrQuery.setFacet(true);
     setRowCount(solrQuery, 0);
     solrQuery.set(LogSearchConstants.FACET_PIVOT, hirarchy);
@@ -57,8 +53,7 @@ public class QueryBase {
     setFacetLimit(solrQuery, -1);
   }
 
-  public void setFacetDate(SolrQuery solrQuery, String facetField,
-                           String from, String to, String unit) {
+  public void setFacetDate(SolrQuery solrQuery, String facetField, String from, String to, String unit) {
     solrQuery.setFacet(true);
     setRowCount(solrQuery, 0);
     solrQuery.set(LogSearchConstants.FACET_DATE, facetField);
@@ -69,8 +64,7 @@ public class QueryBase {
     setFacetLimit(solrQuery, -1);
   }
 
-  public void setFacetRange(SolrQuery solrQuery, String facetField,
-                            String from, String to, String unit) {
+  public void setFacetRange(SolrQuery solrQuery, String facetField, String from, String to, String unit) {
     solrQuery.setFacet(true);
     setRowCount(solrQuery, 0);
     solrQuery.set(LogSearchConstants.FACET_RANGE, facetField);
@@ -96,7 +90,7 @@ public class QueryBase {
   //Main Query
   public void setMainQuery(SolrQuery solrQuery, String query) {
     String defalultQuery = "*:*";
-    if (stringUtil.isEmpty(query)){
+    if (StringUtils.isBlank(query)){
       solrQuery.setQuery(defalultQuery);
     }else{
       solrQuery.setQuery(query);
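
Put together, the helpers in this class just set standard SolrJ facet parameters. A hedged sketch of what a typical caller ends up with, written directly against SolrJ; the field name "level" is illustrative and not taken from this diff.

import org.apache.solr.client.solrj.SolrQuery;

public class FacetQuerySketch {
  public static void main(String[] args) {
    SolrQuery q = new SolrQuery();
    q.setQuery("*:*");        // setMainQuery() falls back to *:* when the caller passes a blank query
    q.setRows(0);             // facet-only request, mirrors setRowCount(solrQuery, 0)
    q.setFacet(true);
    q.addFacetField("level"); // count documents per log level
    q.setFacetMinCount(1);
    q.setFacetLimit(-1);      // no cap on the number of facet buckets
    System.out.println(q);    // prints the encoded query parameters
  }
}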

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java
index e64f742..6df5b05 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/RESTErrorUtil.java
@@ -54,7 +54,7 @@ public class RESTErrorUtil {
     return webAppEx;
   }
 
-  public WebApplicationException createRESTException(VResponse response, int sc) {
+  private WebApplicationException createRESTException(VResponse response, int sc) {
     Response errorResponse = Response.status(sc).entity(response).build();
     WebApplicationException restException = new WebApplicationException(errorResponse);
     restException.fillInStackTrace();

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
index ee706bf..bcf9605 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
@@ -25,23 +25,21 @@ import java.util.Locale;
 
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
-import org.apache.log4j.Logger;
 import org.apache.solr.schema.TrieDoubleField;
 import org.apache.solr.schema.TrieFloatField;
 import org.apache.solr.schema.TrieIntField;
 import org.apache.solr.schema.TrieLongField;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang.StringUtils;
 import org.springframework.stereotype.Component;
+import org.springframework.util.CollectionUtils;
 
 @Component
 public class SolrUtil {
-  static final Logger logger = Logger.getLogger("org.apache.ambari.logsearch");
-  
-  @Autowired
-  StringUtil stringUtil;
 
   @Autowired
-  JSONUtil jsonUtil;
+  private JSONUtil jsonUtil;
 
   public String setField(String fieldName, String value) {
     if (value == null || value.trim().length() == 0) {
@@ -50,13 +48,8 @@ public class SolrUtil {
     return fieldName + ":" + value.trim().toLowerCase(Locale.ENGLISH);
   }
 
-  /**
-   * @param string
-   * @param myClassTypes
-   * @return
-   */
   public String inList(String fieldName, int[] values) {
-    if (values == null || values.length == 0) {
+    if (ArrayUtils.isEmpty(values)) {
       return "";
     }
     String expr = "";
@@ -71,12 +64,8 @@ public class SolrUtil {
     }
   }
 
-  /**
-   * @param familyUserIdSet
-   * @return
-   */
   public String inList(Collection<Long> values) {
-    if (values == null || values.isEmpty()) {
+    if (CollectionUtils.isEmpty(values)) {
       return "";
     }
     String expr = "";
@@ -92,18 +81,12 @@ public class SolrUtil {
 
   }
 
-  /**
-   * @param wildCard
-   * @param string
-   * @param searchList
-   * @return
-   */
   public String orList(String fieldName, String[] valueList, String wildCard) {
-    if (valueList == null || valueList.length == 0) {
+    if (ArrayUtils.isEmpty(valueList)) {
       return "";
     }
     
-    if(stringUtil.isEmpty(wildCard)){
+    if (StringUtils.isBlank(wildCard)) {
       wildCard = "";
     }
     
@@ -126,18 +109,12 @@ public class SolrUtil {
 
   }
 
-  /**
-   * @param wildCard
-   * @param string
-   * @param searchList
-   * @return
-   */
   public String andList(String fieldName, String[] valueList, String wildCard) {
-    if (valueList == null || valueList.length == 0) {
+    if (ArrayUtils.isEmpty(valueList)) {
       return "";
     }
     
-    if(stringUtil.isEmpty(wildCard)){
+    if (StringUtils.isBlank(wildCard)) {
       wildCard = "";
     }
     
@@ -162,9 +139,6 @@ public class SolrUtil {
 
   /**
    * Copied from Solr ClientUtils.escapeQueryChars and removed escaping *
-   * 
-   * @param s
-   * @return
    */
   public String escapeQueryChars(String s) {
     StringBuilder sb = new StringBuilder();
@@ -195,7 +169,7 @@ public class SolrUtil {
     return sb.toString();
   }
 
-  public String escapeForWhiteSpaceTokenizer(String search) {
+  private String escapeForWhiteSpaceTokenizer(String search) {
     if (search == null) {
       return null;
     }
@@ -233,30 +207,24 @@ public class SolrUtil {
     return newSearch;
   }
 
-  public String escapeForKeyTokenizer(String search) {
-    if (search.startsWith("*") && search.endsWith("*")
-        && !stringUtil.isEmpty(search)) {
+  private String escapeForKeyTokenizer(String search) {
+    if (search.startsWith("*") && search.endsWith("*") && !StringUtils.isBlank(search)) {
       // Remove the * from both the sides
       if (search.length() > 1) {
         search = search.substring(1, search.length() - 1);
-      }else{
+      } else {
         //search string have only * 
         search="";
       }
     }
-    // Escape the string
     search = escapeQueryChars(search);
 
-    // Add the *
     return "*" + search + "*";
   }
 
   /**
    * This is a special case scenario to handle log_message for wild card
    * scenarios
-   * 
-   * @param search
-   * @return
    */
   public String escapeForLogMessage(String field, String search) {
     if (search.startsWith("*") && search.endsWith("*")) {
@@ -272,8 +240,7 @@ public class SolrUtil {
 
   public String makeSolrSearchString(String search) {
     String newString = search.trim();
-    String newSearch = newString.replaceAll(
-        "(?=[]\\[+&|!(){},:\"^~/=$@%?:.\\\\])", "\\\\");
+    String newSearch = newString.replaceAll("(?=[]\\[+&|!(){},:\"^~/=$@%?:.\\\\])", "\\\\");
     newSearch = newSearch.replace("\n", "*");
     newSearch = newSearch.replace("\t", "*");
     newSearch = newSearch.replace("\r", "*");
@@ -284,8 +251,7 @@ public class SolrUtil {
 
   public String makeSolrSearchStringWithoutAsterisk(String search) {
     String newString = search.trim();
-    String newSearch = newString.replaceAll(
-        "(?=[]\\[+&|!(){}^\"~=/$@%?:.\\\\])", "\\\\");
+    String newSearch = newString.replaceAll("(?=[]\\[+&|!(){}^\"~=/$@%?:.\\\\])", "\\\\");
     newSearch = newSearch.replace("\n", "*");
     newSearch = newSearch.replace("\t", "*");
     newSearch = newSearch.replace("\r", "*");
@@ -296,19 +262,18 @@ public class SolrUtil {
   }
 
   public String makeSearcableString(String search) {
-    if (search == null || search.isEmpty()){
+    if (StringUtils.isBlank(search)) {
       return "";
     }
     String newSearch = search.replaceAll("[\\t\\n\\r]", " ");
-    newSearch = newSearch.replaceAll("(?=[]\\[+&|!(){}^~=$/@%?:.\\\\-])",
-        "\\\\");
+    newSearch = newSearch.replaceAll("(?=[]\\[+&|!(){}^~=$/@%?:.\\\\-])", "\\\\");
 
     return newSearch.replace(" ", "\\ ");
   }
   
 
   public boolean isSolrFieldNumber(String fieldType,SolrDaoBase solrDaoBase) {
-    if (stringUtil.isEmpty(fieldType)) {
+    if (StringUtils.isBlank(fieldType)) {
       return false;
     } else {
       HashMap<String, Object> typeInfoMap = getFieldTypeInfoMap(fieldType,solrDaoBase);
@@ -344,7 +309,7 @@ public class SolrUtil {
       return new HashMap<String, Object>();
     }
     String classname = (String) fieldTypeMap.get("class");
-    if (!stringUtil.isEmpty(classname)) {
+    if (!StringUtils.isBlank(classname)) {
       classname = classname.replace("solr.", "");
       fieldTypeMap.put("class", classname);
     }
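
The recurring change in this class is swapping the custom stringUtil.isEmpty checks for the null-safe commons-lang and Spring helpers that are now imported. A small hedged illustration of how those guards behave; the values are illustrative.

import java.util.Collections;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.springframework.util.CollectionUtils;

public class GuardSketch {
  public static void main(String[] args) {
    System.out.println(StringUtils.isBlank(null));    // true, null-safe
    System.out.println(StringUtils.isBlank("   "));   // true, whitespace-only counts as blank
    System.out.println(ArrayUtils.isEmpty(new int[0]));                    // true
    System.out.println(CollectionUtils.isEmpty(Collections.emptyList()));  // true
  }
}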


[48/50] [abbrv] ambari git commit: AMBARI-18246. Clean up Log Feeder (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/LogsearchReaderFactory.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/LogsearchReaderFactory.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/LogsearchReaderFactory.java
index c57c028..5fc2e14 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/LogsearchReaderFactory.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/reader/LogsearchReaderFactory.java
@@ -27,17 +27,15 @@ import org.apache.log4j.Logger;
 
 public enum LogsearchReaderFactory {
   INSTANCE;
-  private static Logger logger = Logger
-    .getLogger(LogsearchReaderFactory.class);
+  private static final Logger LOG = Logger.getLogger(LogsearchReaderFactory.class);
 
   public Reader getReader(File file) throws FileNotFoundException {
-    logger.debug("Inside reader factory for file:" + file);
+    LOG.debug("Inside reader factory for file:" + file);
     if (GZIPReader.isValidFile(file.getAbsolutePath())) {
-      logger.info("Reading file " + file + " as gzip file");
+      LOG.info("Reading file " + file + " as gzip file");
       return new GZIPReader(file.getAbsolutePath());
     } else {
       return new FileReader(file);
     }
   }
-
 }
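
Usage of the enum-singleton factory above is a one-liner; the sketch below is hedged — it assumes the ambari-logsearch-logfeeder classes are on the classpath and uses an illustrative file path.

import java.io.File;
import java.io.Reader;
import org.apache.ambari.logfeeder.input.reader.LogsearchReaderFactory;

public class ReaderFactorySketch {
  public static void main(String[] args) throws Exception {
    File logFile = new File("/var/log/ambari-logsearch-logfeeder/logfeeder.log");
    // Plain files come back as a FileReader, *.gz files as a GZIPReader
    try (Reader reader = LogsearchReaderFactory.INSTANCE.getReader(logFile)) {
      char[] buffer = new char[4096];
      int read = reader.read(buffer);
      System.out.println("read " + read + " chars");
    }
  }
}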

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
deleted file mode 100644
index ae0cfc0..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder.logconfig;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.TimeZone;
-
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.ambari.logfeeder.util.SolrUtil;
-import org.apache.ambari.logfeeder.view.VLogfeederFilter;
-import org.apache.ambari.logfeeder.view.VLogfeederFilterWrapper;
-import org.apache.log4j.Logger;
-
-public class FetchConfigFromSolr extends Thread {
-  private static Logger logger = Logger.getLogger(FetchConfigFromSolr.class);
-  private static VLogfeederFilterWrapper logfeederFilterWrapper = null;
-  private static int solrConfigInterval = 5;// 5 sec;
-  private static long delay;
-  private static String endTimeDateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSS";//2016-04-05T04:30:00.000Z
-  private static String sysTimeZone = "GMT";
-
-  FetchConfigFromSolr(boolean isDaemon) {
-    this.setName(this.getClass().getSimpleName());
-    this.setDaemon(isDaemon);
-  }
-
-  @Override
-  public void run() {
-    String zkConnectString = LogFeederUtil.getStringProperty("logfeeder.solr.zk_connect_string");
-    String solrUrl = LogFeederUtil.getStringProperty("logfeeder.solr.url");
-    if ((zkConnectString == null || zkConnectString.trim().length() == 0 )
-        && (solrUrl == null || solrUrl.trim().length() == 0)) {
-      logger.warn("Neither Solr ZK Connect String nor solr Uril for UserConfig/History is set." +
-          "Won't look for level configuration from Solr.");
-      return;
-    }
-    solrConfigInterval = LogFeederUtil.getIntProperty("logfeeder.solr.config.interval", solrConfigInterval);
-    delay = 1000 * solrConfigInterval;
-    do {
-      logger.debug("Updating config from solr after every " + solrConfigInterval + " sec.");
-      pullConfigFromSolr();
-      try {
-        Thread.sleep(delay);
-      } catch (InterruptedException e) {
-        logger.error(e.getLocalizedMessage(), e.getCause());
-      }
-    } while (true);
-  }
-
-  private synchronized void pullConfigFromSolr() {
-    SolrUtil solrUtil = SolrUtil.getInstance();
-    if(solrUtil!=null){
-      HashMap<String, Object> configDocMap = solrUtil.getConfigDoc();
-      if (configDocMap != null) {
-        String configJson = (String) configDocMap.get(LogFeederConstants.VALUES);
-        if (configJson != null) {
-          logfeederFilterWrapper = LogFeederUtil.getGson().fromJson(configJson, VLogfeederFilterWrapper.class);
-        }
-      }
-    }
-  }
-
-  private static boolean isFilterExpired(VLogfeederFilter logfeederFilter) {
-    boolean isFilterExpired = false;// default is false
-    if (logfeederFilter != null) {
-      Date filterEndDate = parseFilterExpireDate(logfeederFilter);
-      if (filterEndDate != null) {
-        Date currentDate = getCurrentDate();
-        if (currentDate.compareTo(filterEndDate) >= 0) {
-          logger.debug("Filter for  Component :" + logfeederFilter.getLabel() + " and Hosts :"
-            + listToStr(logfeederFilter.getHosts()) + "Filter is expired because of filter endTime : "
-            + dateToStr(filterEndDate) + " is older than currentTime :" + dateToStr(currentDate));
-          isFilterExpired = true;
-        }
-      }
-    }
-    return isFilterExpired;
-  }
-
-  private static String dateToStr(Date date) {
-    if (date == null) {
-      return "";
-    }
-    SimpleDateFormat formatter = new SimpleDateFormat(endTimeDateFormat);
-    TimeZone timeZone = TimeZone.getTimeZone(sysTimeZone);
-    formatter.setTimeZone(timeZone);
-    return formatter.format(date);
-  }
-
-  private static Date parseFilterExpireDate(VLogfeederFilter vLogfeederFilter) {
-    String expiryTime = vLogfeederFilter.getExpiryTime();
-    if (expiryTime != null && !expiryTime.isEmpty()) {
-      SimpleDateFormat formatter = new SimpleDateFormat(endTimeDateFormat);
-      TimeZone timeZone = TimeZone.getTimeZone(sysTimeZone);
-      formatter.setTimeZone(timeZone);
-      try {
-        return formatter.parse(expiryTime);
-      } catch (ParseException e) {
-        logger.error("Filter have invalid ExpiryTime : " + expiryTime + " for component :" + vLogfeederFilter.getLabel()
-          + " and hosts :" + listToStr(vLogfeederFilter.getHosts()));
-      }
-    }
-    return null;
-  }
-
-  public static List<String> getAllowedLevels(String hostName, VLogfeederFilter componentFilter) {
-    String componentName = componentFilter.getLabel();
-    List<String> hosts = componentFilter.getHosts();
-    List<String> defaultLevels = componentFilter.getDefaultLevels();
-    List<String> overrideLevels = componentFilter.getOverrideLevels();
-    String expiryTime=componentFilter.getExpiryTime();
-    //check is user override or not
-    if ((expiryTime != null && !expiryTime.isEmpty())
-        || (overrideLevels != null && !overrideLevels.isEmpty())
-        || (hosts != null && !hosts.isEmpty())) {
-      if (hosts == null || hosts.isEmpty()) {
-        // hosts list is empty or null consider it apply on all hosts
-        hosts.add(LogFeederConstants.ALL);
-      }
-      if (LogFeederUtil.isListContains(hosts, hostName, false)) {
-        if (isFilterExpired(componentFilter)) {
-          logger.debug("Filter for component " + componentName + " and host :"
-              + hostName + " is expired at " + componentFilter.getExpiryTime());
-          return defaultLevels;
-        } else {
-          return overrideLevels;
-        }
-      }
-    }
-    return defaultLevels;
-  }
-
-  public static boolean isFilterAvailable() {
-    return logfeederFilterWrapper != null;
-  }
-  
-  public static VLogfeederFilter findComponentFilter(String componentName) {
-    if (logfeederFilterWrapper != null) {
-      HashMap<String, VLogfeederFilter> filter = logfeederFilterWrapper.getFilter();
-      if (filter != null) {
-        VLogfeederFilter componentFilter = filter.get(componentName);
-        if (componentFilter != null) {
-          return componentFilter;
-        }
-      }
-    }
-    logger.trace("Filter is not there for component :" + componentName);
-    return null;
-  }
-
-
-  public static Date getCurrentDate() {
-    TimeZone.setDefault(TimeZone.getTimeZone(sysTimeZone));
-    Date date = new Date();
-    return date;
-  }
-
-  public static String listToStr(List<String> strList) {
-    StringBuilder out = new StringBuilder("[");
-    if (strList != null) {
-      int counter = 0;
-      for (Object o : strList) {
-        if (counter > 0) {
-          out.append(",");
-        }
-        out.append(o.toString());
-        counter++;
-      }
-    }
-    out.append("]");
-    return out.toString();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FilterLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FilterLogData.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FilterLogData.java
new file mode 100644
index 0000000..801a289
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FilterLogData.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.logconfig;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
+
+/**
+ * Read configuration from solr and filter the log
+ */
+public enum FilterLogData {
+  INSTANCE;
+  
+  private static final Logger LOG = Logger.getLogger(FilterLogData.class);
+  
+  private static final boolean DEFAULT_VALUE = true;
+
+  public boolean isAllowed(String jsonBlock) {
+    if (StringUtils.isEmpty(jsonBlock)) {
+      return DEFAULT_VALUE;
+    }
+    Map<String, Object> jsonObj = LogFeederUtil.toJSONObject(jsonBlock);
+    return isAllowed(jsonObj);
+  }
+
+  public boolean isAllowed(Map<String, Object> jsonObj) {
+    boolean isAllowed = applyFilter(jsonObj);
+    if (!isAllowed) {
+      LOG.trace("Filter block the content :" + LogFeederUtil.getGson().toJson(jsonObj));
+    }
+    return isAllowed;
+  }
+  
+
+  private boolean applyFilter(Map<String, Object> jsonObj) {
+    if (MapUtils.isEmpty(jsonObj)) {
+      LOG.warn("Output jsonobj is empty");
+      return DEFAULT_VALUE;
+    }
+    
+    String hostName = (String) jsonObj.get(LogFeederConstants.SOLR_HOST);
+    String componentName = (String) jsonObj.get(LogFeederConstants.SOLR_COMPONENT);
+    String level = (String) jsonObj.get(LogFeederConstants.SOLR_LEVEL);
+    if (StringUtils.isNotBlank(hostName) && StringUtils.isNotBlank(componentName) && StringUtils.isNotBlank(level)) {
+      LogFeederFilter componentFilter = LogConfigHandler.findComponentFilter(componentName);
+      if (componentFilter == null) {
+        return DEFAULT_VALUE;
+      }
+      List<String> allowedLevels = LogConfigHandler.getAllowedLevels(hostName, componentFilter);
+      if (CollectionUtils.isEmpty(allowedLevels)) {
+        allowedLevels.add(LogFeederConstants.ALL);
+      }
+      return LogFeederUtil.isListContains(allowedLevels, level, false);
+    }
+    else {
+      return DEFAULT_VALUE;
+    }
+  }
+}
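
A hedged usage sketch for the new entry point: FilterLogData is consulted with the JSON form of a log line before it is shipped. The JSON below is illustrative, and with no component filter yet fetched from Solr the check falls back to its default of allowing the line.

import org.apache.ambari.logfeeder.logconfig.FilterLogData;

public class FilterSketch {
  public static void main(String[] args) {
    String jsonLine = "{\"host\":\"c6401.ambari.apache.org\",\"type\":\"ambari_agent\",\"level\":\"INFO\"}";
    // DEFAULT_VALUE (true) applies while no component filter is available
    boolean allowed = FilterLogData.INSTANCE.isAllowed(jsonLine);
    System.out.println(allowed);
  }
}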

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogConfigFetcher.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogConfigFetcher.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogConfigFetcher.java
new file mode 100644
index 0000000..12c744c
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogConfigFetcher.java
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.logconfig;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.SolrRequest.METHOD;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.response.CollectionAdminResponse;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocument;
+import org.apache.solr.common.SolrDocumentList;
+import org.apache.solr.common.SolrException;
+
+public class LogConfigFetcher {
+  private static final Logger LOG = Logger.getLogger(LogConfigFetcher.class);
+  
+  private static LogConfigFetcher instance;
+  public synchronized static LogConfigFetcher getInstance() {
+    if (instance == null) {
+      try {
+        instance = new LogConfigFetcher();
+      } catch (Exception e) {
+        String logMessageKey = LogConfigFetcher.class.getSimpleName() + "_SOLR_UTIL";
+              LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error constructing solrUtil", e, LOG, Level.WARN);
+      }
+    }
+    return instance;
+  }
+
+  private SolrClient solrClient;
+
+  private String solrDetail = "";
+
+  public LogConfigFetcher() throws Exception {
+    String url = LogFeederUtil.getStringProperty("logfeeder.solr.url");
+    String zkConnectString = LogFeederUtil.getStringProperty("logfeeder.solr.zk_connect_string");
+    String collection = LogFeederUtil.getStringProperty("logfeeder.solr.core.config.name", "history");
+    connectToSolr(url, zkConnectString, collection);
+  }
+
+  private SolrClient connectToSolr(String url, String zkConnectString, String collection) throws Exception {
+    solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + collection + ", url=" + url;
+
+    LOG.info("connectToSolr() " + solrDetail);
+    if (StringUtils.isEmpty(collection)) {
+      throw new Exception("For solr, collection name is mandatory. " + solrDetail);
+    }
+    
+    if (StringUtils.isEmpty(zkConnectString) && StringUtils.isBlank(url))
+      throw new Exception("Both zkConnectString and URL are empty. zkConnectString=" + zkConnectString + ", collection=" +
+          collection + ", url=" + url);
+    
+    if (StringUtils.isNotEmpty(zkConnectString)) {
+      solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + collection;
+      LOG.info("Using zookeepr. " + solrDetail);
+      CloudSolrClient solrClouldClient = new CloudSolrClient(zkConnectString);
+      solrClouldClient.setDefaultCollection(collection);
+      solrClient = solrClouldClient;
+      checkSolrStatus(3 * 60 * 1000);
+    } else {
+      solrDetail = "collection=" + collection + ", url=" + url;
+      String collectionURL = url + "/" + collection;
+      LOG.info("Connecting to  solr : " + collectionURL);
+      solrClient = new HttpSolrClient(collectionURL);
+    }
+    return solrClient;
+  }
+
+  private boolean checkSolrStatus(int waitDurationMS) {
+    boolean status = false;
+    try {
+      long beginTimeMS = System.currentTimeMillis();
+      long waitIntervalMS = 2000;
+      int pingCount = 0;
+      while (true) {
+        pingCount++;
+        CollectionAdminResponse response = null;
+        try {
+          CollectionAdminRequest.List colListReq = new CollectionAdminRequest.List();
+          response = colListReq.process(solrClient);
+        } catch (Exception ex) {
+          LOG.error("Con't connect to Solr. solrDetail=" + solrDetail, ex);
+        }
+        if (response != null && response.getStatus() == 0) {
+          LOG.info("Solr getCollections() is success. solr=" + solrDetail);
+          status = true;
+          break;
+        }
+        if (System.currentTimeMillis() - beginTimeMS > waitDurationMS) {
+          LOG.error("Solr is not reachable even after " + (System.currentTimeMillis() - beginTimeMS)
+            + " ms. If you are using alias, then you might have to restart LogSearch after Solr is up and running. solr="
+            + solrDetail + ", response=" + response);
+          break;
+        } else {
+          LOG.warn("Solr is not reachable yet. getCollections() attempt count=" + pingCount + ". Will sleep for " +
+              waitIntervalMS + " ms and try again." + " solr=" + solrDetail + ", response=" + response);
+        }
+        Thread.sleep(waitIntervalMS);
+      }
+    } catch (Throwable t) {
+      LOG.error("Seems Solr is not up. solrDetail=" + solrDetail, t);
+    }
+    return status;
+  }
+
+  public Map<String, Object> getConfigDoc() {
+    HashMap<String, Object> configMap = new HashMap<String, Object>();
+    SolrQuery solrQuery = new SolrQuery();
+    solrQuery.setQuery("*:*");
+    String fq = LogFeederConstants.ROW_TYPE + ":" + LogFeederConstants.LOGFEEDER_FILTER_NAME;
+    solrQuery.setFilterQueries(fq);
+    try {
+      QueryResponse response = process(solrQuery);
+      if (response != null) {
+        SolrDocumentList documentList = response.getResults();
+        if (CollectionUtils.isNotEmpty(documentList)) {
+          SolrDocument configDoc = documentList.get(0);
+          String configJson = LogFeederUtil.getGson().toJson(configDoc);
+          configMap = (HashMap<String, Object>) LogFeederUtil.toJSONObject(configJson);
+        }
+      }
+    } catch (Exception e) {
+      String logMessageKey = this.getClass().getSimpleName() + "_FETCH_FILTER_CONFIG_ERROR";
+      LogFeederUtil.logErrorMessageByInterval(logMessageKey, "Error getting filter config from solr", e, LOG, Level.ERROR);
+    }
+    return configMap;
+  }
+
+  private QueryResponse process(SolrQuery solrQuery) throws SolrServerException, IOException, SolrException {
+    if (solrClient != null) {
+      QueryResponse queryResponse = solrClient.query(solrQuery, METHOD.POST);
+      return queryResponse;
+    } else {
+      LOG.error("solrClient can't be null");
+      return null;
+    }
+  }
+}
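
getConfigDoc() above amounts to a single filter query for the log_feeder_config row. A hedged sketch of that query expressed directly in SolrJ; the URL and the "history" collection name are illustrative defaults taken from the code, not guaranteed for any particular cluster, and the field/value pair comes from the constants referenced above.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;

public class ConfigDocQuerySketch {
  public static void main(String[] args) throws Exception {
    HttpSolrClient solr = new HttpSolrClient("http://localhost:8886/solr/history");
    SolrQuery q = new SolrQuery("*:*");
    q.setFilterQueries("rowtype:log_feeder_config"); // ROW_TYPE : LOGFEEDER_FILTER_NAME
    QueryResponse rsp = solr.query(q);
    System.out.println("matching config docs: " + rsp.getResults().getNumFound());
    solr.close();
  }
}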

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandler.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandler.java
new file mode 100644
index 0000000..4f52b0b
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandler.java
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.logconfig;
+
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TimeZone;
+
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+
+public class LogConfigHandler extends Thread {
+  private static final Logger LOG = Logger.getLogger(LogConfigHandler.class);
+  
+  private static final int DEFAULT_SOLR_CONFIG_INTERVAL = 5;
+  private static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS";
+  private static final String TIMEZONE = "GMT";
+  
+  static {
+    TimeZone.setDefault(TimeZone.getTimeZone(TIMEZONE));
+  }
+  
+  private static ThreadLocal<DateFormat> formatter = new ThreadLocal<DateFormat>() {
+    protected DateFormat initialValue() {
+      SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT);
+      dateFormat.setTimeZone(TimeZone.getTimeZone(TIMEZONE));
+      return dateFormat;
+    }
+  };
+  
+  private static LogFeederFilterWrapper logFeederFilterWrapper;
+
+  private static boolean running = false;
+
+  public static void handleConfig() {
+    boolean filterEnable = LogFeederUtil.getBooleanProperty("logfeeder.log.filter.enable", false);
+    if (!filterEnable) {
+      LOG.info("Logfeeder filter Scheduler is disabled.");
+      return;
+    }
+    if (!running) {
+      new LogConfigHandler().start();
+      running = true;
+      LOG.info("Logfeeder Filter Thread started!");
+    } else {
+      LOG.warn("Logfeeder Filter Thread is already running.");
+    }
+  }
+  
+  private LogConfigHandler() {
+    setName(getClass().getSimpleName());
+    setDaemon(true);
+  }
+
+  @Override
+  public void run() {
+    String zkConnectString = LogFeederUtil.getStringProperty("logfeeder.solr.zk_connect_string");
+    String solrUrl = LogFeederUtil.getStringProperty("logfeeder.solr.url");
+    if (StringUtils.isBlank(zkConnectString) && StringUtils.isBlank(solrUrl)) {
+      LOG.warn("Neither Solr ZK Connect String nor solr Url for UserConfig/History is set." +
+          "Won't look for level configuration from Solr.");
+      return;
+    }
+    
+    int solrConfigInterval = LogFeederUtil.getIntProperty("logfeeder.solr.config.interval", DEFAULT_SOLR_CONFIG_INTERVAL);
+    do {
+      LOG.debug("Updating config from solr after every " + solrConfigInterval + " sec.");
+      fetchConfig();
+      try {
+        Thread.sleep(1000 * solrConfigInterval);
+      } catch (InterruptedException e) {
+        LOG.error(e.getLocalizedMessage(), e.getCause());
+      }
+    } while (true);
+  }
+
+  private synchronized void fetchConfig() {
+    LogConfigFetcher fetcher = LogConfigFetcher.getInstance();
+    if (fetcher != null) {
+      Map<String, Object> configDocMap = fetcher.getConfigDoc();
+      String configJson = (String) configDocMap.get(LogFeederConstants.VALUES);
+      if (configJson != null) {
+        logFeederFilterWrapper = LogFeederUtil.getGson().fromJson(configJson, LogFeederFilterWrapper.class);
+      }
+    }
+  }
+
+  public static boolean isFilterAvailable() {
+    return logFeederFilterWrapper != null;
+  }
+
+  public static List<String> getAllowedLevels(String hostName, LogFeederFilter componentFilter) {
+    String componentName = componentFilter.getLabel();
+    List<String> hosts = componentFilter.getHosts();
+    List<String> defaultLevels = componentFilter.getDefaultLevels();
+    List<String> overrideLevels = componentFilter.getOverrideLevels();
+    String expiryTime = componentFilter.getExpiryTime();
+    
+    // check whether the user has overridden the levels
+    if (StringUtils.isNotEmpty(expiryTime) || CollectionUtils.isNotEmpty(overrideLevels) || CollectionUtils.isNotEmpty(hosts)) {
+      if (CollectionUtils.isEmpty(hosts)) { // an empty or null hosts list means the filter applies to all hosts
+        hosts.add(LogFeederConstants.ALL);
+      }
+      
+      if (LogFeederUtil.isListContains(hosts, hostName, false)) {
+        if (isFilterExpired(componentFilter)) {
+          LOG.debug("Filter for component " + componentName + " and host :" + hostName + " is expired at " +
+              componentFilter.getExpiryTime());
+          return defaultLevels;
+        } else {
+          return overrideLevels;
+        }
+      }
+    }
+    return defaultLevels;
+  }
+
+  private static boolean isFilterExpired(LogFeederFilter logfeederFilter) {
+    if (logfeederFilter == null)
+      return false;
+    
+    Date filterEndDate = parseFilterExpireDate(logfeederFilter);
+    if (filterEndDate == null) {
+      return false;
+    }
+    
+    Date currentDate = new Date();
+    if (!currentDate.before(filterEndDate)) {
+      LOG.debug("Filter for  Component :" + logfeederFilter.getLabel() + " and Hosts : [" +
+          StringUtils.join(logfeederFilter.getHosts(), ',') + "] is expired because of filter endTime : " +
+          formatter.get().format(filterEndDate) + " is older than currentTime :" + formatter.get().format(currentDate));
+      return true;
+    } else {
+      return false;
+    }
+  }
+
+  private static Date parseFilterExpireDate(LogFeederFilter vLogfeederFilter) {
+    String expiryTime = vLogfeederFilter.getExpiryTime();
+    if (StringUtils.isNotEmpty(expiryTime)) {
+      try {
+        return formatter.get().parse(expiryTime);
+      } catch (ParseException e) {
+        LOG.error("Filter have invalid ExpiryTime : " + expiryTime + " for component :" + vLogfeederFilter.getLabel()
+          + " and hosts : [" + StringUtils.join(vLogfeederFilter.getHosts(), ',') + "]");
+      }
+    }
+    return null;
+  }
+  
+  public static LogFeederFilter findComponentFilter(String componentName) {
+    if (logFeederFilterWrapper != null) {
+      HashMap<String, LogFeederFilter> filter = logFeederFilterWrapper.getFilter();
+      if (filter != null) {
+        LogFeederFilter componentFilter = filter.get(componentName);
+        if (componentFilter != null) {
+          return componentFilter;
+        }
+      }
+    }
+    LOG.trace("Filter is not there for component :" + componentName);
+    return null;
+  }
+}
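
The expiry logic above reduces to parsing expiryTime with a GMT-based yyyy-MM-dd'T'HH:mm:ss.SSS format and comparing it with the current time. A hedged, self-contained sketch of that check; the timestamp is illustrative.

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class ExpiryCheckSketch {
  public static void main(String[] args) throws Exception {
    SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");
    fmt.setTimeZone(TimeZone.getTimeZone("GMT"));
    Date filterEndDate = fmt.parse("2016-09-08T01:33:57.000");
    // Mirrors isFilterExpired(): expired once "now" is no longer before the end date
    boolean expired = !new Date().before(filterEndDate);
    System.out.println(expired);
  }
}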

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederConstants.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederConstants.java
deleted file mode 100644
index 09673a0..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederConstants.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder.logconfig;
-
-public class LogFeederConstants {
-
-  public static final String ALL = "all";
-  public static final String LOGFEEDER_FILTER_NAME = "log_feeder_config";
-  public static final String LOG_LEVEL_UNKNOWN = "UNKNOWN";
-  // solr fields
-  public static final String SOLR_LEVEL = "level";
-  public static final String SOLR_COMPONENT = "type";
-  public static final String SOLR_HOST = "host";
-
-  // UserConfig Constants History
-  public static final String VALUES = "jsons";
-  public static final String ROW_TYPE = "rowtype";
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederFilter.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederFilter.java
new file mode 100644
index 0000000..60c8ae8
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederFilter.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.logconfig;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+public class LogFeederFilter {
+
+  private String label;
+  private List<String> hosts;
+  private List<String> defaultLevels;
+  private List<String> overrideLevels;
+  private String expiryTime;
+
+  public LogFeederFilter() {
+    hosts = new ArrayList<String>();
+    defaultLevels = new ArrayList<String>();
+    overrideLevels = new ArrayList<String>();
+  }
+
+  public String getLabel() {
+    return label;
+  }
+
+  public void setLabel(String label) {
+    this.label = label;
+  }
+
+  public List<String> getHosts() {
+    return hosts;
+  }
+
+  public void setHosts(List<String> hosts) {
+    this.hosts = hosts;
+  }
+
+  public List<String> getDefaultLevels() {
+    return defaultLevels;
+  }
+
+  public void setDefaultLevels(List<String> defaultLevels) {
+    this.defaultLevels = defaultLevels;
+  }
+
+  public List<String> getOverrideLevels() {
+    return overrideLevels;
+  }
+
+  public void setOverrideLevels(List<String> overrideLevels) {
+    this.overrideLevels = overrideLevels;
+  }
+
+  public String getExpiryTime() {
+    return expiryTime;
+  }
+
+  public void setExpiryTime(String expiryTime) {
+    this.expiryTime = expiryTime;
+  }
+
+}
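
The LogFeederFilter bean added above is a plain Jackson/JAXB-annotated POJO, so it can be round-tripped to and from the JSON stored for the filter configuration. A minimal usage sketch, assuming the Jackson 1.x (org.codehaus.jackson) ObjectMapper that the annotations target; the label, host and level values below are made up for illustration:

import java.util.Arrays;

import org.apache.ambari.logfeeder.logconfig.LogFeederFilter;
import org.codehaus.jackson.map.ObjectMapper;

public class LogFeederFilterExample {
  public static void main(String[] args) throws Exception {
    // Keep FATAL/ERROR/WARN by default and additionally allow DEBUG (hypothetical values).
    LogFeederFilter filter = new LogFeederFilter();
    filter.setLabel("hdfs_namenode");
    filter.setHosts(Arrays.asList("nn01.example.com"));
    filter.setDefaultLevels(Arrays.asList("FATAL", "ERROR", "WARN"));
    filter.setOverrideLevels(Arrays.asList("DEBUG"));

    // Serialize with Jackson 1.x, which honours the @JsonAutoDetect/@JsonSerialize annotations.
    ObjectMapper mapper = new ObjectMapper();
    String json = mapper.writeValueAsString(filter);
    System.out.println(json);

    // ...and read it back.
    LogFeederFilter parsed = mapper.readValue(json, LogFeederFilter.class);
    System.out.println(parsed.getDefaultLevels());
  }
}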

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederFilterWrapper.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederFilterWrapper.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederFilterWrapper.java
new file mode 100644
index 0000000..9199cd3
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogFeederFilterWrapper.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logfeeder.logconfig;
+
+import java.util.HashMap;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+public class LogFeederFilterWrapper {
+
+  private HashMap<String, LogFeederFilter> filter;
+  private String id;
+
+  public HashMap<String, LogFeederFilter> getFilter() {
+    return filter;
+  }
+
+  public void setFilter(HashMap<String, LogFeederFilter> filter) {
+    this.filter = filter;
+  }
+
+  public String getId() {
+    return id;
+  }
+
+  public void setId(String id) {
+    this.id = id;
+  }
+}
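
The wrapper above keys LogFeederFilter instances by component name and carries the id of the stored config document. A short sketch of how the two classes compose; the component name and id are illustrative assumptions, not values mandated by the code:

import java.util.HashMap;

import org.apache.ambari.logfeeder.logconfig.LogFeederFilter;
import org.apache.ambari.logfeeder.logconfig.LogFeederFilterWrapper;

public class LogFeederFilterWrapperExample {
  public static void main(String[] args) {
    LogFeederFilter filter = new LogFeederFilter();
    filter.setLabel("hdfs_namenode"); // hypothetical component label

    // Map component name -> filter, then wrap it with the config document id.
    HashMap<String, LogFeederFilter> filters = new HashMap<String, LogFeederFilter>();
    filters.put("hdfs_namenode", filter);

    LogFeederFilterWrapper wrapper = new LogFeederFilterWrapper();
    wrapper.setFilter(filters);
    wrapper.setId("log_feeder_config"); // hypothetical id value

    System.out.println(wrapper.getFilter().keySet());
  }
}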

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java
deleted file mode 100644
index bc807193..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/LogfeederScheduler.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder.logconfig;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.log4j.Logger;
-
-public enum LogfeederScheduler {
-
-  INSTANCE;
-
-  private Logger logger = Logger.getLogger(LogfeederScheduler.class);
-
-  private static boolean running = false;
-
-  public synchronized void start() {
-    boolean filterEnable = LogFeederUtil.getBooleanProperty("logfeeder.log.filter.enable", false);
-    if (!filterEnable) {
-      logger.info("Logfeeder  filter Scheduler is disabled.");
-      return;
-    }
-    if (!running) {
-      for (Thread thread : getThreadList()) {
-        thread.start();
-      }
-      running = true;
-      logger.info("Logfeeder Scheduler started!");
-    } else {
-      logger.warn("Logfeeder Scheduler is already running.");
-    }
-  }
-
-  private List<Thread> getThreadList() {
-    List<Thread> tasks = new ArrayList<Thread>();
-    Thread configMonitor = new FetchConfigFromSolr(true);
-    tasks.add(configMonitor);
-    return tasks;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java
deleted file mode 100644
index b5e4eb3..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/ApplyLogFilter.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder.logconfig.filter;
-
-import java.util.List;
-import java.util.Map;
-
-import org.apache.ambari.logfeeder.logconfig.FetchConfigFromSolr;
-import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.ambari.logfeeder.view.VLogfeederFilter;
-import org.apache.log4j.Logger;
-
-class ApplyLogFilter extends DefaultDataFilter {
-
-  private static Logger logger = Logger.getLogger(ApplyLogFilter.class);
-
-  @Override
-  public boolean applyFilter(Map<String, Object> jsonObj, boolean defaultValue) {
-    if (isEmpty(jsonObj)) {
-      logger.warn("Output jsonobj is empty");
-      return defaultValue;
-    }
-    String hostName = (String) jsonObj.get(LogFeederConstants.SOLR_HOST);
-    if (isNotEmpty(hostName)) {
-      String componentName = (String) jsonObj.get(LogFeederConstants.SOLR_COMPONENT);
-      if (isNotEmpty(componentName)) {
-        String level = (String) jsonObj.get(LogFeederConstants.SOLR_LEVEL);
-        if (isNotEmpty(level)) {
-          VLogfeederFilter componentFilter = FetchConfigFromSolr.findComponentFilter(componentName);
-          if (componentFilter == null) {
-            return defaultValue;
-          }
-          List<String> allowedLevels = FetchConfigFromSolr.getAllowedLevels(
-              hostName, componentFilter);
-          if (allowedLevels == null || allowedLevels.isEmpty()) {
-            allowedLevels.add(LogFeederConstants.ALL);
-          }
-          return LogFeederUtil.isListContains(allowedLevels, level, false);
-        }
-      }
-    }
-    return defaultValue;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/DefaultDataFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/DefaultDataFilter.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/DefaultDataFilter.java
deleted file mode 100644
index 04d2ca4..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/DefaultDataFilter.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder.logconfig.filter;
-
-import java.util.Map;
-
-/**
- * Default filter to allow everything
- */
-class DefaultDataFilter {
-  public boolean applyFilter(Map<String, Object> outputJsonObj, boolean defaultValue) {
-    return defaultValue;
-  }
-
-  protected boolean isEmpty(Map<String, Object> map) {
-    if (map == null || map.isEmpty()) {
-      return true;
-    }
-    return false;
-  }
-
-  protected boolean isEmpty(String str) {
-    if (str == null || str.trim().isEmpty()) {
-      return true;
-    }
-    return false;
-  }
-
-  protected boolean isNotEmpty(String str) {
-    return !isEmpty(str);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java
deleted file mode 100644
index 3a8eae9..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/filter/FilterLogData.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder.logconfig.filter;
-
-import java.util.Map;
-
-import org.apache.ambari.logfeeder.logconfig.filter.ApplyLogFilter;
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.log4j.Logger;
-
-/**
- * Read configuration from solr and filter the log
- */
-public enum FilterLogData {
-  INSTANCE;
-  private ApplyLogFilter applyLogFilter = new ApplyLogFilter();
-  private static Logger logger = Logger.getLogger(FilterLogData.class);
-  // by default allow every log
-  boolean defaultValue = true;
-
-  public boolean isAllowed(String jsonBlock) {
-    if (jsonBlock == null || jsonBlock.isEmpty()) {
-      return defaultValue;
-    }
-    Map<String, Object> jsonObj = LogFeederUtil.toJSONObject(jsonBlock);
-    return isAllowed(jsonObj);
-  }
-
-  public boolean isAllowed(Map<String, Object> jsonObj) {
-    boolean isAllowed = applyLogFilter.applyFilter(jsonObj, defaultValue);
-    if (!isAllowed) {
-      logger.trace("Filter block the content :" + LogFeederUtil.getGson().toJson(jsonObj));
-    }
-    return isAllowed;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/Mapper.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/Mapper.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/Mapper.java
index 906dd25..96709c0 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/Mapper.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/Mapper.java
@@ -26,22 +26,18 @@ public abstract class Mapper {
   protected String fieldName;
   private String mapClassCode;
 
-  public boolean init(String inputDesc, String fieldName,
-                      String mapClassCode, Object mapConfigs) {
+  public abstract boolean init(String inputDesc, String fieldName, String mapClassCode, Object mapConfigs);
+
+  protected void init(String inputDesc, String fieldName, String mapClassCode) {
     this.inputDesc = inputDesc;
     this.fieldName = fieldName;
     this.mapClassCode = mapClassCode;
-    return true;
   }
 
-  public Object apply(Map<String, Object> jsonObj, Object value) {
-    return value;
-  }
+  public abstract Object apply(Map<String, Object> jsonObj, Object value);
 
   @Override
   public String toString() {
-    return "mapClass=" + mapClassCode + ", input=" + inputDesc
-      + ", fieldName=" + fieldName;
+    return "mapClass=" + mapClassCode + ", input=" + inputDesc + ", fieldName=" + fieldName;
   }
-
 }
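
Since init() and apply() are now abstract, every concrete mapper has to provide both and can delegate the common bookkeeping to the new protected init(inputDesc, fieldName, mapClassCode). A minimal sketch of what a subclass looks like under that contract (a hypothetical mapper, not one of the mappers in this commit):

import java.util.Map;

import org.apache.ambari.logfeeder.mapper.Mapper;

// Hypothetical mapper that upper-cases the value of the mapped field.
public class MapperUpperCase extends Mapper {

  @Override
  public boolean init(String inputDesc, String fieldName, String mapClassCode, Object mapConfigs) {
    // Store the common fields via the protected helper; no extra configuration is needed here.
    init(inputDesc, fieldName, mapClassCode);
    return true;
  }

  @Override
  public Object apply(Map<String, Object> jsonObj, Object value) {
    if (value != null) {
      value = value.toString().toUpperCase();
      jsonObj.put(fieldName, value); // fieldName is the protected field inherited from Mapper
    }
    return value;
  }
}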

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java
index 6dbf8be..eb3ae01 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperDate.java
@@ -31,31 +31,29 @@ import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 
 public class MapperDate extends Mapper {
-  private static final Logger logger = Logger.getLogger(MapperDate.class);
+  private static final Logger LOG = Logger.getLogger(MapperDate.class);
 
   private SimpleDateFormat targetDateFormatter = null;
   private boolean isEpoch = false;
   private SimpleDateFormat srcDateFormatter=null;
 
   @Override
-  public boolean init(String inputDesc, String fieldName,
-                      String mapClassCode, Object mapConfigs) {
-    super.init(inputDesc, fieldName, mapClassCode, mapConfigs);
+  public boolean init(String inputDesc, String fieldName, String mapClassCode, Object mapConfigs) {
+    init(inputDesc, fieldName, mapClassCode);
     if (!(mapConfigs instanceof Map)) {
-      logger.fatal("Can't initialize object. mapConfigs class is not of type Map. "
-        + mapConfigs.getClass().getName()
-        + ", map="
-        + this.toString());
+      LOG.fatal("Can't initialize object. mapConfigs class is not of type Map. " + mapConfigs.getClass().getName() +
+        ", map=" + this);
       return false;
     }
+    
     @SuppressWarnings("unchecked")
     Map<String, Object> mapObjects = (Map<String, Object>) mapConfigs;
     String targetDateFormat = (String) mapObjects.get("target_date_pattern");
     String srcDateFormat = (String) mapObjects.get("src_date_pattern");
     if (StringUtils.isEmpty(targetDateFormat)) {
-      logger.fatal("Date format for map is empty. " + this.toString());
+      LOG.fatal("Date format for map is empty. " + this);
     } else {
-      logger.info("Date mapper format is " + targetDateFormat);
+      LOG.info("Date mapper format is " + targetDateFormat);
 
       if (targetDateFormat.equalsIgnoreCase("epoch")) {
         isEpoch = true;
@@ -68,8 +66,7 @@ public class MapperDate extends Mapper {
           }
           return true;
         } catch (Throwable ex) {
-          logger.fatal("Error creating date format. format="
-            + targetDateFormat + ". " + this.toString());
+          LOG.fatal("Error creating date format. format=" + targetDateFormat + ". " + this.toString());
         }
       } 
     }
@@ -84,7 +81,7 @@ public class MapperDate extends Mapper {
           long ms = Long.parseLong(value.toString()) * 1000;
           value = new Date(ms);
         } else if (targetDateFormatter != null) {
-          if(srcDateFormatter!=null){
+          if (srcDateFormatter != null) {
             Date srcDate = srcDateFormatter.parse(value.toString());
             //set year in src_date when src_date does not have year component
             if (!srcDateFormatter.toPattern().contains("yy")) {
@@ -108,12 +105,9 @@ public class MapperDate extends Mapper {
         }
         jsonObj.put(fieldName, value);
       } catch (Throwable t) {
-        LogFeederUtil.logErrorMessageByInterval(this.getClass()
-            .getSimpleName() + ":apply",
-          "Error applying date transformation. isEpoch="
-            + isEpoch + ", targetateFormat=" + (targetDateFormatter!=null ?targetDateFormatter.toPattern():"")
-            + ", value=" + value + ". " + this.toString(),
-          t, logger, Level.ERROR);
+        LogFeederUtil.logErrorMessageByInterval(this.getClass().getSimpleName() + ":apply", "Error applying date transformation." +
+            " isEpoch=" + isEpoch + ", targetDateFormat=" + (targetDateFormatter != null ? targetDateFormatter.toPattern() : "")
+            + ", value=" + value + ". " + this.toString(), t, LOG, Level.ERROR);
       }
     }
     return value;

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java
index c692a9d..9b6e83c 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldName.java
@@ -30,24 +30,23 @@ import org.apache.log4j.Logger;
  * Overrides the value for the field
  */
 public class MapperFieldName extends Mapper {
-  private static final Logger logger = Logger.getLogger(MapperFieldName.class);
+  private static final Logger LOG = Logger.getLogger(MapperFieldName.class);
 
   private String newValue = null;
 
   @Override
-  public boolean init(String inputDesc, String fieldName,
-      String mapClassCode, Object mapConfigs) {
-    super.init(inputDesc, fieldName, mapClassCode, mapConfigs);
+  public boolean init(String inputDesc, String fieldName, String mapClassCode, Object mapConfigs) {
+    init(inputDesc, fieldName, mapClassCode);
     if (!(mapConfigs instanceof Map)) {
-      logger.fatal("Can't initialize object. mapConfigs class is not of type Map. "
-          + mapConfigs.getClass().getName());
+      LOG.fatal("Can't initialize object. mapConfigs class is not of type Map. " + mapConfigs.getClass().getName());
       return false;
     }
+    
     @SuppressWarnings("unchecked")
     Map<String, Object> mapObjects = (Map<String, Object>) mapConfigs;
     newValue = (String) mapObjects.get("new_fieldname");
     if (StringUtils.isEmpty(newValue)) {
-      logger.fatal("Map field value is empty.");
+      LOG.fatal("Map field value is empty.");
       return false;
     }
     return true;
@@ -59,12 +58,9 @@ public class MapperFieldName extends Mapper {
       jsonObj.remove(fieldName);
       jsonObj.put(newValue, value);
     } else {
-      LogFeederUtil.logErrorMessageByInterval(this.getClass()
-          .getSimpleName() + ":apply",
-          "New fieldName is null, so transformation is not applied. "
-              + this.toString(), null, logger, Level.ERROR);
+      LogFeederUtil.logErrorMessageByInterval(this.getClass().getSimpleName() + ":apply",
+          "New fieldName is null, so transformation is not applied. " + this.toString(), null, LOG, Level.ERROR);
     }
     return value;
   }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java
index e618261..87cda65 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldValue.java
@@ -30,25 +30,25 @@ import org.apache.log4j.Logger;
  * Overrides the value for the field
  */
 public class MapperFieldValue extends Mapper {
-  private Logger logger = Logger.getLogger(MapperFieldValue.class);
+  private static final Logger LOG = Logger.getLogger(MapperFieldValue.class);
+  
   private String prevValue = null;
   private String newValue = null;
 
   @Override
-  public boolean init(String inputDesc, String fieldName,
-      String mapClassCode, Object mapConfigs) {
-    super.init(inputDesc, fieldName, mapClassCode, mapConfigs);
+  public boolean init(String inputDesc, String fieldName, String mapClassCode, Object mapConfigs) {
+    init(inputDesc, fieldName, mapClassCode);
     if (!(mapConfigs instanceof Map)) {
-      logger.fatal("Can't initialize object. mapConfigs class is not of type Map. "
-          + mapConfigs.getClass().getName());
+      LOG.fatal("Can't initialize object. mapConfigs class is not of type Map. " + mapConfigs.getClass().getName());
       return false;
     }
+    
     @SuppressWarnings("unchecked")
     Map<String, Object> mapObjects = (Map<String, Object>) mapConfigs;
     prevValue = (String) mapObjects.get("pre_value");
     newValue = (String) mapObjects.get("post_value");
     if (StringUtils.isEmpty(newValue)) {
-      logger.fatal("Map field value is empty.");
+      LOG.fatal("Map field value is empty.");
       return false;
     }
     return true;
@@ -56,20 +56,15 @@ public class MapperFieldValue extends Mapper {
 
   @Override
   public Object apply(Map<String, Object> jsonObj, Object value) {
-    if (newValue != null) {
-      if (prevValue != null) {
-        if (prevValue.equalsIgnoreCase(value.toString())) {
-          value = newValue;
-          jsonObj.put(fieldName, value);
-        }
+    if (newValue != null && prevValue != null) {
+      if (prevValue.equalsIgnoreCase(value.toString())) {
+        value = newValue;
+        jsonObj.put(fieldName, value);
       }
     } else {
-      LogFeederUtil.logErrorMessageByInterval(
-          this.getClass().getSimpleName() + ":apply",
-          "New value is null, so transformation is not applied. "
-              + this.toString(), null, logger, Level.ERROR);
+      LogFeederUtil.logErrorMessageByInterval(this.getClass().getSimpleName() + ":apply",
+          "New value is null, so transformation is not applied. " + this.toString(), null, LOG, Level.ERROR);
     }
     return value;
   }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/LogFeederAMSClient.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/LogFeederAMSClient.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/LogFeederAMSClient.java
index 0a0f4e9..32dfef2 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/LogFeederAMSClient.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/LogFeederAMSClient.java
@@ -20,26 +20,26 @@
 package org.apache.ambari.logfeeder.metrics;
 
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.log4j.Logger;
 
 // TODO: Refactor for failover
 public class LogFeederAMSClient extends AbstractTimelineMetricsSink {
-  private static final Logger logger = Logger.getLogger(LogFeederAMSClient.class);
+  private static final Logger LOG = Logger.getLogger(LogFeederAMSClient.class);
 
   private String collectorHosts = null;
 
   public LogFeederAMSClient() {
-    collectorHosts = LogFeederUtil
-      .getStringProperty("logfeeder.metrics.collector.hosts");
-    if (collectorHosts != null && collectorHosts.trim().length() == 0) {
+    collectorHosts = LogFeederUtil.getStringProperty("logfeeder.metrics.collector.hosts");
+    if (StringUtils.isBlank(collectorHosts)) {
       collectorHosts = null;
     }
     if (collectorHosts != null) {
       collectorHosts = collectorHosts.trim();
     }
-    logger.info("AMS collector URL=" + collectorHosts);
+    LOG.info("AMS collector URL=" + collectorHosts);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricCount.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricCount.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricCount.java
deleted file mode 100644
index abb84c7..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricCount.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder.metrics;
-
-public class MetricCount {
-  public String metricsName = null;
-  public boolean isPointInTime = false;
-
-  public long count = 0;
-  public long prevLogCount = 0;
-  public long prevLogMS = System.currentTimeMillis();
-  public long prevPublishCount = 0;
-  public int publishCount = 0; // Count of published metrics. Used for first time sending metrics
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricData.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricData.java
new file mode 100644
index 0000000..e7f5d37
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricData.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.metrics;
+
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringStyle;
+
+public class MetricData {
+  public final String metricsName;
+  public final boolean isPointInTime;
+
+  public MetricData(String metricsName, boolean isPointInTime) {
+    this.metricsName = metricsName;
+    this.isPointInTime = isPointInTime;
+  }
+  
+  public long value = 0;
+  public long prevPublishValue = 0;
+  
+  public long prevLogValue = 0;
+  public long prevLogTime = System.currentTimeMillis();
+  
+  public int publishCount = 0; // Number of times the metric was published so far
+  
+  @Override
+  public String toString() {
+    return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsManager.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsManager.java
new file mode 100644
index 0000000..942c0b4
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsManager.java
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.metrics;
+
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.TreeMap;
+
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.apache.log4j.Logger;
+
+public class MetricsManager {
+  private static final Logger LOG = Logger.getLogger(MetricsManager.class);
+
+  private boolean isMetricsEnabled = false;
+  private String nodeHostName = null;
+  private String appId = "logfeeder";
+
+  private long lastPublishTimeMS = 0; // Let's do the first publish immediately
+  private long lastFailedPublishTimeMS = System.currentTimeMillis(); // Reset the clock
+
+  private int publishIntervalMS = 60 * 1000;
+  private int maxMetricsBuffer = 60 * 60 * 1000; // If AMS is down, we should not keep the metrics in memory forever
+  private HashMap<String, TimelineMetric> metricsMap = new HashMap<String, TimelineMetric>();
+  private LogFeederAMSClient amsClient = null;
+
+  public void init() {
+    LOG.info("Initializing MetricsManager()");
+    amsClient = new LogFeederAMSClient();
+
+    if (amsClient.getCollectorUri(null) != null) {
+      findNodeHostName();
+      if (nodeHostName == null) {
+        isMetricsEnabled = false;
+        LOG.error("Failed getting hostname for node. Disabling publishing LogFeeder metrics");
+      } else {
+        isMetricsEnabled = true;
+        LOG.info("LogFeeder Metrics is enabled. Metrics host=" + amsClient.getCollectorUri(null));
+      }
+    } else {
+      LOG.info("LogFeeder Metrics publish is disabled");
+    }
+  }
+
+  private void findNodeHostName() {
+    nodeHostName = LogFeederUtil.getStringProperty("node.hostname");
+    if (nodeHostName == null) {
+      try {
+        nodeHostName = InetAddress.getLocalHost().getHostName();
+      } catch (Throwable e) {
+        LOG.warn("Error getting hostname using InetAddress.getLocalHost().getHostName()", e);
+      }
+    }
+    if (nodeHostName == null) {
+      try {
+        nodeHostName = InetAddress.getLocalHost().getCanonicalHostName();
+      } catch (Throwable e) {
+        LOG.warn("Error getting hostname using InetAddress.getLocalHost().getCanonicalHostName()", e);
+      }
+    }
+  }
+
+  public boolean isMetricsEnabled() {
+    return isMetricsEnabled;
+  }
+
+  public synchronized void useMetrics(List<MetricData> metricsList) {
+    if (!isMetricsEnabled) {
+      return;
+    }
+    LOG.info("useMetrics() metrics.size=" + metricsList.size());
+    long currMS = System.currentTimeMillis();
+    
+    gatherMetrics(metricsList, currMS);
+    publishMetrics(currMS);
+  }
+
+  private void gatherMetrics(List<MetricData> metricsList, long currMS) {
+    Long currMSLong = new Long(currMS);
+    for (MetricData metric : metricsList) {
+      if (metric.metricsName == null) {
+        LOG.debug("metric.metricsName is null");
+        continue;
+      }
+      long currCount = metric.value;
+      if (!metric.isPointInTime && metric.publishCount > 0 && currCount <= metric.prevPublishValue) {
+        LOG.debug("Nothing changed. " + metric.metricsName + ", currCount=" + currCount + ", prevPublishCount=" +
+            metric.prevPublishValue);
+        continue;
+      }
+      metric.publishCount++;
+
+      LOG.debug("Ensuring metrics=" + metric.metricsName);
+      TimelineMetric timelineMetric = metricsMap.get(metric.metricsName);
+      if (timelineMetric == null) {
+        LOG.debug("Creating new metric obbject for " + metric.metricsName);
+        timelineMetric = new TimelineMetric();
+        timelineMetric.setMetricName(metric.metricsName);
+        timelineMetric.setHostName(nodeHostName);
+        timelineMetric.setAppId(appId);
+        timelineMetric.setStartTime(currMS);
+        timelineMetric.setType("Long");
+        timelineMetric.setMetricValues(new TreeMap<Long, Double>());
+
+        metricsMap.put(metric.metricsName, timelineMetric);
+      }
+      
+      LOG.debug("Adding metrics=" + metric.metricsName);
+      if (metric.isPointInTime) {
+        timelineMetric.getMetricValues().put(currMSLong, new Double(currCount));
+      } else {
+        Double value = timelineMetric.getMetricValues().get(currMSLong);
+        if (value == null) {
+          value = new Double(0);
+        }
+        value += (currCount - metric.prevPublishValue);
+        timelineMetric.getMetricValues().put(currMSLong, value);
+        metric.prevPublishValue = currCount;
+      }
+    }
+  }
+
+  private void publishMetrics(long currMS) {
+    if (!metricsMap.isEmpty() && currMS - lastPublishTimeMS > publishIntervalMS) {
+      try {
+        TimelineMetrics timelineMetrics = new TimelineMetrics();
+        timelineMetrics.setMetrics(new ArrayList<TimelineMetric>(metricsMap.values()));
+        amsClient.emitMetrics(timelineMetrics);
+        
+        LOG.info("Published " + timelineMetrics.getMetrics().size() + " metrics to AMS");
+        metricsMap.clear();
+        lastPublishTimeMS = currMS;
+      } catch (Throwable t) {
+        LOG.warn("Error sending metrics to AMS.", t);
+        if (currMS - lastFailedPublishTimeMS > maxMetricsBuffer) {
+          LOG.error("AMS was not sent for last " + maxMetricsBuffer / 1000 +
+              " seconds. Purging it and will start rebuilding it again");
+          metricsMap.clear();
+          lastFailedPublishTimeMS = currMS;
+        }
+      }
+    } else {
+      LOG.info("Not publishing metrics. metrics.size()=" + metricsMap.size() + ", lastPublished=" +
+          (currMS - lastPublishTimeMS) / 1000 + " seconds ago, intervalConfigured=" + publishIntervalMS / 1000);
+    }
+  }
+}
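
The renamed MetricsManager is driven with MetricData holders: callers bump the public value field and periodically hand the containers to useMetrics(), which buffers and publishes to AMS once the publish interval has elapsed. A rough calling sketch, assuming LogFeeder's properties (including logfeeder.metrics.collector.hosts) have been loaded so init() enables publishing; the metric name is a made-up example:

import java.util.Arrays;

import org.apache.ambari.logfeeder.metrics.MetricData;
import org.apache.ambari.logfeeder.metrics.MetricsManager;

public class MetricsManagerExample {
  public static void main(String[] args) {
    MetricsManager metricsManager = new MetricsManager();
    metricsManager.init(); // stays disabled if no collector host is configured

    // A cumulative counter (isPointInTime = false): only the delta since the
    // previous publish is added to the emitted value.
    MetricData readLines = new MetricData("input.files.read_lines", false);
    for (int i = 0; i < 1000; i++) {
      readLines.value++;
    }

    metricsManager.useMetrics(Arrays.asList(readLines));
  }
}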

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsMgr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsMgr.java
deleted file mode 100644
index 33397c7..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/metrics/MetricsMgr.java
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder.metrics;
-
-import java.net.InetAddress;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.TreeMap;
-
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.apache.log4j.Logger;
-
-public class MetricsMgr {
-  private static final Logger logger = Logger.getLogger(MetricsMgr.class);
-
-  private boolean isMetricsEnabled = false;
-  private String nodeHostName = null;
-  private String appId = "logfeeder";
-
-  private long lastPublishTimeMS = 0; // Let's do the first publish immediately
-  private long lastFailedPublishTimeMS = System.currentTimeMillis(); // Reset the clock
-
-  private int publishIntervalMS = 60 * 1000;
-  private int maxMetricsBuffer = 60 * 60 * 1000; // If AMS is down, we should not keep
-  // the metrics in memory forever
-  private HashMap<String, TimelineMetric> metricsMap = new HashMap<String, TimelineMetric>();
-  private LogFeederAMSClient amsClient = null;
-
-  public void init() {
-    logger.info("Initializing MetricsMgr()");
-    amsClient = new LogFeederAMSClient();
-
-    if (amsClient.getCollectorUri(null) != null) {
-      nodeHostName = LogFeederUtil.getStringProperty("node.hostname");
-      if (nodeHostName == null) {
-        try {
-          nodeHostName = InetAddress.getLocalHost().getHostName();
-        } catch (Throwable e) {
-          logger.warn(
-            "Error getting hostname using InetAddress.getLocalHost().getHostName()",
-            e);
-        }
-        if (nodeHostName == null) {
-          try {
-            nodeHostName = InetAddress.getLocalHost()
-              .getCanonicalHostName();
-          } catch (Throwable e) {
-            logger.warn(
-              "Error getting hostname using InetAddress.getLocalHost().getCanonicalHostName()",
-              e);
-          }
-        }
-      }
-      if (nodeHostName == null) {
-        isMetricsEnabled = false;
-        logger.error("Failed getting hostname for node. Disabling publishing LogFeeder metrics");
-      } else {
-        isMetricsEnabled = true;
-        logger.info("LogFeeder Metrics is enabled. Metrics host="
-          + amsClient.getCollectorUri(null));
-      }
-    } else {
-      logger.info("LogFeeder Metrics publish is disabled");
-    }
-  }
-
-  public boolean isMetricsEnabled() {
-    return isMetricsEnabled;
-  }
-
-  synchronized public void useMetrics(List<MetricCount> metricsList) {
-    if (!isMetricsEnabled) {
-      return;
-    }
-    logger.info("useMetrics() metrics.size=" + metricsList.size());
-    long currMS = System.currentTimeMillis();
-    Long currMSLong = new Long(currMS);
-    for (MetricCount metric : metricsList) {
-      if (metric.metricsName == null) {
-        logger.debug("metric.metricsName is null");
-        // Metrics is not meant to be published
-        continue;
-      }
-      long currCount = metric.count;
-      if (!metric.isPointInTime && metric.publishCount > 0
-        && currCount <= metric.prevPublishCount) {
-        // No new data added, so let's ignore it
-        logger.debug("Nothing changed. " + metric.metricsName
-          + ", currCount=" + currCount + ", prevPublishCount="
-          + metric.prevPublishCount);
-        continue;
-      }
-      metric.publishCount++;
-
-      TimelineMetric timelineMetric = metricsMap.get(metric.metricsName);
-      if (timelineMetric == null) {
-        logger.debug("Creating new metric obbject for "
-          + metric.metricsName);
-        // First time for this metric
-        timelineMetric = new TimelineMetric();
-        timelineMetric.setMetricName(metric.metricsName);
-        timelineMetric.setHostName(nodeHostName);
-        timelineMetric.setAppId(appId);
-        timelineMetric.setStartTime(currMS);
-        timelineMetric.setType("Long");
-        timelineMetric.setMetricValues(new TreeMap<Long, Double>());
-
-        metricsMap.put(metric.metricsName, timelineMetric);
-      }
-      logger.debug("Adding metrics=" + metric.metricsName);
-      if (metric.isPointInTime) {
-        timelineMetric.getMetricValues().put(currMSLong,
-          new Double(currCount));
-      } else {
-        Double value = timelineMetric.getMetricValues().get(currMSLong);
-        if (value == null) {
-          value = new Double(0);
-        }
-        value += (currCount - metric.prevPublishCount);
-        timelineMetric.getMetricValues().put(currMSLong, value);
-        metric.prevPublishCount = currCount;
-      }
-    }
-
-    if (metricsMap.size() > 0
-      && currMS - lastPublishTimeMS > publishIntervalMS) {
-      try {
-        // Time to publish
-        TimelineMetrics timelineMetrics = new TimelineMetrics();
-        List<TimelineMetric> timeLineMetricList = new ArrayList<TimelineMetric>();
-        timeLineMetricList.addAll(metricsMap.values());
-        timelineMetrics.setMetrics(timeLineMetricList);
-        amsClient.emitMetrics(timelineMetrics);
-        logger.info("Published " + timeLineMetricList.size()
-          + " metrics to AMS");
-        metricsMap.clear();
-        timeLineMetricList.clear();
-        lastPublishTimeMS = currMS;
-      } catch (Throwable t) {
-        logger.warn("Error sending metrics to AMS.", t);
-        if (currMS - lastFailedPublishTimeMS > maxMetricsBuffer) {
-          logger.error("AMS was not sent for last "
-            + maxMetricsBuffer
-            / 1000
-            + " seconds. Purging it and will start rebuilding it again");
-          metricsMap.clear();
-          lastFailedPublishTimeMS = currMS;
-        }
-      }
-    } else {
-      logger.info("Not publishing metrics. metrics.size()="
-        + metricsMap.size() + ", lastPublished="
-        + (currMS - lastPublishTimeMS) / 1000
-        + " seconds ago, intervalConfigured=" + publishIntervalMS
-        / 1000);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java
index 6f84251..bc6a553 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/Output.java
@@ -26,16 +26,19 @@ import java.util.Map.Entry;
 
 import org.apache.ambari.logfeeder.common.ConfigBlock;
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.log4j.Logger;
 
 public abstract class Output extends ConfigBlock {
-  private static final Logger logger = Logger.getLogger(Output.class);
+  private static final Logger LOG = Logger.getLogger(Output.class);
 
   private String destination = null;
 
-  protected MetricCount writeBytesMetric = new MetricCount();
+  protected MetricData writeBytesMetric = new MetricData(getWriteBytesMetricName(), false);
+  protected String getWriteBytesMetricName() {
+    return null;
+  }
 
   @Override
   public String getShortDescription() {
@@ -67,7 +70,7 @@ public abstract class Output extends ConfigBlock {
    * Extend this method to clean up
    */
   public void close() {
-    logger.info("Calling base close()." + getShortDescription());
+    LOG.info("Calling base close()." + getShortDescription());
     isClosed = true;
   }
 
@@ -91,7 +94,7 @@ public abstract class Output extends ConfigBlock {
   }
 
   @Override
-  public void addMetricsContainers(List<MetricCount> metricsList) {
+  public void addMetricsContainers(List<MetricData> metricsList) {
     super.addMetricsContainers(metricsList);
     metricsList.add(writeBytesMetric);
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputData.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputData.java
index 4a408f9..c46086e 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputData.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputData.java
@@ -27,18 +27,16 @@ import org.apache.ambari.logfeeder.input.InputMarker;
  * This contains the output json object and InputMarker.
  */
 public class OutputData {
-  Map<String, Object> jsonObj;
-  InputMarker inputMarker;
+  public final Map<String, Object> jsonObj;
+  public final InputMarker inputMarker;
 
   public OutputData(Map<String, Object> jsonObj, InputMarker inputMarker) {
-    super();
     this.jsonObj = jsonObj;
     this.inputMarker = inputMarker;
   }
 
   @Override
   public String toString() {
-    return "OutputData [jsonObj=" + jsonObj + ", inputMarker="
-      + inputMarker + "]";
+    return "OutputData [jsonObj=" + jsonObj + ", inputMarker=" + inputMarker + "]";
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputDevNull.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputDevNull.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputDevNull.java
index 2d41a0b..fa4e17b 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputDevNull.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputDevNull.java
@@ -28,16 +28,15 @@ import org.apache.log4j.Logger;
  */
 public class OutputDevNull extends Output {
 
-  private static Logger logger = Logger.getLogger(OutputDevNull.class);
+  private static final Logger LOG = Logger.getLogger(OutputDevNull.class);
 
   @Override
   public void write(String block, InputMarker inputMarker){
-    logger.trace("Ignore log block: " + block);
+    LOG.trace("Ignore log block: " + block);
   }
 
   @Override
   public void copyFile(File inputFile, InputMarker inputMarker) {
-    throw new UnsupportedOperationException(
-        "copyFile method is not yet supported for output=dev_null");
+    throw new UnsupportedOperationException("copyFile method is not yet supported for output=dev_null");
   }
 }


[13/50] [abbrv] ambari git commit: AMBARI-18196. Logsearch: Upgrade from jersey 1.x to jersey 2.x (oleewere)

Posted by ol...@apache.org.
AMBARI-18196. Logsearch: Upgrade from jersey 1.x to jersey 2.x (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/82e2d1b1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/82e2d1b1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/82e2d1b1

Branch: refs/heads/branch-dev-logsearch
Commit: 82e2d1b15ad4fd35f755d4907723770f8a296fb6
Parents: df3de10
Author: oleewere <ol...@gmail.com>
Authored: Tue Aug 23 18:55:59 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:33:59 2016 +0200

----------------------------------------------------------------------
 .../ambari-logsearch-portal/pom.xml             |  54 +++++-
 .../logsearch/util/ExternalServerClient.java    | 191 ++++---------------
 .../src/main/webapp/WEB-INF/web.xml             |  12 +-
 3 files changed, 85 insertions(+), 172 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/82e2d1b1/ambari-logsearch/ambari-logsearch-portal/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/pom.xml b/ambari-logsearch/ambari-logsearch-portal/pom.xml
index 8d345d9..b36c9bb 100755
--- a/ambari-logsearch/ambari-logsearch-portal/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/pom.xml
@@ -33,7 +33,7 @@
     <spring.version>4.2.5.RELEASE</spring.version>
     <spring.security.version>4.0.4.RELEASE</spring.security.version>
     <spring.ldap.version>2.0.4.RELEASE</spring.ldap.version>
-    <jersey.version>1.19</jersey.version>
+    <jersey.version>2.23.2</jersey.version>
     <jetty-version>9.2.11.v20150529</jetty-version>
     <swagger.version>1.5.8</swagger.version>
   </properties>
@@ -521,9 +521,9 @@
       <version>${spring.security.version}</version>
     </dependency>
     <dependency>
-      <groupId>com.sun.jersey.contribs</groupId>
-      <artifactId>jersey-spring</artifactId>
-      <version>${jersey.version}</version>
+      <groupId>org.glassfish.jersey.ext</groupId>
+      <artifactId>jersey-spring3</artifactId>
+      <version>2.23.2</version>
       <exclusions>
         <exclusion>
           <groupId>org.springframework</groupId>
@@ -532,10 +532,30 @@
       </exclusions>
     </dependency>
     <dependency>
-    <groupId>com.sun.jersey.contribs</groupId>
-    <artifactId>jersey-apache-client</artifactId>
-    <version>${jersey.version}</version>
-  </dependency>
+      <groupId>org.glassfish.jersey.connectors</groupId>
+      <artifactId>jersey-apache-connector</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.core</groupId>
+      <artifactId>jersey-client</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.media</groupId>
+      <artifactId>jersey-media-json-jettison</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.media</groupId>
+      <artifactId>jersey-media-moxy</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.glassfish.jersey.core</groupId>
+      <artifactId>jersey-common</artifactId>
+      <version>${jersey.version}</version>
+    </dependency>
   <dependency>
     <groupId>javax.servlet</groupId>
     <artifactId>javax.servlet-api</artifactId>
@@ -590,6 +610,22 @@
           <groupId>org.mortbay.jetty</groupId>
           <artifactId>jetty-util</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jetty-util</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-json</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jersey</groupId>
+          <artifactId>jersey-server</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -696,7 +732,7 @@
     </dependency>
     <dependency>
       <groupId>io.swagger</groupId>
-      <artifactId>swagger-jaxrs</artifactId>
+      <artifactId>swagger-jersey2-jaxrs</artifactId>
       <version>${swagger.version}</version>
       <exclusions>
         <exclusion>
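
The dependency change above moves the client side from the com.sun.jersey 1.x API (Client.create(), WebResource, HTTPBasicAuthFilter) to the JAX-RS 2 / Jersey 2 API (ClientBuilder, WebTarget, HttpAuthenticationFeature), as the rewritten ExternalServerClient below shows. A generic Jersey 2 client sketch for comparison; the URL, path and credentials are placeholders, not values taken from Log Search:

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;

import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;

public class Jersey2ClientExample {
  public static void main(String[] args) {
    Client client = ClientBuilder.newClient();
    client.register(HttpAuthenticationFeature.basic("admin", "admin")); // placeholder credentials

    String response = client
        .target("http://external-auth-host:8080") // placeholder external server URL
        .path("/api/v1/clusters")                 // placeholder path
        .request(MediaType.APPLICATION_JSON)
        .get(String.class);

    System.out.println(response);
    client.close();
  }
}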

http://git-wip-us.apache.org/repos/asf/ambari/blob/82e2d1b1/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ExternalServerClient.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ExternalServerClient.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ExternalServerClient.java
index 32e547e..882a8bd 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ExternalServerClient.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ExternalServerClient.java
@@ -18,39 +18,36 @@
  */
 package org.apache.ambari.logsearch.util;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.UnknownHostException;
 import java.util.List;
-import java.util.Map.Entry;
+import java.util.Map;
 
 import javax.annotation.PostConstruct;
-import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.client.Invocation;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
 
 import org.apache.ambari.logsearch.web.security.LogsearchAbstractAuthenticationProvider;
-import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
+import org.glassfish.jersey.client.JerseyClient;
+import org.glassfish.jersey.client.JerseyClientBuilder;
+import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;
+import org.glassfish.jersey.filter.LoggingFilter;
 import org.springframework.stereotype.Component;
 
-import com.sun.jersey.api.client.Client;
-import com.sun.jersey.api.client.ClientHandlerException;
-import com.sun.jersey.api.client.ClientResponse;
-import com.sun.jersey.api.client.UniformInterfaceException;
-import com.sun.jersey.api.client.WebResource;
-import com.sun.jersey.api.client.filter.HTTPBasicAuthFilter;
-import com.sun.jersey.api.client.filter.LoggingFilter;
-import com.sun.jersey.client.apache.ApacheHttpClient;
-import com.sun.jersey.client.apache.config.DefaultApacheHttpClientConfig;
-import com.sun.jersey.core.util.MultivaluedMapImpl;
-
 /**
 * Layer to send REST requests to an external server using the Jersey client
  */
 @Component
 public class ExternalServerClient {
   private static Logger LOG = Logger.getLogger(ExternalServerClient.class);
-  private static final ThreadLocal<Client> localJerseyClient = new ThreadLocal<Client>();
-  private DefaultApacheHttpClientConfig defaultConfig = new DefaultApacheHttpClientConfig();
+  private static final ThreadLocal<JerseyClient> localJerseyClient = new ThreadLocal<JerseyClient>(){
+    @Override
+    protected JerseyClient initialValue() {
+      return JerseyClientBuilder.createClient();
+    }
+  };
   private String hostURL = "http://host:ip";// default
   private boolean enableLog = false;// default
 
@@ -61,151 +58,39 @@ public class ExternalServerClient {
             + "external_auth.host_url", hostURL);
   }
 
-  private Client getJerseyClient() {
-    Client jerseyClient = localJerseyClient.get();
-    if (jerseyClient == null) {
-      jerseyClient = ApacheHttpClient.create(defaultConfig);
-      localJerseyClient.set(jerseyClient);
-    }
-    return jerseyClient;
-  }
-
   /**
-   * Send GET Request to  External server
-   * @param url
-   * @param klass
-   * @param queryParam
-   * @param username
-   * @param password
-   * @return Response Object 
-   * @throws UnknownHostException
-   * @throws Exception
+   * Send GET request to an external server
    */
   @SuppressWarnings({ "unchecked", "rawtypes" })
-  public Object sendGETRequest(String url, Class klass,
-      MultivaluedMapImpl queryParam, String username, String password)
-      throws UnknownHostException, Exception {
-    // add host url
+  public Object sendGETRequest(String url, Class klass, MultivaluedMap<String, String> queryParam,
+                               String username, String password)
+      throws Exception {
     url = hostURL + url;
-    String parameters = getQueryParameter(queryParam);
-    LOG.debug("URL: " + url + " query parameters are : " + parameters);
-    WebResource.Builder builder = buildWebResourceBuilder(url, queryParam,
-        username, password);
-    try {
-      return builder.get(klass);
-    } catch (WebApplicationException webApplicationException) {
-      String errMsg = webApplicationExceptionHandler(webApplicationException,
-          url);
-      throw new Exception(errMsg);
-    } catch (UniformInterfaceException uniformInterfaceException) {
-      String errMsg = uniformInterfaceExceptionHandler(
-          uniformInterfaceException, url);
-      throw new Exception(errMsg);
-    } catch (ClientHandlerException clientHandlerException) {
-      String errMsg = clientHandlerExceptionHandler(clientHandlerException, url);
-      throw new Exception(errMsg);
-    } catch (Exception e) {
-      Object response = builder.get(Object.class);
-      String errMsg = "URL: " + url + response.toString();
-      LOG.error(errMsg);
-      throw new Exception(errMsg);
-    } finally {
-      cleanup();
-    }
-  }
+    JerseyClient client = localJerseyClient.get();
+    HttpAuthenticationFeature authFeature = HttpAuthenticationFeature.basicBuilder().build();
 
-  private WebResource.Builder buildWebResourceBuilder(String url,
-      MultivaluedMapImpl queryParam, String username, String password) {
-    WebResource webResource = getJerseyClient().resource(url);
-    // add filter
+    client.register(authFeature);
     if (enableLog) {
-      webResource.addFilter(new LoggingFilter());
+      client.register(LoggingFilter.class);
     }
-    getJerseyClient().addFilter(new HTTPBasicAuthFilter(username, password));
-    // add query param
-    if (queryParam != null) {
-      webResource = webResource.queryParams(queryParam);
-    }
-    WebResource.Builder builder = webResource.getRequestBuilder();
-    return builder;
-  }
 
-  private String webApplicationExceptionHandler(
-      WebApplicationException webApplicationException, String url) {
-    Object object = null;
-    try {
-      object = webApplicationException.getResponse().getEntity();
-    } catch (Exception e) {
-      LOG.error(e.getLocalizedMessage());
-    }
-    String errMsg = null;
-    if (object != null) {
-      errMsg = object.toString();
-    } else {
-      errMsg = webApplicationException.getMessage();
+    WebTarget target = client.target(url);
+    LOG.debug("URL: " + url);
+    for (Map.Entry<String, List<String>> entry : queryParam.entrySet()) {
+      target = target.queryParam(entry.getKey(), entry.getValue().toArray());
+      LOG.debug(
+        String.format("Query parameter: name - %s ; value - %s", entry.getKey(), StringUtils.join(entry.getValue(), ',')));
     }
-    errMsg = "URL: " + url + errMsg;
-    LOG.error(errMsg);
-    return errMsg;
-  }
-
-  private String uniformInterfaceExceptionHandler(
-      UniformInterfaceException uniformInterfaceException, String url) {
-    Object object = null;
-    String errMsg = null;
-    ClientResponse clientResponse = uniformInterfaceException.getResponse();
+    target
+      .property(HttpAuthenticationFeature.HTTP_AUTHENTICATION_BASIC_USERNAME, username)
+      .property(HttpAuthenticationFeature.HTTP_AUTHENTICATION_BASIC_PASSWORD, password);
+    Invocation.Builder invocationBuilder =  target.request(MediaType.APPLICATION_JSON_TYPE);
     try {
-      object = clientResponse.getEntity(Object.class);
-      if (object != null) {
-        errMsg = object.toString();
-      }
+      return invocationBuilder.get().readEntity(klass);
     } catch (Exception e) {
-      InputStream inputStream = clientResponse.getEntityInputStream();
-      try {
-        errMsg = IOUtils.toString(inputStream);
-      } catch (IOException e1) {
-        LOG.error(e.getLocalizedMessage());
-      }
-    }
-    if (errMsg == null) {
-      errMsg = uniformInterfaceException.getLocalizedMessage();
-    }
-    LOG.error("url :" + url + " Response : " + errMsg);
-    return errMsg;
-  }
-
-  private String clientHandlerExceptionHandler(
-      ClientHandlerException clientHandlerException, String url) {
-    String errMsg = clientHandlerException.getLocalizedMessage();
-    errMsg = "URL: " + url + errMsg;
-    LOG.error(errMsg);
-    return errMsg;
-  }
-
-  private String getQueryParameter(MultivaluedMapImpl queryParam) {
-    StringBuilder builder = new StringBuilder();
-    if (queryParam != null) {
-      builder.append(" Query param :");
-      for (Entry<String, List<String>> entry : queryParam.entrySet()) {
-        String name = entry.getKey();
-        builder.append(" name : " + name + " " + "values : [");
-        List<String> valuesList = entry.getValue();
-        if (valuesList != null) {
-          for (int index = 0; index < valuesList.size(); index++) {
-            String value = valuesList.get(index);
-            if (index > 0) {
-              builder.append(",");
-            }
-            builder.append(value);
-          }
-        }
-        builder.append("]");
-      }
+      throw new Exception(e.getCause());
+    } finally {
+      localJerseyClient.remove();
     }
-    return builder.toString();
-  }
-
-  private void cleanup() {
-    localJerseyClient.remove();
   }
 }

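For reference, a minimal usage sketch of the rewritten client (illustrative only, not part of the commit; in the application the class is a Spring @Component wired by the container, and the endpoint, credentials and response type below are hypothetical):

import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.MultivaluedMap;

import org.apache.ambari.logsearch.util.ExternalServerClient;

public class ExternalServerClientUsageSketch {
  public static void main(String[] args) throws Exception {
    // Direct instantiation is for illustration only; normally Spring injects this bean
    // and its @PostConstruct method resolves the external host URL from configuration.
    ExternalServerClient client = new ExternalServerClient();

    MultivaluedMap<String, String> queryParams = new MultivaluedHashMap<>();
    queryParams.putSingle("fields", "privileges/PrivilegeInfo");

    // Sends GET <hostURL>/api/v1/users/admin with HTTP Basic credentials applied per request
    // through the registered HttpAuthenticationFeature.
    Object body = client.sendGETRequest("/api/v1/users/admin", String.class, queryParams, "admin", "admin");
    System.out.println(body);
  }
}
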
http://git-wip-us.apache.org/repos/asf/ambari/blob/82e2d1b1/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
index aee16bf..dbe5210 100755
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
@@ -48,18 +48,10 @@
 	<!-- Servlet mapping for REST -->
 	<servlet>
 		<servlet-name>REST service</servlet-name>
-		<servlet-class>com.sun.jersey.spi.spring.container.servlet.SpringServlet</servlet-class>
+		<servlet-class>org.glassfish.jersey.servlet.ServletContainer</servlet-class>
 		<init-param>
 			<param-name>jersey.config.server.provider.packages</param-name>
-			<param-value>org.apache.ambari.logsearch.rest</param-value>
-		</init-param>
-		<init-param>
-			<param-name>com.sun.jersey.spi.container.ContainerRequestFilters</param-name>
-			<param-value>org.apache.ambari.logsearch.common.RESTAPIFilter</param-value>
-		</init-param>
-		<init-param>
-			<param-name>com.sun.jersey.spi.container.ContainerResponseFilters</param-name>
-			<param-value>org.apache.ambari.logsearch.common.RESTAPIFilter</param-value>
+			<param-value>org.apache.ambari.logsearch.rest,io.swagger.jaxrs.listing</param-value>
 		</init-param>
 		<load-on-startup>1</load-on-startup>
 	</servlet>

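For reference, the programmatic Jersey 2 equivalent of the servlet configuration above (illustrative only, not part of the commit; the class name is hypothetical):

import org.glassfish.jersey.server.ResourceConfig;

public class LogsearchRestConfigSketch extends ResourceConfig {
  public LogsearchRestConfigSketch() {
    // Mirrors the jersey.config.server.provider.packages init-param: scan the REST
    // resources plus Swagger's listing providers in io.swagger.jaxrs.listing.
    packages("org.apache.ambari.logsearch.rest", "io.swagger.jaxrs.listing");
  }
}
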

[21/50] [abbrv] ambari git commit: AMBARI-18236. Fix package structure in Logfeeder (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
AMBARI-18236. Fix package structure in Logfeeder (Miklos Gergely via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/df3de10d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/df3de10d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/df3de10d

Branch: refs/heads/branch-dev-logsearch
Commit: df3de10d817b03b138e44d4d0ed258533fbae7c5
Parents: 0dbc40b
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Tue Aug 23 17:55:15 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:33:59 2016 +0200

----------------------------------------------------------------------
 .../org/apache/ambari/logfeeder/AliasUtil.java  |  99 ----
 .../apache/ambari/logfeeder/ConfigBlock.java    | 260 ---------
 .../org/apache/ambari/logfeeder/InputMgr.java   | 451 ---------------
 .../org/apache/ambari/logfeeder/LogFeeder.java  |  10 +-
 .../ambari/logfeeder/LogFeederAMSClient.java    |  80 ---
 .../apache/ambari/logfeeder/LogFeederUtil.java  | 556 ------------------
 .../apache/ambari/logfeeder/MetricCount.java    |  31 --
 .../org/apache/ambari/logfeeder/MetricsMgr.java | 177 ------
 .../org/apache/ambari/logfeeder/MurmurHash.java | 163 ------
 .../org/apache/ambari/logfeeder/OutputMgr.java  | 262 ---------
 .../ambari/logfeeder/common/ConfigBlock.java    | 263 +++++++++
 .../logfeeder/common/LogfeederException.java    |  31 ++
 .../logfeeder/exception/LogfeederException.java |  31 --
 .../apache/ambari/logfeeder/filter/Filter.java  |  16 +-
 .../ambari/logfeeder/filter/FilterGrok.java     |   6 +-
 .../ambari/logfeeder/filter/FilterJSON.java     |   4 +-
 .../ambari/logfeeder/filter/FilterKeyValue.java |   6 +-
 .../apache/ambari/logfeeder/input/Input.java    |   9 +-
 .../ambari/logfeeder/input/InputFile.java       |   2 +-
 .../apache/ambari/logfeeder/input/InputMgr.java | 451 +++++++++++++++
 .../ambari/logfeeder/input/InputS3File.java     |   4 +-
 .../ambari/logfeeder/input/InputSimulate.java   |   2 +-
 .../logconfig/FetchConfigFromSolr.java          |   2 +-
 .../logfeeder/logconfig/LogfeederScheduler.java |   2 +-
 .../logconfig/filter/ApplyLogFilter.java        |   2 +-
 .../logconfig/filter/FilterLogData.java         |   2 +-
 .../ambari/logfeeder/mapper/MapperDate.java     |   2 +-
 .../logfeeder/mapper/MapperFieldName.java       |   2 +-
 .../logfeeder/mapper/MapperFieldValue.java      |   2 +-
 .../logfeeder/metrics/LogFeederAMSClient.java   |  81 +++
 .../ambari/logfeeder/metrics/MetricCount.java   |  31 ++
 .../ambari/logfeeder/metrics/MetricsMgr.java    | 178 ++++++
 .../apache/ambari/logfeeder/output/Output.java  |   6 +-
 .../ambari/logfeeder/output/OutputFile.java     |   2 +-
 .../ambari/logfeeder/output/OutputHDFSFile.java |   2 +-
 .../ambari/logfeeder/output/OutputKafka.java    |   2 +-
 .../ambari/logfeeder/output/OutputMgr.java      | 263 +++++++++
 .../ambari/logfeeder/output/OutputS3File.java   |   4 +-
 .../ambari/logfeeder/output/OutputSolr.java     |   2 +-
 .../logfeeder/output/S3LogPathResolver.java     |   4 +-
 .../logfeeder/output/S3OutputConfiguration.java |   4 +-
 .../ambari/logfeeder/output/S3Uploader.java     |   4 +-
 .../org/apache/ambari/logfeeder/s3/AWSUtil.java |  84 ---
 .../org/apache/ambari/logfeeder/s3/S3Util.java  | 186 -------
 .../apache/ambari/logfeeder/util/AWSUtil.java   |  84 +++
 .../apache/ambari/logfeeder/util/AliasUtil.java |  99 ++++
 .../ambari/logfeeder/util/LogFeederUtil.java    | 557 +++++++++++++++++++
 .../ambari/logfeeder/util/MurmurHash.java       | 163 ++++++
 .../apache/ambari/logfeeder/util/S3Util.java    | 186 +++++++
 .../apache/ambari/logfeeder/util/SolrUtil.java  |   1 -
 .../ambari/logfeeder/filter/FilterGrokTest.java |   2 +-
 .../ambari/logfeeder/filter/FilterJSONTest.java |   6 +-
 .../logfeeder/filter/FilterKeyValueTest.java    |   2 +-
 .../ambari/logfeeder/input/InputFileTest.java   |   1 -
 .../ambari/logfeeder/mapper/MapperDateTest.java |   2 +-
 .../logfeeder/output/S3LogPathResolverTest.java |   3 +-
 .../ambari/logfeeder/output/S3UploaderTest.java |   2 +-
 .../apache/ambari/logfeeder/s3/AWSUtilTest.java |  27 -
 .../apache/ambari/logfeeder/s3/S3UtilTest.java  |  38 --
 .../ambari/logfeeder/util/AWSUtilTest.java      |  29 +
 .../ambari/logfeeder/util/S3UtilTest.java       |  40 ++
 61 files changed, 2519 insertions(+), 2504 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/AliasUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/AliasUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/AliasUtil.java
deleted file mode 100644
index 44bc829..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/AliasUtil.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder;
-
-import java.io.File;
-import java.util.HashMap;
-
-import org.apache.log4j.Logger;
-
-public class AliasUtil {
-
-  private static Logger logger = Logger.getLogger(AliasUtil.class);
-
-  private static AliasUtil instance = null;
-
-  private static String aliasConfigJson = "alias_config.json";
-
-  private HashMap<String, Object> aliasMap = null;
-
-  public static enum ALIAS_TYPE {
-    INPUT, FILTER, MAPPER, OUTPUT
-  }
-
-  public static enum ALIAS_PARAM {
-    KLASS
-  }
-
-  private AliasUtil() {
-    init();
-  }
-
-  public static AliasUtil getInstance() {
-    if (instance == null) {
-      synchronized (AliasUtil.class) {
-        if (instance == null) {
-          instance = new AliasUtil();
-        }
-      }
-    }
-    return instance;
-  }
-
-  /**
-   */
-  private void init() {
-    File jsonFile = LogFeederUtil.getFileFromClasspath(aliasConfigJson);
-    if (jsonFile != null) {
-      this.aliasMap = LogFeederUtil.readJsonFromFile(jsonFile);
-    }
-
-  }
-
-
-  public String readAlias(String key, ALIAS_TYPE aliastype, ALIAS_PARAM aliasParam) {
-    String result = key;// key as a default value;
-    HashMap<String, String> aliasInfo = getAliasInfo(key, aliastype);
-    String value = aliasInfo.get(aliasParam.name().toLowerCase());
-    if (value != null && !value.isEmpty()) {
-      result = value;
-      logger.debug("Alias found for key :" + key + ",  param :" + aliasParam.name().toLowerCase() + ", value :"
-        + value + " aliastype:" + aliastype.name());
-    } else {
-      logger.debug("Alias not found for key :" + key + ", param :" + aliasParam.name().toLowerCase());
-    }
-    return result;
-  }
-
-  @SuppressWarnings("unchecked")
-  private HashMap<String, String> getAliasInfo(String key, ALIAS_TYPE aliastype) {
-    HashMap<String, String> aliasInfo = null;
-    if (aliasMap != null) {
-      String typeKey = aliastype.name().toLowerCase();
-      HashMap<String, Object> typeJson = (HashMap<String, Object>) aliasMap.get(typeKey);
-      if (typeJson != null) {
-        aliasInfo = (HashMap<String, String>) typeJson.get(key);
-      }
-    }
-    if (aliasInfo == null) {
-      aliasInfo = new HashMap<String, String>();
-    }
-    return aliasInfo;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/ConfigBlock.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/ConfigBlock.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/ConfigBlock.java
deleted file mode 100644
index c3ccc47..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/ConfigBlock.java
+++ /dev/null
@@ -1,260 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.log4j.Logger;
-import org.apache.log4j.Priority;
-
-
-public abstract class ConfigBlock {
-  static private Logger logger = Logger.getLogger(ConfigBlock.class);
-
-  private boolean drain = false;
-
-  protected Map<String, Object> configs;
-  protected Map<String, String> contextFields = new HashMap<String, String>();
-  public MetricCount statMetric = new MetricCount();
-
-  /**
-   *
-   */
-  public ConfigBlock() {
-    super();
-  }
-
-  /**
-   * Used while logging. Keep it short and meaningful
-   */
-  public abstract String getShortDescription();
-
-  /**
-   * Every implementor need to give name to the thread they create
-   */
-  public String getNameForThread() {
-    return this.getClass().getSimpleName();
-  }
-
-  /**
-   * @param metricsList
-   */
-  public void addMetricsContainers(List<MetricCount> metricsList) {
-    metricsList.add(statMetric);
-  }
-
-  /**
-   * This method needs to be overwritten by deriving classes.
-   */
-  public void init() throws Exception {
-  }
-
-  public void loadConfig(Map<String, Object> map) {
-    configs = LogFeederUtil.cloneObject(map);
-
-    Map<String, String> nvList = getNVList("add_fields");
-    if (nvList != null) {
-      contextFields.putAll(nvList);
-    }
-  }
-
-  public Map<String, Object> getConfigs() {
-    return configs;
-  }
-
-  @SuppressWarnings("unchecked")
-  public boolean isEnabled() {
-    boolean isEnabled = getBooleanValue("is_enabled", true);
-    if (isEnabled) {
-      // Let's check for static conditions
-      Map<String, Object> conditions = (Map<String, Object>) configs
-        .get("conditions");
-      boolean allow = true;
-      if (conditions != null && conditions.size() > 0) {
-        allow = false;
-        for (String conditionType : conditions.keySet()) {
-          if (conditionType.equalsIgnoreCase("fields")) {
-            Map<String, Object> fields = (Map<String, Object>) conditions
-              .get("fields");
-            for (String fieldName : fields.keySet()) {
-              Object values = fields.get(fieldName);
-              if (values instanceof String) {
-                allow = isFieldConditionMatch(fieldName,
-                  (String) values);
-              } else {
-                List<String> listValues = (List<String>) values;
-                for (String stringValue : listValues) {
-                  allow = isFieldConditionMatch(fieldName,
-                    stringValue);
-                  if (allow) {
-                    break;
-                  }
-                }
-              }
-              if (allow) {
-                break;
-              }
-            }
-          }
-          if (allow) {
-            break;
-          }
-        }
-        isEnabled = allow;
-      }
-    }
-    return isEnabled;
-  }
-
-  public boolean isFieldConditionMatch(String fieldName, String stringValue) {
-    boolean allow = false;
-    String fieldValue = (String) configs.get(fieldName);
-    if (fieldValue != null && fieldValue.equalsIgnoreCase(stringValue)) {
-      allow = true;
-    } else {
-      @SuppressWarnings("unchecked")
-      Map<String, Object> addFields = (Map<String, Object>) configs
-        .get("add_fields");
-      if (addFields != null && addFields.get(fieldName) != null) {
-        String addFieldValue = (String) addFields.get(fieldName);
-        if (stringValue.equalsIgnoreCase(addFieldValue)) {
-          allow = true;
-        }
-      }
-
-    }
-    return allow;
-  }
-
-  @SuppressWarnings("unchecked")
-  public Map<String, String> getNVList(String key) {
-    return (Map<String, String>) configs.get(key);
-  }
-
-  public String getStringValue(String key) {
-    Object value = configs.get(key);
-    if (value != null && value.toString().equalsIgnoreCase("none")) {
-      value = null;
-    }
-    if (value != null) {
-      return value.toString();
-    }
-    return null;
-  }
-
-  public String getStringValue(String key, String defaultValue) {
-    Object value = configs.get(key);
-    if (value != null && value.toString().equalsIgnoreCase("none")) {
-      value = null;
-    }
-
-    if (value != null) {
-      return value.toString();
-    }
-    return defaultValue;
-  }
-
-  public Object getConfigValue(String key) {
-    return configs.get(key);
-  }
-
-  public boolean getBooleanValue(String key, boolean defaultValue) {
-    String strValue = getStringValue(key);
-    boolean retValue = defaultValue;
-    if (!StringUtils.isEmpty(strValue)) {
-      if (strValue.equalsIgnoreCase("true")
-        || strValue.equalsIgnoreCase("yes")) {
-        retValue = true;
-      } else {
-        retValue = false;
-      }
-    }
-    return retValue;
-  }
-
-  public int getIntValue(String key, int defaultValue) {
-    String strValue = getStringValue(key);
-    int retValue = defaultValue;
-    if (!StringUtils.isEmpty(strValue)) {
-      try {
-        retValue = Integer.parseInt(strValue);
-      } catch (Throwable t) {
-        logger.error("Error parsing integer value. key=" + key
-          + ", value=" + strValue);
-      }
-    }
-    return retValue;
-  }
-  
-  public long getLongValue(String key, long defaultValue) {
-    String strValue = getStringValue(key);
-    Long retValue = defaultValue;
-    if (!StringUtils.isEmpty(strValue)) {
-      try {
-        retValue = Long.parseLong(strValue);
-      } catch (Throwable t) {
-        logger.error("Error parsing long value. key=" + key + ", value="
-            + strValue);
-      }
-    }
-    return retValue;
-  }
-
-  public Map<String, String> getContextFields() {
-    return contextFields;
-  }
-
-  public void incrementStat(int count) {
-    statMetric.count += count;
-  }
-
-  public void logStatForMetric(MetricCount metric, String prefixStr) {
-    LogFeederUtil.logStatForMetric(metric, prefixStr, ", key="
-      + getShortDescription());
-  }
-
-  synchronized public void logStat() {
-    logStatForMetric(statMetric, "Stat");
-  }
-
-  public boolean logConfgs(Priority level) {
-    if (level.toInt() == Priority.INFO_INT && !logger.isInfoEnabled()) {
-      return false;
-    }
-    if (level.toInt() == Priority.DEBUG_INT && !logger.isDebugEnabled()) {
-      return false;
-    }
-    logger.log(level, "Printing configuration Block="
-      + getShortDescription());
-    logger.log(level, "configs=" + configs);
-    logger.log(level, "contextFields=" + contextFields);
-    return true;
-  }
-
-  public boolean isDrain() {
-    return drain;
-  }
-
-  public void setDrain(boolean drain) {
-    this.drain = drain;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/InputMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/InputMgr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/InputMgr.java
deleted file mode 100644
index fa60702..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/InputMgr.java
+++ /dev/null
@@ -1,451 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder;
-
-import java.io.EOFException;
-import java.io.File;
-import java.io.FileFilter;
-import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.UUID;
-
-import org.apache.ambari.logfeeder.input.Input;
-import org.apache.ambari.logfeeder.input.InputFile;
-import org.apache.commons.io.filefilter.WildcardFileFilter;
-import org.apache.log4j.Logger;
-import org.apache.solr.common.util.Base64;
-
-public class InputMgr {
-  private static final Logger logger = Logger.getLogger(InputMgr.class);
-
-  private List<Input> inputList = new ArrayList<Input>();
-  private Set<Input> notReadyList = new HashSet<Input>();
-
-  private boolean isDrain = false;
-  private boolean isAnyInputTail = false;
-
-  private String checkPointSubFolderName = "logfeeder_checkpoints";
-  private File checkPointFolderFile = null;
-
-  private MetricCount filesCountMetric = new MetricCount();
-
-  private String checkPointExtension = ".cp";
-  
-  private Thread inputIsReadyMonitor = null;
-
-  public List<Input> getInputList() {
-    return inputList;
-  }
-
-  public void add(Input input) {
-    inputList.add(input);
-  }
-
-  public void removeInput(Input input) {
-    logger.info("Trying to remove from inputList. "
-      + input.getShortDescription());
-    Iterator<Input> iter = inputList.iterator();
-    while (iter.hasNext()) {
-      Input iterInput = iter.next();
-      if (iterInput.equals(input)) {
-        logger.info("Removing Input from inputList. "
-          + input.getShortDescription());
-        iter.remove();
-      }
-    }
-  }
-
-  public int getActiveFilesCount() {
-    int count = 0;
-    for (Input input : inputList) {
-      if (input.isReady()) {
-        count++;
-      }
-    }
-    return count;
-  }
-
-  public void init() {
-    filesCountMetric.metricsName = "input.files.count";
-    filesCountMetric.isPointInTime = true;
-
-    checkPointExtension = LogFeederUtil.getStringProperty(
-      "logfeeder.checkpoint.extension", checkPointExtension);
-    for (Input input : inputList) {
-      try {
-        input.init();
-        if (input.isTail()) {
-          isAnyInputTail = true;
-        }
-      } catch (Exception e) {
-        logger.error(
-          "Error initializing input. "
-            + input.getShortDescription(), e);
-      }
-    }
-
-    if (isAnyInputTail) {
-      logger.info("Determining valid checkpoint folder");
-      boolean isCheckPointFolderValid = false;
-      // We need to keep track of the files we are reading.
-      String checkPointFolder = LogFeederUtil
-        .getStringProperty("logfeeder.checkpoint.folder");
-      if (checkPointFolder != null && !checkPointFolder.isEmpty()) {
-        checkPointFolderFile = new File(checkPointFolder);
-        isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
-      }
-      if (!isCheckPointFolderValid) {
-        // Let's try home folder
-        String userHome = LogFeederUtil.getStringProperty("user.home");
-        if (userHome != null) {
-          checkPointFolderFile = new File(userHome,
-            checkPointSubFolderName);
-          logger.info("Checking if home folder can be used for checkpoints. Folder="
-            + checkPointFolderFile);
-          isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
-        }
-      }
-      if (!isCheckPointFolderValid) {
-        // Let's use tmp folder
-        String tmpFolder = LogFeederUtil
-          .getStringProperty("java.io.tmpdir");
-        if (tmpFolder == null) {
-          tmpFolder = "/tmp";
-        }
-        checkPointFolderFile = new File(tmpFolder,
-          checkPointSubFolderName);
-        logger.info("Checking if tmps folder can be used for checkpoints. Folder="
-          + checkPointFolderFile);
-        isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
-        if (isCheckPointFolderValid) {
-          logger.warn("Using tmp folder "
-            + checkPointFolderFile
-            + " to store check points. This is not recommended."
-            + "Please set logfeeder.checkpoint.folder property");
-        }
-      }
-
-      if (isCheckPointFolderValid) {
-        logger.info("Using folder " + checkPointFolderFile
-          + " for storing checkpoints");
-      }
-    }
-
-  }
-
-  public File getCheckPointFolderFile() {
-    return checkPointFolderFile;
-  }
-
-  private boolean verifyCheckPointFolder(File folderPathFile) {
-    if (!folderPathFile.exists()) {
-      // Create the folder
-      try {
-        if (!folderPathFile.mkdir()) {
-          logger.warn("Error creating folder for check point. folder="
-            + folderPathFile);
-        }
-      } catch (Throwable t) {
-        logger.warn("Error creating folder for check point. folder="
-          + folderPathFile, t);
-      }
-    }
-
-    if (folderPathFile.exists() && folderPathFile.isDirectory()) {
-      // Let's check whether we can create a file
-      File testFile = new File(folderPathFile, UUID.randomUUID()
-        .toString());
-      try {
-        testFile.createNewFile();
-        return testFile.delete();
-      } catch (IOException e) {
-        logger.warn(
-          "Couldn't create test file in "
-            + folderPathFile.getAbsolutePath()
-            + " for checkPoint", e);
-      }
-    }
-    return false;
-  }
-
-  public void monitor() {
-    for (Input input : inputList) {
-      if (input.isReady()) {
-        input.monitor();
-      } else {
-        if (input.isTail()) {
-          logger.info("Adding input to not ready list. Note, it is possible this component is not run on this host. So it might not be an issue. "
-            + input.getShortDescription());
-          notReadyList.add(input);
-        } else {
-          logger.info("Input is not ready, so going to ignore it "
-            + input.getShortDescription());
-        }
-      }
-    }
-    // Start the monitoring thread if any file is in tail mode
-    if (isAnyInputTail) {
-       inputIsReadyMonitor = new Thread("InputIsReadyMonitor") {
-        @Override
-        public void run() {
-          logger.info("Going to monitor for these missing files: "
-            + notReadyList.toString());
-          while (true) {
-            if (isDrain) {
-              logger.info("Exiting missing file monitor.");
-              break;
-            }
-            try {
-              Iterator<Input> iter = notReadyList.iterator();
-              while (iter.hasNext()) {
-                Input input = iter.next();
-                try {
-                  if (input.isReady()) {
-                    input.monitor();
-                    iter.remove();
-                  }
-                } catch (Throwable t) {
-                  logger.error("Error while enabling monitoring for input. "
-                    + input.getShortDescription());
-                }
-              }
-              Thread.sleep(30 * 1000);
-            } catch (Throwable t) {
-              // Ignore
-            }
-          }
-        }
-      };
-      inputIsReadyMonitor.start();
-    }
-  }
-
-  public void addToNotReady(Input notReadyInput) {
-    notReadyList.add(notReadyInput);
-  }
-
-  public void addMetricsContainers(List<MetricCount> metricsList) {
-    for (Input input : inputList) {
-      input.addMetricsContainers(metricsList);
-    }
-    filesCountMetric.count = getActiveFilesCount();
-    metricsList.add(filesCountMetric);
-  }
-
-  public void logStats() {
-    for (Input input : inputList) {
-      input.logStat();
-    }
-
-    filesCountMetric.count = getActiveFilesCount();
-    LogFeederUtil.logStatForMetric(filesCountMetric,
-      "Stat: Files Monitored Count", null);
-  }
-
-  public void close() {
-    for (Input input : inputList) {
-      try {
-        input.setDrain(true);
-      } catch (Throwable t) {
-        logger.error(
-          "Error while draining. input="
-            + input.getShortDescription(), t);
-      }
-    }
-    isDrain = true;
-
-    // Need to get this value from property
-    int iterations = 30;
-    int waitTimeMS = 1000;
-    int i = 0;
-    boolean allClosed = true;
-    for (i = 0; i < iterations; i++) {
-      allClosed = true;
-      for (Input input : inputList) {
-        if (!input.isClosed()) {
-          try {
-            allClosed = false;
-            logger.warn("Waiting for input to close. "
-              + input.getShortDescription() + ", "
-              + (iterations - i) + " more seconds");
-            Thread.sleep(waitTimeMS);
-          } catch (Throwable t) {
-            // Ignore
-          }
-        }
-      }
-      if (allClosed) {
-        break;
-      }
-    }
-    if (!allClosed) {
-      logger.warn("Some inputs were not closed. Iterations=" + i);
-      for (Input input : inputList) {
-        if (!input.isClosed()) {
-          logger.warn("Input not closed. Will ignore it."
-            + input.getShortDescription());
-        }
-      }
-    } else {
-      logger.info("All inputs are closed. Iterations=" + i);
-    }
-
-  }
-
-  public void checkInAll() {
-    for (Input input : inputList) {
-      input.checkIn();
-    }
-  }
-
-  public void cleanCheckPointFiles() {
-
-    if (checkPointFolderFile == null) {
-      logger.info("Will not clean checkPoint files. checkPointFolderFile="
-        + checkPointFolderFile);
-      return;
-    }
-    logger.info("Cleaning checkPoint files. checkPointFolderFile="
-      + checkPointFolderFile.getAbsolutePath());
-    try {
-      // Loop over the check point files and if filePath is not present, then move to closed
-      String searchPath = "*" + checkPointExtension;
-      FileFilter fileFilter = new WildcardFileFilter(searchPath);
-      File[] checkPointFiles = checkPointFolderFile.listFiles(fileFilter);
-      int totalCheckFilesDeleted = 0;
-      for (File checkPointFile : checkPointFiles) {
-        RandomAccessFile checkPointReader = null;
-        try {
-          checkPointReader = new RandomAccessFile(checkPointFile, "r");
-
-          int contentSize = checkPointReader.readInt();
-          byte b[] = new byte[contentSize];
-          int readSize = checkPointReader.read(b, 0, contentSize);
-          if (readSize != contentSize) {
-            logger.error("Couldn't read expected number of bytes from checkpoint file. expected="
-              + contentSize
-              + ", read="
-              + readSize
-              + ", checkPointFile=" + checkPointFile);
-          } else {
-            // Create JSON string
-            String jsonCheckPointStr = new String(b, 0, readSize);
-            Map<String, Object> jsonCheckPoint = LogFeederUtil
-              .toJSONObject(jsonCheckPointStr);
-
-            String logFilePath = (String) jsonCheckPoint
-              .get("file_path");
-            String logFileKey = (String) jsonCheckPoint
-              .get("file_key");
-            if (logFilePath != null && logFileKey != null) {
-              boolean deleteCheckPointFile = false;
-              File logFile = new File(logFilePath);
-              if (logFile.exists()) {
-                Object fileKeyObj = InputFile
-                  .getFileKey(logFile);
-                String fileBase64 = Base64
-                  .byteArrayToBase64(fileKeyObj
-                    .toString().getBytes());
-                if (!logFileKey.equals(fileBase64)) {
-                  deleteCheckPointFile = true;
-                  logger.info("CheckPoint clean: File key has changed. old="
-                    + logFileKey
-                    + ", new="
-                    + fileBase64
-                    + ", filePath="
-                    + logFilePath
-                    + ", checkPointFile="
-                    + checkPointFile.getAbsolutePath());
-                }
-              } else {
-                logger.info("CheckPoint clean: Log file doesn't exist. filePath="
-                  + logFilePath
-                  + ", checkPointFile="
-                  + checkPointFile.getAbsolutePath());
-                deleteCheckPointFile = true;
-              }
-              if (deleteCheckPointFile) {
-                logger.info("Deleting CheckPoint file="
-                  + checkPointFile.getAbsolutePath()
-                  + ", logFile=" + logFilePath);
-                checkPointFile.delete();
-                totalCheckFilesDeleted++;
-              }
-            }
-          }
-        } catch (EOFException eof) {
-          logger.warn("Caught EOFException. Ignoring reading existing checkPoint file. "
-            + checkPointFile);
-        } catch (Throwable t) {
-          logger.error("Error while checking checkPoint file. "
-            + checkPointFile, t);
-        } finally {
-          if (checkPointReader != null) {
-            try {
-              checkPointReader.close();
-            } catch (Throwable t) {
-              logger.error("Error closing checkPoint file. "
-                + checkPointFile, t);
-            }
-          }
-        }
-      }
-      logger.info("Deleted " + totalCheckFilesDeleted
-        + " checkPoint file(s). checkPointFolderFile="
-        + checkPointFolderFile.getAbsolutePath());
-
-    } catch (Throwable t) {
-      logger.error("Error while cleaning checkPointFiles", t);
-    }
-  }
-
-  public void waitOnAllInputs() {
-    //wait on inputs
-    if (inputList != null) {
-      for (Input input : inputList) {
-        if (input != null) {
-          Thread inputThread = input.getThread();
-          if (inputThread != null) {
-            try {
-              inputThread.join();
-            } catch (InterruptedException e) {
-              // ignore
-            }
-          }
-        }
-      }
-    }
-    // wait on monitor
-    if (inputIsReadyMonitor != null) {
-      try {
-        this.close();
-        inputIsReadyMonitor.join();
-      } catch (InterruptedException e) {
-        // ignore
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
index 3cf0fff..373d743 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
@@ -37,14 +37,20 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.ambari.logfeeder.AliasUtil.ALIAS_PARAM;
-import org.apache.ambari.logfeeder.AliasUtil.ALIAS_TYPE;
 import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.input.InputMgr;
 import org.apache.ambari.logfeeder.input.InputSimulate;
 import org.apache.ambari.logfeeder.logconfig.LogfeederScheduler;
+import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.metrics.MetricsMgr;
 import org.apache.ambari.logfeeder.output.Output;
+import org.apache.ambari.logfeeder.output.OutputMgr;
+import org.apache.ambari.logfeeder.util.AliasUtil;
 import org.apache.ambari.logfeeder.util.FileUtil;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.ambari.logfeeder.util.AliasUtil.ALIAS_PARAM;
+import org.apache.ambari.logfeeder.util.AliasUtil.ALIAS_TYPE;
 import org.apache.hadoop.util.ShutdownHookManager;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederAMSClient.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederAMSClient.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederAMSClient.java
deleted file mode 100644
index da61d83..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederAMSClient.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder;
-
-import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.apache.log4j.Logger;
-
-// TODO: Refactor for failover
-public class LogFeederAMSClient extends AbstractTimelineMetricsSink {
-  private static final Logger logger = Logger.getLogger(LogFeederAMSClient.class);
-
-  private String collectorHosts = null;
-
-  public LogFeederAMSClient() {
-    collectorHosts = LogFeederUtil
-      .getStringProperty("logfeeder.metrics.collector.hosts");
-    if (collectorHosts != null && collectorHosts.trim().length() == 0) {
-      collectorHosts = null;
-    }
-    if (collectorHosts != null) {
-      collectorHosts = collectorHosts.trim();
-    }
-    logger.info("AMS collector URL=" + collectorHosts);
-  }
-
-  @Override
-  public String getCollectorUri(String host) {
-    return collectorHosts;
-  }
-
-  @Override
-  protected int getTimeoutSeconds() {
-    // TODO: Hard coded timeout
-    return 10;
-  }
-
-  @Override
-  protected String getZookeeperQuorum() {
-    return null;
-  }
-
-  @Override
-  protected String getConfiguredCollectors() {
-    return null;
-  }
-
-  @Override
-  protected String getHostname() {
-    return null;
-  }
-
-  @Override
-  protected boolean emitMetrics(TimelineMetrics metrics) {
-    return super.emitMetrics(metrics);
-  }
-
-  @Override
-  protected String getCollectorProtocol() {
-    return null;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederUtil.java
deleted file mode 100644
index a86d989..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeederUtil.java
+++ /dev/null
@@ -1,556 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder;
-
-import java.io.BufferedInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.lang.reflect.Type;
-import java.net.InetAddress;
-import java.net.URL;
-import java.net.UnknownHostException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Hashtable;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.TimeZone;
-
-import org.apache.ambari.logfeeder.filter.Filter;
-import org.apache.ambari.logfeeder.input.Input;
-import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
-import org.apache.ambari.logfeeder.mapper.Mapper;
-import org.apache.ambari.logfeeder.output.Output;
-import org.apache.ambari.logfeeder.util.PlaceholderUtil;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.codehaus.jackson.JsonParseException;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.type.TypeReference;
-
-import com.google.common.collect.ObjectArrays;
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.google.gson.reflect.TypeToken;
-
-/**
- * This class contains utility methods used by LogFeeder
- */
-public class LogFeederUtil {
-  private static final Logger logger = Logger.getLogger(LogFeederUtil.class);
-
-  private static final int HASH_SEED = 31174077;
-  public final static String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";
-  public final static String SOLR_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
-  private static Gson gson = new GsonBuilder().setDateFormat(DATE_FORMAT).create();
-
-  private static Properties props;
-
-  private static Map<String, LogHistory> logHistoryList = new Hashtable<String, LogHistory>();
-  private static int logInterval = 30000; // 30 seconds
-
-  public static String hostName = null;
-  public static String ipAddress = null;
-  
-  private static String logfeederTempDir = null;
-  
-  private static final Object _LOCK = new Object();
-  
-  static{
-    setHostNameAndIP();
-  }
-  
-  public static Gson getGson() {
-    return gson;
-  }
-
-  private static ThreadLocal<SimpleDateFormat> dateFormatter = new ThreadLocal<SimpleDateFormat>() {
-    @Override
-    protected SimpleDateFormat initialValue() {
-      SimpleDateFormat sdf = new SimpleDateFormat(SOLR_DATE_FORMAT);
-      sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
-      return sdf;
-    }
-  };
-
-  /**
-   * This method will read the properties from System, followed by propFile
-   * and finally from the map
-   */
-  public static void loadProperties(String propFile, String[] propNVList)
-    throws Exception {
-    logger.info("Loading properties. propFile=" + propFile);
-    props = new Properties(System.getProperties());
-    boolean propLoaded = false;
-
-    // First get properties file path from environment value
-    String propertiesFilePath = System.getProperty("properties");
-    if (propertiesFilePath != null && !propertiesFilePath.isEmpty()) {
-      File propertiesFile = new File(propertiesFilePath);
-      if (propertiesFile.exists() && propertiesFile.isFile()) {
-        logger.info("Properties file path set in environment. Loading properties file="
-          + propertiesFilePath);
-        FileInputStream fileInputStream = null;
-        try {
-          fileInputStream = new FileInputStream(propertiesFile);
-          props.load(fileInputStream);
-          propLoaded = true;
-        } catch (Throwable t) {
-          logger.error("Error loading properties file. properties file="
-            + propertiesFile.getAbsolutePath());
-        } finally {
-          if (fileInputStream != null) {
-            try {
-              fileInputStream.close();
-            } catch (Throwable t) {
-              // Ignore error
-            }
-          }
-        }
-      } else {
-        logger.error("Properties file path set in environment, but file not found. properties file="
-          + propertiesFilePath);
-      }
-    }
-
-    if (!propLoaded) {
-      BufferedInputStream fileInputStream = null;
-      try {
-        // Properties not yet loaded, let's try from class loader
-        fileInputStream = (BufferedInputStream) LogFeeder.class
-          .getClassLoader().getResourceAsStream(propFile);
-        if (fileInputStream != null) {
-          logger.info("Loading properties file " + propFile
-            + " from classpath");
-          props.load(fileInputStream);
-          propLoaded = true;
-        } else {
-          logger.fatal("Properties file not found in classpath. properties file name= "
-            + propFile);
-        }
-      } finally {
-        if (fileInputStream != null) {
-          try {
-            fileInputStream.close();
-          } catch (IOException e) {
-          }
-        }
-      }
-    }
-
-    if (!propLoaded) {
-      logger.fatal("Properties file is not loaded.");
-      throw new Exception("Properties not loaded");
-    } else {
-      updatePropertiesFromMap(propNVList);
-    }
-  }
-
-  private static void updatePropertiesFromMap(String[] nvList) {
-    if (nvList == null) {
-      return;
-    }
-    logger.info("Trying to load additional proeprties from argument paramters. nvList.length="
-      + nvList.length);
-    if (nvList != null && nvList.length > 0) {
-      for (String nv : nvList) {
-        logger.info("Passed nv=" + nv);
-        if (nv.startsWith("-") && nv.length() > 1) {
-          nv = nv.substring(1);
-          logger.info("Stripped nv=" + nv);
-          int i = nv.indexOf("=");
-          if (nv.length() > i) {
-            logger.info("Candidate nv=" + nv);
-            String name = nv.substring(0, i);
-            String value = nv.substring(i + 1);
-            logger.info("Adding property from argument to properties. name="
-              + name + ", value=" + value);
-            props.put(name, value);
-          }
-        }
-      }
-    }
-  }
-
-  static public String getStringProperty(String key) {
-    if (props != null) {
-      return props.getProperty(key);
-    }
-    return null;
-  }
-
-  static public String getStringProperty(String key, String defaultValue) {
-    if (props != null) {
-      return props.getProperty(key, defaultValue);
-    }
-    return defaultValue;
-  }
-
-  static public boolean getBooleanProperty(String key, boolean defaultValue) {
-    String strValue = getStringProperty(key);
-    return toBoolean(strValue, defaultValue);
-  }
-
-  private static boolean toBoolean(String strValue, boolean defaultValue) {
-    boolean retValue = defaultValue;
-    if (!StringUtils.isEmpty(strValue)) {
-      if (strValue.equalsIgnoreCase("true")
-        || strValue.equalsIgnoreCase("yes")) {
-        retValue = true;
-      } else {
-        retValue = false;
-      }
-    }
-    return retValue;
-  }
-
-  static public int getIntProperty(String key, int defaultValue) {
-    String strValue = getStringProperty(key);
-    int retValue = defaultValue;
-    retValue = objectToInt(strValue, retValue, ", key=" + key);
-    return retValue;
-  }
-
-  public static int objectToInt(Object objValue, int retValue,
-                                String errMessage) {
-    if (objValue == null) {
-      return retValue;
-    }
-    String strValue = objValue.toString();
-    if (!StringUtils.isEmpty(strValue)) {
-      try {
-        retValue = Integer.parseInt(strValue);
-      } catch (Throwable t) {
-        logger.error("Error parsing integer value. str=" + strValue
-          + ", " + errMessage);
-      }
-    }
-    return retValue;
-  }
-
-  public static boolean isEnabled(Map<String, Object> conditionConfigs,
-                                  Map<String, Object> valueConfigs) {
-    boolean allow = toBoolean((String) valueConfigs.get("is_enabled"), true);
-    @SuppressWarnings("unchecked")
-    Map<String, Object> conditions = (Map<String, Object>) conditionConfigs
-      .get("conditions");
-    if (conditions != null && conditions.size() > 0) {
-      allow = false;
-      for (String conditionType : conditions.keySet()) {
-        if (conditionType.equalsIgnoreCase("fields")) {
-          @SuppressWarnings("unchecked")
-          Map<String, Object> fields = (Map<String, Object>) conditions
-            .get("fields");
-          for (String fieldName : fields.keySet()) {
-            Object values = fields.get(fieldName);
-            if (values instanceof String) {
-              allow = isFieldConditionMatch(valueConfigs,
-                fieldName, (String) values);
-            } else {
-              @SuppressWarnings("unchecked")
-              List<String> listValues = (List<String>) values;
-              for (String stringValue : listValues) {
-                allow = isFieldConditionMatch(valueConfigs,
-                  fieldName, stringValue);
-                if (allow) {
-                  break;
-                }
-              }
-            }
-            if (allow) {
-              break;
-            }
-          }
-        }
-        if (allow) {
-          break;
-        }
-      }
-    }
-    return allow;
-  }
-
-  public static boolean isFieldConditionMatch(Map<String, Object> configs,
-                                              String fieldName, String stringValue) {
-    boolean allow = false;
-    String fieldValue = (String) configs.get(fieldName);
-    if (fieldValue != null && fieldValue.equalsIgnoreCase(stringValue)) {
-      allow = true;
-    } else {
-      @SuppressWarnings("unchecked")
-      Map<String, Object> addFields = (Map<String, Object>) configs
-        .get("add_fields");
-      if (addFields != null && addFields.get(fieldName) != null) {
-        String addFieldValue = (String) addFields.get(fieldName);
-        if (stringValue.equalsIgnoreCase(addFieldValue)) {
-          allow = true;
-        }
-      }
-
-    }
-    return allow;
-  }
-
-  public static void logStatForMetric(MetricCount metric, String prefixStr,
-                                      String postFix) {
-    long currStat = metric.count;
-    long currMS = System.currentTimeMillis();
-    if (currStat > metric.prevLogCount) {
-      if (postFix == null) {
-        postFix = "";
-      }
-      logger.info(prefixStr + ": total_count=" + metric.count
-        + ", duration=" + (currMS - metric.prevLogMS) / 1000
-        + " secs, count=" + (currStat - metric.prevLogCount)
-        + postFix);
-    }
-    metric.prevLogCount = currStat;
-    metric.prevLogMS = currMS;
-  }
-
-  public static Map<String, Object> cloneObject(Map<String, Object> map) {
-    if (map == null) {
-      return null;
-    }
-    String jsonStr = gson.toJson(map);
-    Type type = new TypeToken<Map<String, Object>>() {
-    }.getType();
-    return gson.fromJson(jsonStr, type);
-  }
-
-  public static Map<String, Object> toJSONObject(String jsonStr) {
-    if(jsonStr==null || jsonStr.trim().isEmpty()){
-      return new HashMap<String, Object>();
-    }
-    Type type = new TypeToken<Map<String, Object>>() {
-    }.getType();
-    return gson.fromJson(jsonStr, type);
-  }
-
-  static public boolean logErrorMessageByInterval(String key, String message,
-                                                  Throwable e, Logger callerLogger, Level level) {
-
-    LogHistory log = logHistoryList.get(key);
-    if (log == null) {
-      log = new LogHistory();
-      logHistoryList.put(key, log);
-    }
-    if ((System.currentTimeMillis() - log.lastLogTime) > logInterval) {
-      log.lastLogTime = System.currentTimeMillis();
-      int counter = log.counter;
-      log.counter = 0;
-      if (counter > 0) {
-        message += ". Messages suppressed before: " + counter;
-      }
-      if (e == null) {
-        callerLogger.log(level, message);
-      } else {
-        callerLogger.log(level, message, e);
-      }
-
-      return true;
-    } else {
-      log.counter++;
-    }
-    return false;
-
-  }
-
-  static public String subString(String str, int maxLength) {
-    if (str == null || str.length() == 0) {
-      return "";
-    }
-    maxLength = str.length() < maxLength ? str.length() : maxLength;
-    return str.substring(0, maxLength);
-  }
-
-  public static long genHash(String value) {
-    if (value == null) {
-      value = "null";
-    }
-    return MurmurHash.hash64A(value.getBytes(), HASH_SEED);
-  }
-
-  private static class LogHistory {
-    private long lastLogTime = 0;
-    private int counter = 0;
-  }
-
-  public static String getDate(String timeStampStr) {
-    try {
-      return dateFormatter.get().format(new Date(Long.parseLong(timeStampStr)));
-    } catch (Exception ex) {
-      logger.error(ex);
-      return null;
-    }
-  }
-
-  public static String getActualDateStr() {
-    try {
-      return dateFormatter.get().format(new Date());
-    } catch (Exception ex) {
-      logger.error(ex);
-      return null;
-    }
-  }
-
-  public static File getFileFromClasspath(String filename) {
-    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader()
-      .getResource(filename);
-    logger.debug("File Complete URI :" + fileCompleteUrl);
-    File file = null;
-    try {
-      file = new File(fileCompleteUrl.toURI());
-    } catch (Exception exception) {
-      logger.debug(exception.getMessage(), exception.getCause());
-    }
-    return file;
-  }
-
-  public static Object getClassInstance(String classFullName, AliasUtil.ALIAS_TYPE aliasType) {
-    Object instance = null;
-    try {
-      instance = (Object) Class.forName(classFullName).getConstructor().newInstance();
-    } catch (Exception exception) {
-      logger.error("Unsupported class =" + classFullName, exception.getCause());
-    }
-    // check instance class as per aliasType
-    if (instance != null) {
-      boolean isValid = false;
-      switch (aliasType) {
-        case FILTER:
-          isValid = Filter.class.isAssignableFrom(instance.getClass());
-          break;
-        case INPUT:
-          isValid = Input.class.isAssignableFrom(instance.getClass());
-          break;
-        case OUTPUT:
-          isValid = Output.class.isAssignableFrom(instance.getClass());
-          break;
-        case MAPPER:
-          isValid = Mapper.class.isAssignableFrom(instance.getClass());
-          break;
-        default:
-          // by default consider all classes valid
-          isValid = true;
-      }
-      if (!isValid) {
-        logger.error("Not a valid class :" + classFullName + " AliasType :" + aliasType.name());
-      }
-    }
-    return instance;
-  }
-
-  public static HashMap<String, Object> readJsonFromFile(File jsonFile) {
-    ObjectMapper mapper = new ObjectMapper();
-    try {
-      HashMap<String, Object> jsonmap = mapper.readValue(jsonFile, new TypeReference<HashMap<String, Object>>() {
-      });
-      return jsonmap;
-    } catch (JsonParseException e) {
-      logger.error(e, e.getCause());
-    } catch (JsonMappingException e) {
-      logger.error(e, e.getCause());
-    } catch (IOException e) {
-      logger.error(e, e.getCause());
-    }
-    return new HashMap<String, Object>();
-  }
-
-  public static boolean isListContains(List<String> list, String str, boolean caseSensitive) {
-    if (list != null) {
-      for (String value : list) {
-        if (value != null) {
-          if (caseSensitive) {
-            if (value.equals(str)) {
-              return true;
-            }
-          } else {
-            if (value.equalsIgnoreCase(str)) {
-              return true;
-            }
-          }
-          if (value.equalsIgnoreCase(LogFeederConstants.ALL)) {
-            return true;
-          }
-        }
-      }
-    }
-    return false;
-  }
-  
-  
-  private static synchronized String setHostNameAndIP() {
-    if (hostName == null || ipAddress == null) {
-      try {
-        InetAddress ip = InetAddress.getLocalHost();
-        ipAddress = ip.getHostAddress();
-        String getHostName = ip.getHostName();
-        String getCanonicalHostName = ip.getCanonicalHostName();
-        if (!getCanonicalHostName.equalsIgnoreCase(ipAddress)) {
-          logger.info("Using getCanonicalHostName()=" + getCanonicalHostName);
-          hostName = getCanonicalHostName;
-        } else {
-          logger.info("Using getHostName()=" + getHostName);
-          hostName = getHostName;
-        }
-        logger.info("ipAddress=" + ipAddress + ", getHostName=" + getHostName
-            + ", getCanonicalHostName=" + getCanonicalHostName + ", hostName="
-            + hostName);
-      } catch (UnknownHostException e) {
-        logger.error("Error getting hostname.", e);
-      }
-    }
-    return hostName;
-  }
-
-  public static String[] mergeArray(String[] first, String[] second) {
-    if (first == null) {
-      first = new String[0];
-    }
-    if (second == null) {
-      second = new String[0];
-    }
-    String[] mergedArray = ObjectArrays.concat(first, second, String.class);
-    return mergedArray;
-  }
-  
-  public static String getLogfeederTempDir() {
-    if (logfeederTempDir == null) {
-      synchronized (_LOCK) {
-        if (logfeederTempDir == null) {
-          String tempDirValue = getStringProperty("logfeeder.tmp.dir",
-              "/tmp/$username/logfeeder/");
-          HashMap<String, String> contextParam = new HashMap<String, String>();
-          String username = System.getProperty("user.name");
-          contextParam.put("username", username);
-          logfeederTempDir = PlaceholderUtil.replaceVariables(tempDirValue,
-              contextParam);
-        }
-      }
-    }
-    return logfeederTempDir;
-  }
-}
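
The "-name=value" argument handling in updatePropertiesFromMap() above can be exercised in isolation. A minimal sketch, assuming nothing beyond the JDK; the class name and sample arguments are illustrative and the sketch additionally requires a non-empty name before '=':

    import java.util.Properties;

    public class ArgPropsDemo {
      public static void main(String[] args) {
        Properties props = new Properties();
        String[] nvList = {"-logfeeder.tmp.dir=/tmp/lf", "-metrics.enabled=true", "not-a-property"};
        for (String nv : nvList) {
          if (nv.startsWith("-") && nv.length() > 1) {
            nv = nv.substring(1);            // strip the leading '-'
            int i = nv.indexOf('=');
            if (i > 0 && nv.length() > i) {  // require a non-empty name before '='
              props.put(nv.substring(0, i), nv.substring(i + 1));
            }
          }
        }
        System.out.println(props);           // prints the two parsed properties
      }
    }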

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MetricCount.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MetricCount.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MetricCount.java
deleted file mode 100644
index 9bb1564..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MetricCount.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder;
-
-public class MetricCount {
-  public String metricsName = null;
-  public boolean isPointInTime = false;
-
-  public long count = 0;
-  public long prevLogCount = 0;
-  public long prevLogMS = System.currentTimeMillis();
-  public long prevPublishCount = 0;
-  public int publishCount = 0; // Number of times this metric has been published; used to detect the first publish
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MetricsMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MetricsMgr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MetricsMgr.java
deleted file mode 100644
index b2a7786..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MetricsMgr.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder;
-
-import java.net.InetAddress;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.TreeMap;
-
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.apache.log4j.Logger;
-
-public class MetricsMgr {
-  private static final Logger logger = Logger.getLogger(MetricsMgr.class);
-
-  private boolean isMetricsEnabled = false;
-  private String nodeHostName = null;
-  private String appId = "logfeeder";
-
-  private long lastPublishTimeMS = 0; // Let's do the first publish immediately
-  private long lastFailedPublishTimeMS = System.currentTimeMillis(); // Reset the clock
-
-  private int publishIntervalMS = 60 * 1000;
-  private int maxMetricsBuffer = 60 * 60 * 1000; // If AMS is down, we should not keep
-  // the metrics in memory forever
-  private HashMap<String, TimelineMetric> metricsMap = new HashMap<String, TimelineMetric>();
-  private LogFeederAMSClient amsClient = null;
-
-  public void init() {
-    logger.info("Initializing MetricsMgr()");
-    amsClient = new LogFeederAMSClient();
-
-    if (amsClient.getCollectorUri(null) != null) {
-      nodeHostName = LogFeederUtil.getStringProperty("node.hostname");
-      if (nodeHostName == null) {
-        try {
-          nodeHostName = InetAddress.getLocalHost().getHostName();
-        } catch (Throwable e) {
-          logger.warn(
-            "Error getting hostname using InetAddress.getLocalHost().getHostName()",
-            e);
-        }
-        if (nodeHostName == null) {
-          try {
-            nodeHostName = InetAddress.getLocalHost()
-              .getCanonicalHostName();
-          } catch (Throwable e) {
-            logger.warn(
-              "Error getting hostname using InetAddress.getLocalHost().getCanonicalHostName()",
-              e);
-          }
-        }
-      }
-      if (nodeHostName == null) {
-        isMetricsEnabled = false;
-        logger.error("Failed getting hostname for node. Disabling publishing LogFeeder metrics");
-      } else {
-        isMetricsEnabled = true;
-        logger.info("LogFeeder Metrics is enabled. Metrics host="
-          + amsClient.getCollectorUri(null));
-      }
-    } else {
-      logger.info("LogFeeder Metrics publish is disabled");
-    }
-  }
-
-  public boolean isMetricsEnabled() {
-    return isMetricsEnabled;
-  }
-
-  synchronized public void useMetrics(List<MetricCount> metricsList) {
-    if (!isMetricsEnabled) {
-      return;
-    }
-    logger.info("useMetrics() metrics.size=" + metricsList.size());
-    long currMS = System.currentTimeMillis();
-    Long currMSLong = new Long(currMS);
-    for (MetricCount metric : metricsList) {
-      if (metric.metricsName == null) {
-        logger.debug("metric.metricsName is null");
-        // Metrics is not meant to be published
-        continue;
-      }
-      long currCount = metric.count;
-      if (!metric.isPointInTime && metric.publishCount > 0
-        && currCount <= metric.prevPublishCount) {
-        // No new data added, so let's ignore it
-        logger.debug("Nothing changed. " + metric.metricsName
-          + ", currCount=" + currCount + ", prevPublishCount="
-          + metric.prevPublishCount);
-        continue;
-      }
-      metric.publishCount++;
-
-      TimelineMetric timelineMetric = metricsMap.get(metric.metricsName);
-      if (timelineMetric == null) {
-        logger.debug("Creating new metric obbject for "
-          + metric.metricsName);
-        // First time for this metric
-        timelineMetric = new TimelineMetric();
-        timelineMetric.setMetricName(metric.metricsName);
-        timelineMetric.setHostName(nodeHostName);
-        timelineMetric.setAppId(appId);
-        timelineMetric.setStartTime(currMS);
-        timelineMetric.setType("Long");
-        timelineMetric.setMetricValues(new TreeMap<Long, Double>());
-
-        metricsMap.put(metric.metricsName, timelineMetric);
-      }
-      logger.debug("Adding metrics=" + metric.metricsName);
-      if (metric.isPointInTime) {
-        timelineMetric.getMetricValues().put(currMSLong,
-          new Double(currCount));
-      } else {
-        Double value = timelineMetric.getMetricValues().get(currMSLong);
-        if (value == null) {
-          value = new Double(0);
-        }
-        value += (currCount - metric.prevPublishCount);
-        timelineMetric.getMetricValues().put(currMSLong, value);
-        metric.prevPublishCount = currCount;
-      }
-    }
-
-    if (metricsMap.size() > 0
-      && currMS - lastPublishTimeMS > publishIntervalMS) {
-      try {
-        // Time to publish
-        TimelineMetrics timelineMetrics = new TimelineMetrics();
-        List<TimelineMetric> timeLineMetricList = new ArrayList<TimelineMetric>();
-        timeLineMetricList.addAll(metricsMap.values());
-        timelineMetrics.setMetrics(timeLineMetricList);
-        amsClient.emitMetrics(timelineMetrics);
-        logger.info("Published " + timeLineMetricList.size()
-          + " metrics to AMS");
-        metricsMap.clear();
-        timeLineMetricList.clear();
-        lastPublishTimeMS = currMS;
-      } catch (Throwable t) {
-        logger.warn("Error sending metrics to AMS.", t);
-        if (currMS - lastFailedPublishTimeMS > maxMetricsBuffer) {
-          logger.error("AMS was not sent for last "
-            + maxMetricsBuffer
-            / 1000
-            + " seconds. Purging it and will start rebuilding it again");
-          metricsMap.clear();
-          lastFailedPublishTimeMS = currMS;
-        }
-      }
-    } else {
-      logger.info("Not publishing metrics. metrics.size()="
-        + metricsMap.size() + ", lastPublished="
-        + (currMS - lastPublishTimeMS) / 1000
-        + " seconds ago, intervalConfigured=" + publishIntervalMS
-        / 1000);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MurmurHash.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MurmurHash.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MurmurHash.java
deleted file mode 100644
index 2a54f28..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/MurmurHash.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.logfeeder;
-
-import com.google.common.primitives.Ints;
-
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-
-/**
- * This is a very fast, non-cryptographic hash suitable for general hash-based
- * lookup.  See http://murmurhash.googlepages.com/ for more details.
- * <p/>
- * <p>The C version of MurmurHash 2.0 found at that site was ported
- * to Java by Andrzej Bialecki (ab at getopt org).</p>
- */
-public final class MurmurHash {
-
-  private MurmurHash() {
-  }
-
-  /**
-   * Hashes an int.
-   *
-   * @param data The int to hash.
-   * @param seed The seed for the hash.
-   * @return The 32 bit hash of the bytes in question.
-   */
-  public static int hash(int data, int seed) {
-    return hash(ByteBuffer.wrap(Ints.toByteArray(data)), seed);
-  }
-
-  /**
-   * Hashes bytes in an array.
-   *
-   * @param data The bytes to hash.
-   * @param seed The seed for the hash.
-   * @return The 32 bit hash of the bytes in question.
-   */
-  public static int hash(byte[] data, int seed) {
-    return hash(ByteBuffer.wrap(data), seed);
-  }
-
-  /**
-   * Hashes bytes in part of an array.
-   *
-   * @param data   The data to hash.
-   * @param offset Where to start munging.
-   * @param length How many bytes to process.
-   * @param seed   The seed to start with.
-   * @return The 32-bit hash of the data in question.
-   */
-  public static int hash(byte[] data, int offset, int length, int seed) {
-    return hash(ByteBuffer.wrap(data, offset, length), seed);
-  }
-
-  /**
-   * Hashes the bytes in a buffer from the current position to the limit.
-   *
-   * @param buf  The bytes to hash.
-   * @param seed The seed for the hash.
-   * @return The 32 bit murmur hash of the bytes in the buffer.
-   */
-  public static int hash(ByteBuffer buf, int seed) {
-    // save byte order for later restoration
-    ByteOrder byteOrder = buf.order();
-    buf.order(ByteOrder.LITTLE_ENDIAN);
-
-    int m = 0x5bd1e995;
-    int r = 24;
-
-    int h = seed ^ buf.remaining();
-
-    while (buf.remaining() >= 4) {
-      int k = buf.getInt();
-
-      k *= m;
-      k ^= k >>> r;
-      k *= m;
-
-      h *= m;
-      h ^= k;
-    }
-
-    if (buf.remaining() > 0) {
-      ByteBuffer finish = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN);
-      // for big-endian version, use this first:
-      // finish.position(4-buf.remaining());
-      finish.put(buf).rewind();
-      h ^= finish.getInt();
-      h *= m;
-    }
-
-    h ^= h >>> 13;
-    h *= m;
-    h ^= h >>> 15;
-
-    buf.order(byteOrder);
-    return h;
-  }
-
-
-  public static long hash64A(byte[] data, int seed) {
-    return hash64A(ByteBuffer.wrap(data), seed);
-  }
-
-  public static long hash64A(byte[] data, int offset, int length, int seed) {
-    return hash64A(ByteBuffer.wrap(data, offset, length), seed);
-  }
-
-  public static long hash64A(ByteBuffer buf, int seed) {
-    ByteOrder byteOrder = buf.order();
-    buf.order(ByteOrder.LITTLE_ENDIAN);
-
-    long m = 0xc6a4a7935bd1e995L;
-    int r = 47;
-
-    long h = seed ^ (buf.remaining() * m);
-
-    while (buf.remaining() >= 8) {
-      long k = buf.getLong();
-
-      k *= m;
-      k ^= k >>> r;
-      k *= m;
-
-      h ^= k;
-      h *= m;
-    }
-
-    if (buf.remaining() > 0) {
-      ByteBuffer finish = ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN);
-      // for big-endian version, do this first:
-      // finish.position(8-buf.remaining());
-      finish.put(buf).rewind();
-      h ^= finish.getLong();
-      h *= m;
-    }
-
-    h ^= h >>> r;
-    h *= m;
-    h ^= h >>> r;
-
-    buf.order(byteOrder);
-    return h;
-  }
-
-}
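
For context, this MurmurHash class is what LogFeederUtil.genHash() uses to build event and message ids. A hedged usage sketch of hash64A(); the seed constant below is illustrative, not the project's actual HASH_SEED value, and the import points at the class location shown in the hunk above:

    import org.apache.ambari.logfeeder.MurmurHash;

    public class HashDemo {
      private static final int HASH_SEED = 31174077; // illustrative seed only

      // Mirrors LogFeederUtil.genHash(): null-safe, seeded 64-bit Murmur hash of a string
      public static long genHash(String value) {
        if (value == null) {
          value = "null";
        }
        return MurmurHash.hash64A(value.getBytes(), HASH_SEED);
      }

      public static void main(String[] args) {
        System.out.println(genHash("2016-09-07 23:37:51,000 INFO example log line"));
        System.out.println(genHash(null)); // same hash as genHash("null")
      }
    }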

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/OutputMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/OutputMgr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/OutputMgr.java
deleted file mode 100644
index 41b005b..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/OutputMgr.java
+++ /dev/null
@@ -1,262 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logfeeder;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-
-import org.apache.ambari.logfeeder.input.Input;
-import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
-import org.apache.ambari.logfeeder.logconfig.filter.FilterLogData;
-import org.apache.ambari.logfeeder.output.Output;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-
-public class OutputMgr {
-  private static final Logger logger = Logger.getLogger(OutputMgr.class);
-
-  private Collection<Output> outputList = new ArrayList<Output>();
-
-  private boolean addMessageMD5 = true;
-
-  private int MAX_OUTPUT_SIZE = 32765; // 32766-1
-  private static long doc_counter = 0;
-  private MetricCount messageTruncateMetric = new MetricCount();
-
-  
-  public Collection<Output> getOutputList() {
-    return outputList;
-  }
-
-  public void setOutputList(Collection<Output> outputList) {
-    this.outputList = outputList;
-  }
-
-  public void write(Map<String, Object> jsonObj, InputMarker inputMarker) {
-    Input input = inputMarker.input;
-
-    // Update the block with the context fields
-    for (Map.Entry<String, String> entry : input.getContextFields()
-      .entrySet()) {
-      if (jsonObj.get(entry.getKey()) == null) {
-        jsonObj.put(entry.getKey(), entry.getValue());
-      }
-    }
-
-    // TODO: Ideally most of the overrides should be configurable
-
-    // Add the input type
-    if (jsonObj.get("type") == null) {
-      jsonObj.put("type", input.getStringValue("type"));
-    }
-    if (jsonObj.get("path") == null && input.getFilePath() != null) {
-      jsonObj.put("path", input.getFilePath());
-    }
-    if (jsonObj.get("path") == null && input.getStringValue("path") != null) {
-      jsonObj.put("path", input.getStringValue("path"));
-    }
-
-    // Add host if required
-    if (jsonObj.get("host") == null && LogFeederUtil.hostName != null) {
-      jsonObj.put("host", LogFeederUtil.hostName);
-    }
-    // Add IP if required
-    if (jsonObj.get("ip") == null && LogFeederUtil.ipAddress != null) {
-      jsonObj.put("ip", LogFeederUtil.ipAddress);
-    }
-    
-    //Add level
-    if (jsonObj.get("level") == null) {
-      jsonObj.put("level", LogFeederConstants.LOG_LEVEL_UNKNOWN);
-    }
-    if (input.isUseEventMD5() || input.isGenEventMD5()) {
-      String prefix = "";
-      Object logtimeObj = jsonObj.get("logtime");
-      if (logtimeObj != null) {
-        if (logtimeObj instanceof Date) {
-          prefix = "" + ((Date) logtimeObj).getTime();
-        } else {
-          prefix = logtimeObj.toString();
-        }
-      }
-      Long eventMD5 = LogFeederUtil.genHash(LogFeederUtil.getGson()
-        .toJson(jsonObj));
-      if (input.isGenEventMD5()) {
-        jsonObj.put("event_md5", prefix + eventMD5.toString());
-      }
-      if (input.isUseEventMD5()) {
-        jsonObj.put("id", prefix + eventMD5.toString());
-      }
-    }
-
-    // jsonObj.put("@timestamp", new Date());
-    jsonObj.put("seq_num", new Long(doc_counter++));
-    if (jsonObj.get("id") == null) {
-      jsonObj.put("id", UUID.randomUUID().toString());
-    }
-    if (jsonObj.get("event_count") == null) {
-      jsonObj.put("event_count", new Integer(1));
-    }
-    if (inputMarker.lineNumber > 0) {
-      jsonObj.put("logfile_line_number", new Integer(
-        inputMarker.lineNumber));
-    }
-    if (jsonObj.containsKey("log_message")) {
-      // TODO: Let's check size only for log_message for now
-      String logMessage = (String) jsonObj.get("log_message");
-      if (logMessage != null
-        && logMessage.getBytes().length > MAX_OUTPUT_SIZE) {
-        messageTruncateMetric.count++;
-        final String LOG_MESSAGE_KEY = this.getClass().getSimpleName()
-          + "_MESSAGESIZE";
-        LogFeederUtil.logErrorMessageByInterval(LOG_MESSAGE_KEY,
-          "Message is too big. size="
-            + logMessage.getBytes().length + ", input="
-            + input.getShortDescription()
-            + ". Truncating to " + MAX_OUTPUT_SIZE
-            + ", first upto 100 characters="
-            + LogFeederUtil.subString(logMessage, 100),
-          null, logger, Level.WARN);
-        logMessage = new String(logMessage.getBytes(), 0,
-          MAX_OUTPUT_SIZE);
-        jsonObj.put("log_message", logMessage);
-        // Add error tags
-        @SuppressWarnings("unchecked")
-        List<String> tagsList = (List<String>) jsonObj.get("tags");
-        if (tagsList == null) {
-          tagsList = new ArrayList<String>();
-          jsonObj.put("tags", tagsList);
-        }
-        tagsList.add("error_message_truncated");
-
-      }
-      if (addMessageMD5) {
-        jsonObj.put("message_md5",
-          "" + LogFeederUtil.genHash(logMessage));
-      }
-    }
-    //check log is allowed to send output
-    if (FilterLogData.INSTANCE.isAllowed(jsonObj)) {
-      for (Output output : input.getOutputList()) {
-        try {
-          output.write(jsonObj, inputMarker);
-        } catch (Exception e) {
-          logger.error("Error writing. to " + output.getShortDescription(), e);
-        }
-      }
-    }
-  }
-
-  public void write(String jsonBlock, InputMarker inputMarker) {
-    //check log is allowed to send output
-    if (FilterLogData.INSTANCE.isAllowed(jsonBlock)) {
-      for (Output output : inputMarker.input.getOutputList()) {
-        try {
-          output.write(jsonBlock, inputMarker);
-        } catch (Exception e) {
-          logger.error("Error writing. to " + output.getShortDescription(), e);
-        }
-      }
-    }
-  }
-
-  public void close() {
-    logger.info("Close called for outputs ...");
-    for (Output output : outputList) {
-      try {
-        output.setDrain(true);
-        output.close();
-      } catch (Exception e) {
-        // Ignore
-      }
-    }
-    // Need to get this value from property
-    int iterations = 30;
-    int waitTimeMS = 1000;
-    int i;
-    boolean allClosed = true;
-    for (i = 0; i < iterations; i++) {
-      allClosed = true;
-      for (Output output : outputList) {
-        if (!output.isClosed()) {
-          try {
-            allClosed = false;
-            logger.warn("Waiting for output to close. "
-              + output.getShortDescription() + ", "
-              + (iterations - i) + " more seconds");
-            Thread.sleep(waitTimeMS);
-          } catch (Throwable t) {
-            // Ignore
-          }
-        }
-      }
-      if (allClosed) {
-        break;
-      }
-    }
-
-    if (!allClosed) {
-      logger.warn("Some outpus were not closed. Iterations=" + i);
-      for (Output output : outputList) {
-        if (!output.isClosed()) {
-          logger.warn("Output not closed. Will ignore it."
-            + output.getShortDescription() + ", pendingCound="
-            + output.getPendingCount());
-        }
-      }
-    } else {
-      logger.info("All outputs are closed. Iterations=" + i);
-    }
-  }
-
-  public void logStats() {
-    for (Output output : outputList) {
-      output.logStat();
-    }
-    LogFeederUtil.logStatForMetric(messageTruncateMetric,
-      "Stat: Messages Truncated", null);
-  }
-
-  public void addMetricsContainers(List<MetricCount> metricsList) {
-    metricsList.add(messageTruncateMetric);
-    for (Output output : outputList) {
-      output.addMetricsContainers(metricsList);
-    }
-  }
-
-  
-  public void copyFile(File inputFile, InputMarker inputMarker) {
-    Input input = inputMarker.input;
-    for (Output output : input.getOutputList()) {
-      try {
-        output.copyFile(inputFile, inputMarker);
-      } catch (Exception e) {
-        logger.error("Error coyping file . to " + output.getShortDescription(),
-            e);
-      }
-    }
-  }
-}
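
OutputMgr.write() above applies a byte-size guard only to the "log_message" field before handing the event to its outputs. A minimal standalone sketch of that guard, assuming default-charset ASCII content; MAX_OUTPUT_SIZE and the tag name come from the hunk above, while the demo class and sample payload are illustrative:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class TruncateDemo {
      private static final int MAX_OUTPUT_SIZE = 32765; // same limit as OutputMgr

      public static void main(String[] args) {
        Map<String, Object> jsonObj = new HashMap<>();
        char[] big = new char[40000];
        Arrays.fill(big, 'x');
        jsonObj.put("log_message", new String(big));

        String logMessage = (String) jsonObj.get("log_message");
        if (logMessage != null && logMessage.getBytes().length > MAX_OUTPUT_SIZE) {
          // keep only the first MAX_OUTPUT_SIZE bytes and tag the event as truncated
          jsonObj.put("log_message", new String(logMessage.getBytes(), 0, MAX_OUTPUT_SIZE));
          List<String> tags = new ArrayList<>();
          tags.add("error_message_truncated");
          jsonObj.put("tags", tags);
        }
        // 32765 for this ASCII message (byte-based truncation can split multi-byte characters)
        System.out.println(((String) jsonObj.get("log_message")).length());
      }
    }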

http://git-wip-us.apache.org/repos/asf/ambari/blob/df3de10d/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
new file mode 100644
index 0000000..287982f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigBlock.java
@@ -0,0 +1,263 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.common;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logfeeder.metrics.MetricCount;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
+import org.apache.log4j.Priority;
+
+
+public abstract class ConfigBlock {
+  static private Logger logger = Logger.getLogger(ConfigBlock.class);
+
+  private boolean drain = false;
+
+  protected Map<String, Object> configs;
+  protected Map<String, String> contextFields = new HashMap<String, String>();
+  public MetricCount statMetric = new MetricCount();
+
+  /**
+   *
+   */
+  public ConfigBlock() {
+    super();
+  }
+
+  /**
+   * Used while logging. Keep it short and meaningful
+   */
+  public abstract String getShortDescription();
+
+  /**
+   * Every implementor needs to give a name to the thread it creates
+   */
+  public String getNameForThread() {
+    return this.getClass().getSimpleName();
+  }
+
+  /**
+   * @param metricsList
+   */
+  public void addMetricsContainers(List<MetricCount> metricsList) {
+    metricsList.add(statMetric);
+  }
+
+  /**
+   * This method needs to be overridden by derived classes.
+   */
+  public void init() throws Exception {
+  }
+
+  public void loadConfig(Map<String, Object> map) {
+    configs = LogFeederUtil.cloneObject(map);
+
+    Map<String, String> nvList = getNVList("add_fields");
+    if (nvList != null) {
+      contextFields.putAll(nvList);
+    }
+  }
+
+  public Map<String, Object> getConfigs() {
+    return configs;
+  }
+
+  @SuppressWarnings("unchecked")
+  public boolean isEnabled() {
+    boolean isEnabled = getBooleanValue("is_enabled", true);
+    if (isEnabled) {
+      // Let's check for static conditions
+      Map<String, Object> conditions = (Map<String, Object>) configs
+        .get("conditions");
+      boolean allow = true;
+      if (conditions != null && conditions.size() > 0) {
+        allow = false;
+        for (String conditionType : conditions.keySet()) {
+          if (conditionType.equalsIgnoreCase("fields")) {
+            Map<String, Object> fields = (Map<String, Object>) conditions
+              .get("fields");
+            for (String fieldName : fields.keySet()) {
+              Object values = fields.get(fieldName);
+              if (values instanceof String) {
+                allow = isFieldConditionMatch(fieldName,
+                  (String) values);
+              } else {
+                List<String> listValues = (List<String>) values;
+                for (String stringValue : listValues) {
+                  allow = isFieldConditionMatch(fieldName,
+                    stringValue);
+                  if (allow) {
+                    break;
+                  }
+                }
+              }
+              if (allow) {
+                break;
+              }
+            }
+          }
+          if (allow) {
+            break;
+          }
+        }
+        isEnabled = allow;
+      }
+    }
+    return isEnabled;
+  }
+
+  public boolean isFieldConditionMatch(String fieldName, String stringValue) {
+    boolean allow = false;
+    String fieldValue = (String) configs.get(fieldName);
+    if (fieldValue != null && fieldValue.equalsIgnoreCase(stringValue)) {
+      allow = true;
+    } else {
+      @SuppressWarnings("unchecked")
+      Map<String, Object> addFields = (Map<String, Object>) configs
+        .get("add_fields");
+      if (addFields != null && addFields.get(fieldName) != null) {
+        String addFieldValue = (String) addFields.get(fieldName);
+        if (stringValue.equalsIgnoreCase(addFieldValue)) {
+          allow = true;
+        }
+      }
+
+    }
+    return allow;
+  }
+
+  @SuppressWarnings("unchecked")
+  public Map<String, String> getNVList(String key) {
+    return (Map<String, String>) configs.get(key);
+  }
+
+  public String getStringValue(String key) {
+    Object value = configs.get(key);
+    if (value != null && value.toString().equalsIgnoreCase("none")) {
+      value = null;
+    }
+    if (value != null) {
+      return value.toString();
+    }
+    return null;
+  }
+
+  public String getStringValue(String key, String defaultValue) {
+    Object value = configs.get(key);
+    if (value != null && value.toString().equalsIgnoreCase("none")) {
+      value = null;
+    }
+
+    if (value != null) {
+      return value.toString();
+    }
+    return defaultValue;
+  }
+
+  public Object getConfigValue(String key) {
+    return configs.get(key);
+  }
+
+  public boolean getBooleanValue(String key, boolean defaultValue) {
+    String strValue = getStringValue(key);
+    boolean retValue = defaultValue;
+    if (!StringUtils.isEmpty(strValue)) {
+      if (strValue.equalsIgnoreCase("true")
+        || strValue.equalsIgnoreCase("yes")) {
+        retValue = true;
+      } else {
+        retValue = false;
+      }
+    }
+    return retValue;
+  }
+
+  public int getIntValue(String key, int defaultValue) {
+    String strValue = getStringValue(key);
+    int retValue = defaultValue;
+    if (!StringUtils.isEmpty(strValue)) {
+      try {
+        retValue = Integer.parseInt(strValue);
+      } catch (Throwable t) {
+        logger.error("Error parsing integer value. key=" + key
+          + ", value=" + strValue);
+      }
+    }
+    return retValue;
+  }
+  
+  public long getLongValue(String key, long defaultValue) {
+    String strValue = getStringValue(key);
+    Long retValue = defaultValue;
+    if (!StringUtils.isEmpty(strValue)) {
+      try {
+        retValue = Long.parseLong(strValue);
+      } catch (Throwable t) {
+        logger.error("Error parsing long value. key=" + key + ", value="
+            + strValue);
+      }
+    }
+    return retValue;
+  }
+
+  public Map<String, String> getContextFields() {
+    return contextFields;
+  }
+
+  public void incrementStat(int count) {
+    statMetric.count += count;
+  }
+
+  public void logStatForMetric(MetricCount metric, String prefixStr) {
+    LogFeederUtil.logStatForMetric(metric, prefixStr, ", key="
+      + getShortDescription());
+  }
+
+  synchronized public void logStat() {
+    logStatForMetric(statMetric, "Stat");
+  }
+
+  public boolean logConfgs(Priority level) {
+    if (level.toInt() == Priority.INFO_INT && !logger.isInfoEnabled()) {
+      return false;
+    }
+    if (level.toInt() == Priority.DEBUG_INT && !logger.isDebugEnabled()) {
+      return false;
+    }
+    logger.log(level, "Printing configuration Block="
+      + getShortDescription());
+    logger.log(level, "configs=" + configs);
+    logger.log(level, "contextFields=" + contextFields);
+    return true;
+  }
+
+  public boolean isDrain() {
+    return drain;
+  }
+
+  public void setDrain(boolean drain) {
+    this.drain = drain;
+  }
+}
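
The conditions handling in ConfigBlock.isEnabled() is easiest to see with a concrete config map. A hedged sketch, assuming the class above (and its LogFeederUtil dependency) is on the classpath; the field names and values are made up for illustration:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.ambari.logfeeder.common.ConfigBlock;

    public class ConfigBlockDemo {
      public static void main(String[] args) {
        Map<String, Object> fields = new HashMap<>();
        fields.put("type", Arrays.asList("hdfs_namenode", "hdfs_datanode"));
        Map<String, Object> conditions = new HashMap<>();
        conditions.put("fields", fields);

        Map<String, Object> config = new HashMap<>();
        config.put("is_enabled", "true");
        config.put("conditions", conditions);
        config.put("type", "hdfs_namenode"); // matches one of the allowed values

        // Anonymous subclass only for the demo; ConfigBlock is abstract
        ConfigBlock block = new ConfigBlock() {
          @Override
          public String getShortDescription() {
            return "demo";
          }
        };
        block.loadConfig(config);
        System.out.println(block.isEnabled()); // true: the "type" field satisfies the condition
      }
    }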


[41/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code - Part 2 (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java
deleted file mode 100644
index 53e2ca2..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.solr.metrics;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Map;
-import java.util.Timer;
-import java.util.TimerTask;
-import java.util.TreeMap;
-
-import javax.management.MalformedObjectNameException;
-
-import org.apache.ambari.logsearch.common.PropertiesHelper;
-import org.apache.ambari.logsearch.solr.AmbariSolrCloudClient;
-import org.apache.ambari.logsearch.solr.AmbariSolrCloudClientBuilder;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class SolrMetricsLoader extends TimerTask {
-  private static final Logger LOG = LoggerFactory.getLogger(SolrMetricsLoader.class);
-
-  private static final int RETRY = 3;
-  private static final int MAX_METRIC_SIZE= 1000;
-
-  private final String solrHost;
-  private final SolrJmxAdapter solrJmxAdapter;
-  private final SolrAmsClient solrAmsClient;
-  
-  private final TimelineMetrics metrics = new TimelineMetrics();
-
-  public SolrMetricsLoader(String solrHost, int solrJmxPort, String collectorHost) throws IOException {
-    this.solrHost = solrHost;
-    this.solrJmxAdapter = new SolrJmxAdapter(solrHost, solrJmxPort);
-    this.solrAmsClient = new SolrAmsClient(collectorHost);
-
-    solrJmxAdapter.connect();
-  }
-
-  @Override
-  public void run() {
-    LOG.info("Loading Solr Metrics for the host " + solrHost);
-
-    addCpuUsageMetric();
-    addHeapMemoryUsageMetric();
-    addIndexSizeMetric();
-
-    emitMetrics();
-    removeOverTheLimitMetrics();
-  }
-
-  private void addCpuUsageMetric() {
-    Exception lastException = null;
-    for (int retries = 0; retries < RETRY; retries++) {
-      try {
-        double processCpuLoad = solrJmxAdapter.getProcessCpuLoad();
-        addMetric("logsearch.solr.cpu.usage", "Float", processCpuLoad);
-        return;
-      } catch (MalformedObjectNameException e) {
-        lastException = e;
-        try {
-          solrJmxAdapter.reConnect();
-        } catch (IOException e1) {
-        }
-      }
-    }
-
-    LOG.info("Could not load solr cpu usage metric, last exception:", lastException);
-  }
-
-  private void addHeapMemoryUsageMetric() {
-    Exception lastException = null;
-    for (int retries = 0; retries < RETRY; retries++) {
-      try {
-        Map<String, Long> memoryData = solrJmxAdapter.getMemoryData();
-        addMetric("jvm.JvmMetrics.MemHeapUsedM", "Long", memoryData.get("heapMemoryUsed").doubleValue() / 1024 / 1024);
-        addMetric("jvm.JvmMetrics.MemHeapCommittedM", "Long", memoryData.get("heapMemoryCommitted").doubleValue() / 1024 / 1024);
-        addMetric("jvm.JvmMetrics.MemHeapMaxM", "Long", memoryData.get("heapMemoryMax").doubleValue() / 1024 / 1024);
-        addMetric("jvm.JvmMetrics.MemNonHeapUsedM", "Long", memoryData.get("nonHeapMemoryUsed").doubleValue() / 1024 / 1024);
-        addMetric("jvm.JvmMetrics.MemNonHeapCommittedM", "Long", memoryData.get("nonHeapMemoryCommitted").doubleValue() / 1024 / 1024);
-        addMetric("jvm.JvmMetrics.MemNonHeapMaxM", "Long", memoryData.get("nonHeapMemoryMax").doubleValue() / 1024 / 1024);
-        return;
-      } catch (MalformedObjectNameException e) {
-        lastException = e;
-        try {
-          solrJmxAdapter.reConnect();
-        } catch (IOException e1) {
-        }
-      }
-    }
-
-    LOG.info("Could not load solr heap memory usage metric, last exception:", lastException);
-  }
-
-  private void addIndexSizeMetric() {
-    Exception lastException = null;
-    for (int retries = 0; retries < RETRY; retries++) {
-      try {
-        double indexSize = solrJmxAdapter.getIndexSize();
-        addMetric("logsearch.solr.index.size", "Long", indexSize / 1024 / 1024 / 1024);
-        return;
-      } catch (Exception e) {
-        lastException = e;
-        try {
-          solrJmxAdapter.reConnect();
-        } catch (IOException e1) {
-        }
-      }
-    }
-
-    LOG.info("Could not load solr index size metric, last exception:", lastException);
-  }
-
-  private void addMetric(String metricName, String type, Double value) {
-    Long currMS = System.currentTimeMillis();
-
-    TimelineMetric metric = new TimelineMetric();
-    metric.setMetricName(metricName);
-    metric.setHostName(solrHost);
-    metric.setAppId("infra-solr");
-    metric.setStartTime(currMS);
-    metric.setType(type);
-    metric.setTimestamp(currMS);
-    metric.getMetricValues().put(currMS, value);
-
-    metrics.addOrMergeTimelineMetric(metric);
-  }
-
-  private void emitMetrics() {
-    Exception lastException = null;
-    for (int retries = 0; retries < RETRY; retries++) {
-      try {
-        if (solrAmsClient.emitMetrics(metrics)) {
-          metrics.getMetrics().clear();
-          return;
-        }
-      } catch (Exception e) {
-        lastException = e;
-      }
-    }
-
-    LOG.info("Could not emit metrics, last exception:", lastException);
-  }
-
-  private void removeOverTheLimitMetrics() {
-    for (TimelineMetric metric : metrics.getMetrics()) {
-      TreeMap<Long, Double> metricValues = metric.getMetricValues();
-      while (metricValues.size() > MAX_METRIC_SIZE) {
-        metricValues.remove(metricValues.firstKey());
-      }
-    }
-  }
-
-  public static void startSolrMetricsLoaderTasks() {
-    try {
-      String collectorHosts = PropertiesHelper.getProperty("logsearch.solr.metrics.collector.hosts");
-      if (StringUtils.isEmpty(collectorHosts)) {
-        LOG.warn("No Ambari Metrics service is available, no Solr metrics will be loaded!");
-        return;
-      }
-
-      int solrJmxPort = PropertiesHelper.getIntProperty("logsearch.solr.jmx.port");
-
-      String zkConnectString = PropertiesHelper.getProperty("logsearch.solr.zk_connect_string");
-      AmbariSolrCloudClient ambariSolrCloudClient = new AmbariSolrCloudClientBuilder()
-          .withZkConnectString(zkConnectString)
-          .build();
-
-      Collection<String> solrHosts = ambariSolrCloudClient.getSolrHosts();
-      for (String solrHost : solrHosts) {
-        SolrMetricsLoader sml = new SolrMetricsLoader(solrHost, solrJmxPort, collectorHosts);
-        Timer timer = new Timer("Solr Metrics Loader - " + solrHost, true);
-        timer.scheduleAtFixedRate(sml, 0, 10000);
-      }
-    } catch (Exception e) {
-      LOG.warn("Could not start solr metric loader tasks", e);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java
index a3f59f7..0cf05a6 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java
@@ -69,7 +69,7 @@ public class SolrAuditLogData extends SolrCommonLogData implements AuditLogData
   private String repo;
 
   @Field("repoType")
-  private String repoType;
+  private Integer repoType;
 
   @Field("reqData")
   private String requestData;
@@ -188,12 +188,12 @@ public class SolrAuditLogData extends SolrCommonLogData implements AuditLogData
   }
 
   @Override
-  public String getRepoType() {
+  public Integer getRepoType() {
     return repoType;
   }
 
   @Override
-  public void setRepoType(String repoType) {
+  public void setRepoType(Integer repoType) {
     this.repoType = repoType;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchLogoutSuccessHandler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchLogoutSuccessHandler.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchLogoutSuccessHandler.java
index 5da1c97..c20e383 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchLogoutSuccessHandler.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/authenticate/LogsearchLogoutSuccessHandler.java
@@ -36,6 +36,6 @@ public class LogsearchLogoutSuccessHandler extends SimpleUrlLogoutSuccessHandler
     public void onLogoutSuccess(HttpServletRequest request, HttpServletResponse response, Authentication authentication)
       throws IOException, ServletException {
       logger.debug("LogsearchLogoutSuccessHandler ::: onLogoutSuccess");
-      response.sendRedirect("/login.html");
+      response.sendRedirect("/index.html");
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java
index 1b24c06..1831697 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchAuthenticationEntryPoint.java
@@ -28,7 +28,7 @@ import org.apache.log4j.Logger;
 import org.springframework.security.core.AuthenticationException;
 import org.springframework.security.web.authentication.LoginUrlAuthenticationEntryPoint;
 
-class LogsearchAuthenticationEntryPoint extends LoginUrlAuthenticationEntryPoint {
+public class LogsearchAuthenticationEntryPoint extends LoginUrlAuthenticationEntryPoint {
   private static final Logger logger = Logger.getLogger(LogsearchAuthenticationEntryPoint.class);
 
   public LogsearchAuthenticationEntryPoint(String loginFormUrl) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
index 29fd5b2..8cd435b 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
@@ -61,6 +61,7 @@ import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHa
 import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.springframework.security.web.authentication.WebAuthenticationDetails;
+import org.springframework.stereotype.Component;
 
 public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter {
   private static final Logger logger = LoggerFactory.getLogger(LogsearchKRBAuthenticationFilter.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
index a34fbd0..1320278 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchSecurityContextFormationFilter.java
@@ -38,6 +38,7 @@ import org.apache.log4j.Logger;
 import org.springframework.security.authentication.AnonymousAuthenticationToken;
 import org.springframework.security.core.Authentication;
 import org.springframework.security.core.context.SecurityContextHolder;
+import org.springframework.stereotype.Component;
 import org.springframework.web.filter.GenericFilterBean;
 
 public class LogsearchSecurityContextFormationFilter extends GenericFilterBean {

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchUsernamePasswordAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchUsernamePasswordAuthenticationFilter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchUsernamePasswordAuthenticationFilter.java
index 2c83001..85688a2 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchUsernamePasswordAuthenticationFilter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchUsernamePasswordAuthenticationFilter.java
@@ -20,11 +20,13 @@ package org.apache.ambari.logsearch.web.filters;
 
 import java.io.IOException;
 
+import javax.inject.Inject;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.log4j.Logger;
+import org.springframework.security.authentication.AuthenticationManager;
 import org.springframework.security.core.AuthenticationException;
 import org.springframework.security.web.authentication.RememberMeServices;
 import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/listeners/SpringEventListener.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/listeners/SpringEventListener.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/listeners/SpringEventListener.java
deleted file mode 100644
index fc6a594..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/listeners/SpringEventListener.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.web.listeners;
-
-import org.apache.log4j.Logger;
-import org.springframework.context.ApplicationListener;
-import org.springframework.security.authentication.event.AbstractAuthenticationEvent;
-
-public class SpringEventListener implements ApplicationListener<AbstractAuthenticationEvent> {
-  private static final Logger logger = Logger.getLogger(SpringEventListener.class);
-
-  @Override
-  public void onApplicationEvent(AbstractAuthenticationEvent event) {
-    logger.trace(" Inside onApplicationEvent  SpringEventListener");
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java
index 0c102c3..eab33a1 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAbstractAuthenticationProvider.java
@@ -65,14 +65,4 @@ public abstract class LogsearchAbstractAuthenticationProvider implements Authent
     return isEnable;
   }
 
-  /**
-   * Check authentication provider is enable or disable
-   * 
-   * @return boolean
-   */
-  public boolean isEnable() {
-    // default is disabled
-    return false;
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
index 3534818..05104b4 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchAuthenticationProvider.java
@@ -26,7 +26,6 @@ import org.apache.log4j.Logger;
 import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
 import org.springframework.security.core.Authentication;
 import org.springframework.security.core.AuthenticationException;
-import org.springframework.security.core.userdetails.UserDetailsService;
 import org.springframework.security.web.authentication.WebAuthenticationDetails;
 import org.springframework.stereotype.Component;
 
@@ -41,19 +40,19 @@ public class LogsearchAuthenticationProvider extends
     .getLogger("org.apache.ambari.logsearch.audit");
 
   @Inject
-  UserDao userDao;
+  private UserDao userDao;
 
   @Inject
-  LogsearchLdapAuthenticationProvider ldapAuthenticationProvider;
+  private LogsearchLdapAuthenticationProvider ldapAuthenticationProvider;
 
   @Inject
-  LogsearchFileAuthenticationProvider fileAuthenticationProvider;
+  private LogsearchFileAuthenticationProvider fileAuthenticationProvider;
 
   @Inject
-  LogsearchSimpleAuthenticationProvider simpleAuthenticationProvider;
+  private LogsearchSimpleAuthenticationProvider simpleAuthenticationProvider;
 
   @Inject
-  LogsearchExternalServerAuthenticationProvider externalServerAuthenticationProvider;
+  private LogsearchExternalServerAuthenticationProvider externalServerAuthenticationProvider;
 
   @Override
   public Authentication authenticate(Authentication authentication)
@@ -128,7 +127,7 @@ public class LogsearchAuthenticationProvider extends
       authentication = fileAuthenticationProvider.authenticate(authentication);
     } else if (authMethod.equals(AUTH_METHOD.SIMPLE)) {
       authentication = simpleAuthenticationProvider.authenticate(authentication);
-    }else if (authMethod.equals(AUTH_METHOD.EXTERNAL_AUTH)) {
+    } else if (authMethod.equals(AUTH_METHOD.EXTERNAL_AUTH)) {
       authentication = externalServerAuthenticationProvider.authenticate(authentication);
     } else {
       logger.error("Invalid authentication method :" + authMethod.name());

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
index a06a381..d398bdc 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchExternalServerAuthenticationProvider.java
@@ -21,7 +21,6 @@ package org.apache.ambari.logsearch.web.security;
 import java.util.ArrayList;
 import java.util.List;
 
-import javax.annotation.PostConstruct;
 import javax.inject.Inject;
 
 import org.apache.ambari.logsearch.common.ExternalServerClient;
@@ -114,7 +113,7 @@ public class LogsearchExternalServerAuthenticationProvider extends
   @Override
   public Authentication authenticate(Authentication authentication)
       throws AuthenticationException {
-    if (!this.isEnable()) {
+    if (!authConfig.isAuthExternalEnabled()) {
       LOG.debug("external server auth is disabled.");
       return authentication;
     }
@@ -173,13 +172,4 @@ public class LogsearchExternalServerAuthenticationProvider extends
     }
     return false;
   }
-
-  /**
-   * Return true/false based on EXTERNAL_AUTH authentication method is
-   * enabled/disabled return boolean
-   */
-  @Override
-  public boolean isEnable() {
-    return isEnable(AUTH_METHOD.EXTERNAL_AUTH);
-  }
 }

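The common thread in these provider changes is that each provider's own isEnable() override goes away and the decision moves to an injected AuthConfig flag checked at the top of authenticate(); a disabled provider simply hands the token back untouched so LogsearchAuthenticationProvider can try the next method. A condensed sketch of that guard shape, assuming only the isAuth*Enabled() getters visible in these hunks (the rest of AuthConfig is not shown here):

import javax.inject.Inject;
import org.apache.ambari.logsearch.conf.AuthConfig;
import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;

public class GuardedProviderSketch implements AuthenticationProvider {

  @Inject
  private AuthConfig authConfig;  // exposes flags such as isAuthExternalEnabled(), as seen in this patch

  @Override
  public Authentication authenticate(Authentication authentication) throws AuthenticationException {
    if (!authConfig.isAuthExternalEnabled()) {
      // Disabled: hand the token back unchanged so the next auth method can be tried.
      return authentication;
    }
    // provider-specific credential check would go here
    return authentication;
  }

  @Override
  public boolean supports(Class<?> authentication) {
    return UsernamePasswordAuthenticationToken.class.isAssignableFrom(authentication);
  }
}
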
http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
index 180de31..9662266 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchFileAuthenticationProvider.java
@@ -20,6 +20,7 @@ package org.apache.ambari.logsearch.web.security;
 
 import java.util.Collection;
 
+import org.apache.ambari.logsearch.conf.AuthConfig;
 import org.apache.ambari.logsearch.util.CommonUtil;
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
@@ -41,11 +42,14 @@ public class LogsearchFileAuthenticationProvider extends LogsearchAbstractAuthen
   private static Logger logger = Logger.getLogger(LogsearchFileAuthenticationProvider.class);
 
   @Inject
+  private AuthConfig authConfig;
+
+  @Inject
   private UserDetailsService userDetailsService;
 
   @Override
   public Authentication authenticate(Authentication authentication) throws AuthenticationException {
-    if (!this.isEnable()) {
+    if (!authConfig.isAuthFileEnabled()) {
       logger.debug("File auth is disabled.");
       return authentication;
     }
@@ -80,9 +84,4 @@ public class LogsearchFileAuthenticationProvider extends LogsearchAbstractAuthen
     authentication = new UsernamePasswordAuthenticationToken(username, encPassword, authorities);
     return authentication;
   }
-
-  @Override
-  public boolean isEnable() {
-    return isEnable(AUTH_METHOD.FILE);
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapAuthenticationProvider.java
index 742cd9b..f6c7df0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapAuthenticationProvider.java
@@ -20,6 +20,7 @@ package org.apache.ambari.logsearch.web.security;
 
 import java.util.List;
 
+import org.apache.ambari.logsearch.conf.AuthConfig;
 import org.apache.log4j.Logger;
 import org.springframework.ldap.CommunicationException;
 import org.springframework.ldap.core.support.LdapContextSource;
@@ -31,6 +32,9 @@ import org.springframework.security.ldap.authentication.LdapAuthenticationProvid
 import org.springframework.security.ldap.search.FilterBasedLdapUserSearch;
 import org.springframework.stereotype.Component;
 
+import javax.annotation.PostConstruct;
+import javax.inject.Inject;
+
 @Component
 public class LogsearchLdapAuthenticationProvider extends
   LogsearchAbstractAuthenticationProvider {
@@ -42,9 +46,16 @@ public class LogsearchLdapAuthenticationProvider extends
   private static LdapAuthenticationProvider ldapAuthProvider = null;
   private String logStatement = "";
 
+  @Inject
+  private AuthConfig authConfig;
+
   public LogsearchLdapAuthenticationProvider() {
+  }
+
+  @PostConstruct
+  public void postConstruct() {
     logger.debug("Creating object of ldap auth provider ");
-    if (this.isEnable()) {
+    if (authConfig.isAuthLdapEnabled()) {
       ldapAuthProvider = loadLdapAuthenticationProvider();
     } else {
       logger.info("Ldap auth is disabled");
@@ -54,7 +65,7 @@ public class LogsearchLdapAuthenticationProvider extends
   @Override
   public Authentication authenticate(Authentication authentication)
     throws AuthenticationException {
-    if (!this.isEnable()) {
+    if (!authConfig.isAuthLdapEnabled()) {
       logger.debug("Ldap auth is disabled");
       return authentication;
     }
@@ -98,7 +109,7 @@ public class LogsearchLdapAuthenticationProvider extends
    *
    * @return corresponding LDAP authentication provider
    */
-  LdapAuthenticationProvider loadLdapAuthenticationProvider() {
+  private LdapAuthenticationProvider loadLdapAuthenticationProvider() {
     if (reloadLdapServerProperties()) {
       logger.info("LDAP Properties changed - rebuilding Context");
       LdapContextSource springSecurityContextSource = new LdapContextSource();
@@ -173,9 +184,4 @@ public class LogsearchLdapAuthenticationProvider extends
     return false;
   }
 
-  @Override
-  public boolean isEnable() {
-    return isEnable(AUTH_METHOD.LDAP);
-  }
-
 }

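The LDAP provider's constructor used to decide whether to build the LdapAuthenticationProvider, but with an injected AuthConfig that check has to wait: fields annotated with @Inject are still null while the constructor runs and only become usable once dependency injection has finished, which is what the new @PostConstruct method relies on. A small standalone illustration of that lifecycle (names here are hypothetical, not Log Search classes):

import javax.annotation.PostConstruct;
import javax.inject.Inject;
import org.apache.ambari.logsearch.conf.AuthConfig;
import org.springframework.stereotype.Component;

@Component
public class LifecycleSketch {

  @Inject
  private AuthConfig authConfig;   // still null while the constructor executes

  public LifecycleSketch() {
    // Touching authConfig here would throw a NullPointerException.
  }

  @PostConstruct
  public void init() {
    // Safe: injection has completed by the time the container calls this.
    if (authConfig.isAuthLdapEnabled()) {
      // build the LdapAuthenticationProvider, as the patched class now does
    }
  }
}
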
http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapBindAuthenticator.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapBindAuthenticator.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapBindAuthenticator.java
index f9207b1..10f7507 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapBindAuthenticator.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchLdapBindAuthenticator.java
@@ -40,69 +40,7 @@ public class LogsearchLdapBindAuthenticator extends BindAuthenticator {
 
   @Override
   public DirContextOperations authenticate(Authentication authentication) {
-
-    DirContextOperations user = super.authenticate(authentication);
-
-    return setAmbariAdminAttr(user);
-  }
-
-  /**
-   * Checks whether user is a member of ambari administrators group in LDAP.
-   * If yes, sets user's ambari_admin attribute to true
-   *
-   * @param user
-   * @return
-   */
-  private DirContextOperations setAmbariAdminAttr(DirContextOperations user) {
-    String baseDn = ldapServerProperties.getBaseDN().toLowerCase();
-    String groupBase = ldapServerProperties.getGroupBase().toLowerCase();
-    String groupObjectClass = ldapServerProperties.getGroupObjectClass();
-    String groupMembershipAttr = ldapServerProperties
-      .getGroupMembershipAttr();
-    String adminGroupMappingRules = ldapServerProperties
-      .getAdminGroupMappingRules();
-    final String groupNamingAttribute = ldapServerProperties
-      .getGroupNamingAttr();
-    String groupSearchFilter = ldapServerProperties.getGroupSearchFilter();
-
-    // If groupBase is set incorrectly or isn't set - search in BaseDn
-    int indexOfBaseDn = groupBase.indexOf(baseDn);
-    groupBase = indexOfBaseDn <= 0 ? "" : groupBase.substring(0,
-      indexOfBaseDn - 1);
-
-    StringBuilder filterBuilder = new StringBuilder();
-
-    filterBuilder.append("(&(");
-    filterBuilder.append(groupMembershipAttr);
-    filterBuilder.append("=");
-    filterBuilder.append(user.getNameInNamespace());// DN
-
-    if ((groupSearchFilter == null) || groupSearchFilter.equals("")) {
-      // If groupSearchFilter is not specified, build it from other
-      // authorization
-      // group properties
-      filterBuilder.append(")(objectclass=");
-      filterBuilder.append(groupObjectClass);
-      filterBuilder.append(")(|");
-      String[] adminGroupMappingRegexs = adminGroupMappingRules
-        .split(",");
-      for (String adminGroupMappingRegex : adminGroupMappingRegexs) {
-        filterBuilder.append("(");
-        filterBuilder.append(groupNamingAttribute);
-        filterBuilder.append("=");
-        filterBuilder.append(adminGroupMappingRegex);
-        filterBuilder.append(")");
-      }
-      filterBuilder.append(")");
-    } else {
-      filterBuilder.append(")");
-      filterBuilder.append(groupSearchFilter);
-    }
-    filterBuilder.append(")");
-
-    logger.info("filter=" + filterBuilder);
-    // TODO: Filter is not used anywhere
-    return user;
+    return super.authenticate(authentication);
   }
 
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java
index ec2516c..5dc1c1f 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LogsearchSimpleAuthenticationProvider.java
@@ -18,6 +18,7 @@
  */
 package org.apache.ambari.logsearch.web.security;
 
+import org.apache.ambari.logsearch.conf.AuthConfig;
 import org.apache.ambari.logsearch.web.model.User;
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.StringUtils;
@@ -28,14 +29,19 @@ import org.springframework.security.core.Authentication;
 import org.springframework.security.core.AuthenticationException;
 import org.springframework.stereotype.Component;
 
+import javax.inject.Inject;
+
 @Component
 public class LogsearchSimpleAuthenticationProvider extends LogsearchAbstractAuthenticationProvider {
 
   private static Logger logger = Logger.getLogger(LogsearchSimpleAuthenticationProvider.class);
 
+  @Inject
+  private AuthConfig authConfig;
+
   @Override
   public Authentication authenticate(Authentication authentication) throws AuthenticationException {
-    if (!this.isEnable()) {
+    if (!authConfig.isAuthSimpleEnabled()) {
       logger.debug("Simple auth is disabled");
       return authentication;
     }
@@ -64,9 +70,4 @@ public class LogsearchSimpleAuthenticationProvider extends LogsearchAbstractAuth
       return false;
     }
   }
-
-  @Override
-  public boolean isEnable() {
-    return this.isEnable(AUTH_METHOD.SIMPLE);
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/resources/log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/log4j.xml b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/log4j.xml
index ad96558..60b09cd 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/log4j.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/log4j.xml
@@ -57,7 +57,7 @@
 
  <appender name="rolling_file_json"
   class="org.apache.ambari.logsearch.appender.LogsearchRollingFileAppender">
-  <param name="file" value="logs/logsearch-app.json" />
+  <param name="file" value="target/logs/logsearch-app.json" />
   <param name="Threshold" value="info" />
   <param name="append" value="true" />
   <param name="maxFileSize" value="10MB" />
@@ -67,7 +67,7 @@
 
   <appender name="audit_rolling_file_json"
     class="org.apache.ambari.logsearch.appender.LogsearchRollingFileAppender">
-    <param name="file" value="logs/logsearch-audit.json" />
+    <param name="file" value="target/logs/logsearch-audit.json" />
     <param name="Threshold" value="info" />
     <param name="append" value="true" />
     <param name="maxFileSize" value="10MB" />
@@ -77,7 +77,7 @@
 
   <appender name="performance_analyzer_json"
     class="org.apache.ambari.logsearch.appender.LogsearchRollingFileAppender">
-    <param name="file" value="logs/logsearch-performance.json" />
+    <param name="file" value="target/logs/logsearch-performance.json" />
     <param name="Threshold" value="info" />
     <param name="append" value="true" />
     <param name="maxFileSize" value="10MB" />

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/security-applicationContext.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/security-applicationContext.xml b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/security-applicationContext.xml
deleted file mode 100644
index 9961cd0..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/security-applicationContext.xml
+++ /dev/null
@@ -1,83 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<beans:beans xmlns="http://www.springframework.org/schema/security"
-xmlns:beans="http://www.springframework.org/schema/beans"
-xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xmlns:security="http://www.springframework.org/schema/security"
-xsi:schemaLocation="http://www.springframework.org/schema/beans
-http://www.springframework.org/schema/beans/spring-beans-4.2.xsd
-http://www.springframework.org/schema/security
-http://www.springframework.org/schema/security/spring-security-4.0.xsd">
-  
-    <security:http pattern="/login.html" security="none" />
-    <security:http pattern="/styles/**" security="none" />
-    <security:http pattern="/fonts/**" security="none" />
-    <security:http pattern="/scripts/**" security="none" />
-    <security:http pattern="/libs/**" security="none" />
-    <security:http pattern="/images/**" security="none" />
-    <security:http pattern="/templates/**" security="none" />
-    <security:http pattern="/favicon.ico" security="none" />
-    <security:http pattern="/api/v1/public/**" security="none" />
-    <security:http pattern="/api/v1/swagger.json" security="none"/>
-    <security:http pattern="/api/v1/swagger.yaml" security="none"/>
-  	
-	<security:http disable-url-rewriting="true" use-expressions="true" create-session="always" entry-point-ref="authenticationProcessingFilterEntryPoint">
-		<csrf disabled="true"/> 
-		<security:session-management session-fixation-protection="newSession" />
-		<intercept-url pattern="/**" access="isAuthenticated()"/>       
-        <security:custom-filter ref="krbAuthenticationFilter" after="FIRST" />
-		<security:custom-filter position="FORM_LOGIN_FILTER" ref="logsearchUsernamePasswordAuthenticationFilter"/>
-		<security:custom-filter position="LAST" ref="userContextFormationFilter"/>
-		<security:logout delete-cookies="JSESSIONID" logout-url="/logout.html" success-handler-ref="logsearchLogoutSuccessHandler" />
-		<http-basic entry-point-ref="authenticationProcessingFilterEntryPoint"/>
-	</security:http>
-
-	<beans:bean id="userContextFormationFilter" class="org.apache.ambari.logsearch.web.filters.LogsearchSecurityContextFormationFilter"/>
-
-    <beans:bean id="krbAuthenticationFilter" class="org.apache.ambari.logsearch.web.filters.LogsearchKRBAuthenticationFilter"/>
-  
-	<beans:bean id="logsearchUsernamePasswordAuthenticationFilter" class="org.apache.ambari.logsearch.web.filters.LogsearchUsernamePasswordAuthenticationFilter">
-		<beans:property name="authenticationManager" ref="authenticationManager"/>
-		<beans:property name="authenticationSuccessHandler" ref="authSuccessHandler"/>
-		<beans:property name="authenticationFailureHandler"	ref="authFailureHandler"/>
-	</beans:bean>
-  
-	<beans:bean id="authenticationProcessingFilterEntryPoint" class="org.apache.ambari.logsearch.web.filters.LogsearchAuthenticationEntryPoint">
-		<beans:constructor-arg index="0" value="/login.html"/>
-		<beans:property name="forceHttps" value="false"/>
-	</beans:bean>
-
-	<beans:bean id="authSuccessHandler" class="org.apache.ambari.logsearch.web.authenticate.LogsearchAuthSuccessHandler">
-	</beans:bean>
-
-	<beans:bean id="authFailureHandler" class="org.apache.ambari.logsearch.web.authenticate.LogsearchAuthFailureHandler">
-	</beans:bean>
-
-	<beans:bean id="logsearchLogoutSuccessHandler" class="org.apache.ambari.logsearch.web.authenticate.LogsearchLogoutSuccessHandler">
-	</beans:bean>
-	
-	 <beans:bean id="logsearchAuthenticationProvider" class="org.apache.ambari.logsearch.web.security.LogsearchAuthenticationProvider" >
- 	</beans:bean>
-
-	<security:authentication-manager alias="authenticationManager">
-         <security:authentication-provider ref="logsearchAuthenticationProvider"/>
-	</security:authentication-manager>
-	
-	<beans:bean id="securityEventListener" class ="org.apache.ambari.logsearch.web.listeners.SpringEventListener"/>
-	
-</beans:beans>

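With this XML filter chain deleted, the equivalent wiring presumably moves to Java configuration elsewhere in the patch; that class is not shown in this hunk, so the following is only a rough orientation sketch of how the same rules read in Spring Security 4 Java config (same ignored paths, authenticate-everything rule, Kerberos filter and logout handling as the removed file):

import org.apache.ambari.logsearch.web.filters.LogsearchKRBAuthenticationFilter;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.builders.WebSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;

@Configuration
@EnableWebSecurity
public class SecurityConfigSketch extends WebSecurityConfigurerAdapter {

  @Override
  public void configure(WebSecurity web) {
    // The paths the removed XML marked security="none".
    web.ignoring().antMatchers("/login.html", "/styles/**", "/fonts/**", "/scripts/**",
        "/libs/**", "/images/**", "/templates/**", "/favicon.ico",
        "/api/v1/public/**", "/api/v1/swagger.json", "/api/v1/swagger.yaml");
  }

  @Override
  protected void configure(HttpSecurity http) throws Exception {
    http
      .csrf().disable()
      .authorizeRequests().anyRequest().authenticated()
      .and()
      .addFilterBefore(new LogsearchKRBAuthenticationFilter(), UsernamePasswordAuthenticationFilter.class)
      .logout().logoutUrl("/logout.html").deleteCookies("JSESSIONID");
  }
}
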
http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
deleted file mode 100755
index 0f1beec..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
+++ /dev/null
@@ -1,62 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<web-app xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://java.sun.com/xml/ns/javaee" xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd" id="WebApp_ID" version="3.0">
-
-  <listener>
-    <listener-class>org.springframework.web.context.ContextLoaderListener</listener-class>
-  </listener>
-  <context-param>
-    <param-name>contextClass</param-name>
-    <param-value>
-      org.springframework.web.context.support.AnnotationConfigWebApplicationContext
-    </param-value>
-  </context-param>
-  <context-param>
-    <param-name>contextConfigLocation</param-name>
-    <param-value>org.apache.ambari.logsearch.conf.ApplicationConfig</param-value>
-  </context-param>
-
-	<listener>
-		<listener-class>org.springframework.web.context.request.RequestContextListener</listener-class>
-	</listener>
-	<filter>
-		<filter-name>springSecurityFilterChain</filter-name>
-		<filter-class>org.springframework.web.filter.DelegatingFilterProxy</filter-class>
-	</filter>
-
-	<filter-mapping>
-		<filter-name>springSecurityFilterChain</filter-name>
-		<url-pattern>/*</url-pattern>
-	</filter-mapping>
-  
-	<!-- Servlet mapping for REST -->
-	<servlet>
-		<servlet-name>REST service</servlet-name>
-		<servlet-class>org.glassfish.jersey.servlet.ServletContainer</servlet-class>
-		<init-param>
-			<param-name>jersey.config.server.provider.packages</param-name>
-			<param-value>org.apache.ambari.logsearch.rest,io.swagger.jaxrs.listing</param-value>
-		</init-param>
-		<load-on-startup>1</load-on-startup>
-	</servlet>
-	  <servlet-mapping>
-		<servlet-name>REST service</servlet-name>
-		<url-pattern>/api/v1/*</url-pattern>
-	</servlet-mapping>
-
-</web-app>
\ No newline at end of file

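Removing web.xml means the portal has to bootstrap the servlet context programmatically. The deleted descriptor pins down what that bootstrap must provide: an AnnotationConfigWebApplicationContext pointed at org.apache.ambari.logsearch.conf.ApplicationConfig, the springSecurityFilterChain on /*, and the Jersey servlet on /api/v1/*. The project's actual initializer is not in this hunk, so the sketch below is only an approximation of that wiring:

import javax.servlet.ServletContext;
import org.springframework.web.WebApplicationInitializer;
import org.springframework.web.context.ContextLoaderListener;
import org.springframework.web.context.request.RequestContextListener;
import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
import org.springframework.web.filter.DelegatingFilterProxy;

public class WebAppInitializerSketch implements WebApplicationInitializer {

  @Override
  public void onStartup(ServletContext servletContext) {
    AnnotationConfigWebApplicationContext context = new AnnotationConfigWebApplicationContext();
    context.register(org.apache.ambari.logsearch.conf.ApplicationConfig.class);

    servletContext.addListener(new ContextLoaderListener(context));
    servletContext.addListener(new RequestContextListener());

    // Equivalent of the removed springSecurityFilterChain filter-mapping on /*.
    servletContext.addFilter("springSecurityFilterChain",
        new DelegatingFilterProxy("springSecurityFilterChain"))
        .addMappingForUrlPatterns(null, false, "/*");

    // The Jersey "REST service" servlet on /api/v1/* would be registered here as well.
  }
}
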
http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties b/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
index b588a2d..f930ec9 100644
--- a/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
+++ b/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
@@ -21,7 +21,8 @@ logfeeder.config.files=shipper-conf/global.config.json,\
   shipper-conf/input.config-logsearch.json,\
   shipper-conf/input.config-hst.json,\
   shipper-conf/input.config-system_message.json,\
-  shipper-conf/input.config-secure_log.json
+  shipper-conf/input.config-secure_log.json,\
+  shipper-conf/input.config-hdfs.json
 logfeeder.log.filter.enable=true
 logfeeder.solr.config.interval=5
 logfeeder.solr.core.config.name=history

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-hdfs.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-hdfs.json b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-hdfs.json
new file mode 100644
index 0000000..d975b0d
--- /dev/null
+++ b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-hdfs.json
@@ -0,0 +1,172 @@
+{
+  "input": [
+    {
+      "type": "hdfs_audit",
+      "rowtype": "audit",
+      "is_enabled": "true",
+      "add_fields": {
+        "logType": "HDFSAudit",
+        "enforcer": "hadoop-acl",
+        "repoType": "1",
+        "repo": "hdfs"
+      },
+      "path": "/root/test-logs/hdfs-audit/hdfs-audit.log"
+    }
+  ],
+  "filter": [
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_audit"
+          ]
+
+        }
+
+      },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "evtTime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"keyvalue",
+      "sort_order":1,
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_audit"
+          ]
+
+        }
+
+      },
+      "source_field":"log_message",
+      "value_split":"=",
+      "field_split":"\t",
+      "post_map_values":{
+        "src":{
+          "map_fieldname":{
+            "new_fieldname":"resource"
+          }
+
+        },
+        "ip":{
+          "map_fieldname":{
+            "new_fieldname":"cliIP"
+          }
+
+        },
+        "allowed":[
+          {
+            "map_fieldvalue":{
+              "pre_value":"true",
+              "post_value":"1"
+            }
+
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"false",
+              "post_value":"0"
+            }
+
+          },
+          {
+            "map_fieldname":{
+              "new_fieldname":"result"
+            }
+
+          }
+
+        ],
+        "cmd":{
+          "map_fieldname":{
+            "new_fieldname":"action"
+          }
+
+        },
+        "proto":{
+          "map_fieldname":{
+            "new_fieldname":"cliType"
+          }
+
+        },
+        "callerContext":{
+          "map_fieldname":{
+            "new_fieldname":"req_caller_id"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"grok",
+      "sort_order":2,
+      "source_field":"ugi",
+      "remove_source_field":"false",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_audit"
+          ]
+
+        }
+
+      },
+      "message_pattern":"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}",
+      "post_map_values":{
+        "user":{
+          "map_fieldname":{
+            "new_fieldname":"reqUser"
+          }
+
+        },
+        "x_user":{
+          "map_fieldname":{
+            "new_fieldname":"reqUser"
+          }
+
+        },
+        "p_user":{
+          "map_fieldname":{
+            "new_fieldname":"reqUser"
+          }
+
+        },
+        "k_user":{
+          "map_fieldname":{
+            "new_fieldname":"proxyUsers"
+          }
+
+        },
+        "p_authType":{
+          "map_fieldname":{
+            "new_fieldname":"authType"
+          }
+
+        },
+        "k_authType":{
+          "map_fieldname":{
+            "new_fieldname":"proxyAuthType"
+          }
+
+        }
+
+      }
+
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d15e0b2d/ambari-logsearch/docker/test-logs/hdfs-audit/hdfs-audit.log
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-logs/hdfs-audit/hdfs-audit.log b/ambari-logsearch/docker/test-logs/hdfs-audit/hdfs-audit.log
new file mode 100644
index 0000000..a646cc4
--- /dev/null
+++ b/ambari-logsearch/docker/test-logs/hdfs-audit/hdfs-audit.log
@@ -0,0 +1,4 @@
+2016-03-18 10:00:47,252 INFO FSNamesystem.audit: allowed=true	ugi=ambari-qa (auth:SIMPLE)	ip=/192.168.64.102	cmd=getfileinfo	src=/ats/active	dst=null	perm=null	proto=rpc	callerContext=HIVE_QUERY_ID:ambari-qa_20160317200111_223b3079-4a2d-431c-920f-6ba37ed63e9f
+2016-03-18 10:00:48,939 INFO FSNamesystem.audit: allowed=true	ugi=ambari-qa (auth:SIMPLE)	ip=/192.168.64.102	cmd=delete	src=/tmp/hive/ambari-qa/resource1	dst=null	perm=null	proto=rpc
+2016-03-18 10:00:49,242 INFO FSNamesystem.audit: allowed=true	ugi=ambari-qa (auth:SIMPLE)	ip=/192.168.64.102	cmd=getfileinfo	src=/tmp/hive/ambari-qa/resource2	dst=null	perm=null	proto=rpc
+2016-03-18 10:00:49,277 INFO FSNamesystem.audit: allowed=true	ugi=ambari-qa (auth:SIMPLE)	ip=/192.168.64.102	cmd=getfileinfo	src=/tmp/hive/ambari-qa/resource2	dst=null	perm=null	proto=rpc

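The shipper config added above handles these lines in two stages: a grok stage pulls out evtTime, level, logger_name and the remaining log_message, then a keyvalue stage splits that tab-separated remainder on "=" and renames fields via post_map_values (cmd to action, ip to cliIP, src to resource, allowed to result, and so on). The plain-Java snippet below only mirrors the keyvalue split on the first sample line; it uses no Log Feeder classes and is just an illustration of the configured behavior:

import java.util.LinkedHashMap;
import java.util.Map;

public class KeyValueSplitDemo {
  public static void main(String[] args) {
    // Tab-separated body of the first sample audit line, after the grok stage
    // has already stripped the timestamp, level and logger name.
    String logMessage = "allowed=true\tugi=ambari-qa (auth:SIMPLE)\tip=/192.168.64.102"
        + "\tcmd=getfileinfo\tsrc=/ats/active\tdst=null\tperm=null\tproto=rpc";

    Map<String, String> fields = new LinkedHashMap<>();
    for (String pair : logMessage.split("\t")) {   // field_split
      String[] kv = pair.split("=", 2);            // value_split
      if (kv.length == 2) {
        fields.put(kv[0], kv[1]);
      }
    }

    // post_map_values would then rename keys, e.g. cmd -> action, ip -> cliIP,
    // src -> resource, and map allowed=true to result=1.
    System.out.println(fields);
  }
}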

[23/50] [abbrv] ambari git commit: AMBARI-18253. Fix LogSearch utility classes (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
index 9baccce..53e0aab 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
@@ -38,16 +38,19 @@ import java.util.concurrent.CopyOnWriteArrayList;
 
 import javax.ws.rs.core.Response;
 
+import org.apache.ambari.logsearch.common.ConfigHelper;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.MessageEnums;
+import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao;
 import org.apache.ambari.logsearch.graph.GraphDataGenerator;
 import org.apache.ambari.logsearch.query.QueryGenerationBase;
 import org.apache.ambari.logsearch.util.BizUtil;
-import org.apache.ambari.logsearch.util.ConfigUtil;
+import org.apache.ambari.logsearch.util.DateUtil;
 import org.apache.ambari.logsearch.util.FileUtil;
-import org.apache.ambari.logsearch.util.PropertiesUtil;
+import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.ambari.logsearch.util.SolrUtil;
 import org.apache.ambari.logsearch.view.VBarDataList;
 import org.apache.ambari.logsearch.view.VBarGraphData;
 import org.apache.ambari.logsearch.view.VCount;
@@ -97,10 +100,6 @@ public class LogsMgr extends MgrBase {
   @Autowired
   private ServiceLogsSolrDao serviceLogsSolrDao;
   @Autowired
-  private BizUtil bizUtil;
-  @Autowired
-  private FileUtil fileUtil;
-  @Autowired
   private GraphDataGenerator graphDataGenerator;
 
   public String searchLogs(SearchCriteria searchCriteria) {
@@ -114,7 +113,7 @@ public class LogsMgr extends MgrBase {
         return getPageByKeyword(searchCriteria);
       } catch (SolrException | SolrServerException e) {
         logger.error("Error while getting keyword=" + keyword, e);
-        throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
             .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
       }
     } else if (!StringUtils.isBlank(logId)) {
@@ -122,7 +121,7 @@ public class LogsMgr extends MgrBase {
         return getPageByLogId(searchCriteria);
       } catch (SolrException e) {
         logger.error("Error while getting keyword=" + keyword, e);
-        throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
             .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
       }
     } else if (isLastPage) {
@@ -152,10 +151,10 @@ public class LogsMgr extends MgrBase {
 
     SolrQuery solrQuery = new SolrQuery();
     VGroupList collection = new VGroupList();
-    queryGenerator.setMainQuery(solrQuery, null);
-    queryGenerator.setFacetField(solrQuery,
+    SolrUtil.setMainQuery(solrQuery, null);
+    SolrUtil.setFacetField(solrQuery,
         field);
-    queryGenerator.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
+    SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
     try {
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
       if(response == null){
@@ -190,7 +189,7 @@ public class LogsMgr extends MgrBase {
       return convertObjToString(collection);
     } catch (IOException | SolrServerException | SolrException e) {
       logger.error(e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
           .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
 
@@ -205,8 +204,8 @@ public class LogsMgr extends MgrBase {
     String hierarchy = "host,type,level";
     VGraphInfo graphInfo = new VGraphInfo();
     try {
-      queryGenerator.setMainQuery(solrQuery, null);
-      queryGenerator.setFacetPivot(solrQuery, 1, hierarchy);
+      SolrUtil.setMainQuery(solrQuery, null);
+      SolrUtil.setFacetPivot(solrQuery, 1, hierarchy);
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
       if (response == null) {
         return convertObjToString(graphInfo);
@@ -228,7 +227,7 @@ public class LogsMgr extends MgrBase {
       return convertObjToString(graphInfo);
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
           .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
@@ -255,11 +254,11 @@ public class LogsMgr extends MgrBase {
     VCountList collection = new VCountList();
     List<VCount> vCounts = new ArrayList<VCount>();
     SolrQuery solrQuery = new SolrQuery();
-    queryGenerator.setMainQuery(solrQuery, null);
+    SolrUtil.setMainQuery(solrQuery, null);
     if(field == null){
       return collection;
     }
-    queryGenerator.setFacetField(solrQuery, field);
+    SolrUtil.setFacetField(solrQuery, field);
     try {
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
       if (response == null){
@@ -286,7 +285,7 @@ public class LogsMgr extends MgrBase {
 
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
           .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
 
@@ -419,7 +418,7 @@ public class LogsMgr extends MgrBase {
     VNodeList list = new VNodeList();
     try {
 
-      queryGenerator.setFacetPivot(solrQuery, 1, firstHirarchy,
+      SolrUtil.setFacetPivot(solrQuery, 1, firstHirarchy,
         secondHirarchy);
 
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
@@ -454,7 +453,7 @@ public class LogsMgr extends MgrBase {
       list.setvNodeList(dataList);
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
           .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
 
@@ -485,7 +484,7 @@ public class LogsMgr extends MgrBase {
     String secondHirarchy = "type,level";
 
     try {
-      queryGenerator.setFacetPivot(solrQuery, 1, firstHirarchy,
+      SolrUtil.setFacetPivot(solrQuery, 1, firstHirarchy,
         secondHirarchy);
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
       List<List<PivotField>> firstHirarchicalPivotFields = null;
@@ -516,7 +515,7 @@ public class LogsMgr extends MgrBase {
       return convertObjToString(list);
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
           .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
@@ -536,7 +535,7 @@ public class LogsMgr extends MgrBase {
     HashMap<String, String> map = new HashMap<String, String>();
     List<VNameValue> logsCounts = new ArrayList<VNameValue>();
     try {
-      queryGenerator.setFacetField(query, LogSearchConstants.SOLR_LEVEL);
+      SolrUtil.setFacetField(query, LogSearchConstants.SOLR_LEVEL);
       List<Count> logLevelCounts = getFacetCounts(query,
           LogSearchConstants.SOLR_LEVEL);
       if (logLevelCounts == null) {
@@ -587,11 +586,11 @@ public class LogsMgr extends MgrBase {
 
     String key = (String) searchCriteria.getParamValue("keyword");
     if(StringUtils.isBlank(key)){
-      throw restErrorUtil.createRESTException("Keyword was not given",
+      throw RESTErrorUtil.createRESTException("Keyword was not given",
           MessageEnums.DATA_NOT_FOUND);
     }
 
-    String keyword = solrUtil.escapeForStandardTokenizer(key);
+    String keyword = SolrUtil.escapeForStandardTokenizer(key);
 
     if(keyword.startsWith("\"") && keyword.endsWith("\"")){
       keyword = keyword.substring(1);
@@ -625,13 +624,13 @@ public class LogsMgr extends MgrBase {
         queryResponse = serviceLogsSolrDao.process(
             nextPageLogTimeQuery);
         if(queryResponse == null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
 
         SolrDocumentList docList = queryResponse.getResults();
         if(docList ==null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
 
@@ -639,10 +638,10 @@ public class LogsMgr extends MgrBase {
 
         Date logDate = (Date) solrDoc.get(LogSearchConstants.LOGTIME);
         if(logDate == null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
-        nextPageLogTime = dateUtil
+        nextPageLogTime = DateUtil
           .convertDateWithMillisecondsToSolrDate(logDate);
         nextPageLogID = ""
           + solrDoc.get(LogSearchConstants.ID);
@@ -661,17 +660,17 @@ public class LogsMgr extends MgrBase {
           LogSearchConstants.LOGTIME, "\"" + nextPageLogTime + "\"");
         queryGenerator.setSingleExcludeFilter(listRemoveIds,
           LogSearchConstants.ID, nextPageLogID);
-        queryGenerator.setFl(listRemoveIds, LogSearchConstants.ID);
+        SolrUtil.setFl(listRemoveIds, LogSearchConstants.ID);
         queryResponse = serviceLogsSolrDao.process(
             listRemoveIds);
         if(queryResponse == null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
 
         SolrDocumentList docListIds = queryResponse.getResults();
         if(docListIds ==null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
         boolean isFirst = true;
@@ -726,13 +725,13 @@ public class LogsMgr extends MgrBase {
         queryResponse = serviceLogsSolrDao.process(
             logTimeThroughRangeQuery);
         if(queryResponse == null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
 
         SolrDocumentList documentList = queryResponse.getResults();
         if(documentList ==null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
 
@@ -743,10 +742,10 @@ public class LogsMgr extends MgrBase {
 
         Date keywordLogDate = (Date) solrDocument.get(LogSearchConstants.LOGTIME);
         if(keywordLogDate == null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
-        String originalKeywordDate = dateUtil
+        String originalKeywordDate = DateUtil
           .convertDateWithMillisecondsToSolrDate(keywordLogDate);
         String keywordId = "" + solrDocument.get(LogSearchConstants.ID);
 
@@ -758,14 +757,14 @@ public class LogsMgr extends MgrBase {
         if (!StringUtils.isBlank(sortByType) && sortByType
           .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
           keywordLogDate = DateUtils.addMilliseconds(keywordLogDate, 1);
-          String keywordDateTime = dateUtil
+          String keywordDateTime = DateUtil
             .convertDateWithMillisecondsToSolrDate(keywordLogDate);
           queryGenerator.setSingleRangeFilter(rangeLogQuery,
             LogSearchConstants.LOGTIME, startTime,
             keywordDateTime);
         } else {
           keywordLogDate = DateUtils.addMilliseconds(keywordLogDate, -1);
-          String keywordDateTime = dateUtil
+          String keywordDateTime = DateUtil
             .convertDateWithMillisecondsToSolrDate(keywordLogDate);
           queryGenerator.setSingleRangeFilter(rangeLogQuery,
             LogSearchConstants.LOGTIME, keywordDateTime,
@@ -784,7 +783,7 @@ public class LogsMgr extends MgrBase {
             .commonServiceFilterQuery(searchCriteria);
           queryGenerator.setSingleIncludeFilter(sameIdQuery,
             LogSearchConstants.LOGTIME, "\"" + originalKeywordDate + "\"");
-          queryGenerator.setFl(sameIdQuery, LogSearchConstants.ID);
+          SolrUtil.setFl(sameIdQuery, LogSearchConstants.ID);
           SolrDocumentList sameQueryDocList = serviceLogsSolrDao.process(sameIdQuery)
             .getResults();
           for (SolrDocument solrDocumenent : sameQueryDocList) {
@@ -819,7 +818,7 @@ public class LogsMgr extends MgrBase {
         int maxRows = searchCriteria.getMaxRows();
 
         if (currentPageNumber == 0) {
-          throw restErrorUtil.createRESTException("This is first Page Not",
+          throw RESTErrorUtil.createRESTException("This is first Page Not",
             MessageEnums.DATA_NOT_FOUND);
         }
 
@@ -838,20 +837,20 @@ public class LogsMgr extends MgrBase {
         queryResponse = serviceLogsSolrDao.process(
             lastLogTime);
         if(queryResponse == null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
 
         SolrDocumentList docList = queryResponse.getResults();
         if(docList ==null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
         SolrDocument solrDoc = docList.get(0);
 
         Date logDate = (Date) solrDoc.get(LogSearchConstants.LOGTIME);
         String sortByType = searchCriteria.getSortType();
-        lastLogsLogTime = dateUtil
+        lastLogsLogTime = DateUtil
           .convertDateWithMillisecondsToSolrDate(logDate);
         String lastLogsLogId = ""
           + solrDoc.get(LogSearchConstants.ID);
@@ -867,17 +866,17 @@ public class LogsMgr extends MgrBase {
           LogSearchConstants.LOGTIME, "\"" + lastLogsLogTime + "\"");
         queryGenerator.setSingleExcludeFilter(listRemoveIds,
           LogSearchConstants.ID, lastLogsLogId);
-        queryGenerator.setFl(listRemoveIds, LogSearchConstants.ID);
+        SolrUtil.setFl(listRemoveIds, LogSearchConstants.ID);
         queryResponse = serviceLogsSolrDao.process(
             lastLogTime);
         if(queryResponse == null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
 
         SolrDocumentList docListIds = queryResponse.getResults();
         if(docListIds == null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
         boolean isFirst = true;
@@ -942,13 +941,13 @@ public class LogsMgr extends MgrBase {
         queryResponse = serviceLogsSolrDao.process(
             logTimeThroughRangeQuery);
         if(queryResponse == null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
 
         SolrDocumentList documentList = queryResponse.getResults();
         if(documentList == null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
         SolrDocument solrDocument = new SolrDocument();
@@ -958,10 +957,10 @@ public class LogsMgr extends MgrBase {
 
         Date keywordLogDate = (Date) solrDocument.get(LogSearchConstants.LOGTIME);
         if(keywordLogDate == null){
-          throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
               MessageEnums.ERROR_SYSTEM);
         }
-        String originalKeywordDate = dateUtil
+        String originalKeywordDate = DateUtil
           .convertDateWithMillisecondsToSolrDate(keywordLogDate);
         String keywordId = "" + solrDocument.get(LogSearchConstants.ID);
 
@@ -972,8 +971,8 @@ public class LogsMgr extends MgrBase {
 
         if (!StringUtils.isBlank(sortByType) && sortByType
           .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
-       //   keywordLogDate = dateUtil.addMilliSecondsToDate(keywordLogDate, 1);
-          String keywordDateTime = dateUtil
+       //   keywordLogDate = DateUtil.addMilliSecondsToDate(keywordLogDate, 1);
+          String keywordDateTime = DateUtil
             .convertDateWithMillisecondsToSolrDate(keywordLogDate);
           queryGenerator.setSingleRangeFilter(rangeLogQuery,
             LogSearchConstants.LOGTIME, startTime,
@@ -981,8 +980,8 @@ public class LogsMgr extends MgrBase {
 
 
         } else {
-     //     keywordLogDate = dateUtil.addMilliSecondsToDate(keywordLogDate, -1);
-          String keywordDateTime = dateUtil
+     //     keywordLogDate = DateUtil.addMilliSecondsToDate(keywordLogDate, -1);
+          String keywordDateTime = DateUtil
             .convertDateWithMillisecondsToSolrDate(keywordLogDate);
           queryGenerator.setSingleRangeFilter(rangeLogQuery,
             LogSearchConstants.LOGTIME, keywordDateTime,
@@ -998,7 +997,7 @@ public class LogsMgr extends MgrBase {
             .commonServiceFilterQuery(searchCriteria);
           queryGenerator.setSingleIncludeFilter(sameIdQuery,
             LogSearchConstants.LOGTIME, "\"" + originalKeywordDate + "\"");
-          queryGenerator.setFl(sameIdQuery, LogSearchConstants.ID);
+          SolrUtil.setFl(sameIdQuery, LogSearchConstants.ID);
           SolrDocumentList sameQueryDocList = serviceLogsSolrDao.process(sameIdQuery)
             .getResults();
           for (SolrDocument solrDocumenent : sameQueryDocList) {
@@ -1028,7 +1027,7 @@ public class LogsMgr extends MgrBase {
       }
 
     }
-    throw restErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+    throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
         MessageEnums.ERROR_SYSTEM);
   }
 
@@ -1051,10 +1050,10 @@ public class LogsMgr extends MgrBase {
     try {
 
       SolrQuery logTimeByIdQuery = new SolrQuery();
-      queryGenerator.setMainQuery(logTimeByIdQuery, null);
+      SolrUtil.setMainQuery(logTimeByIdQuery, null);
       queryGenerator.setSingleIncludeFilter(logTimeByIdQuery,
           LogSearchConstants.ID, logId);
-      queryGenerator.setRowCount(solrQuery, 1);
+      SolrUtil.setRowCount(solrQuery, 1);
 
       QueryResponse queryResponse = serviceLogsSolrDao
           .process(logTimeByIdQuery);
@@ -1073,9 +1072,9 @@ public class LogsMgr extends MgrBase {
       }
 
       if (dateOfLogId != null) {
-        logTime = dateUtil.convertDateWithMillisecondsToSolrDate(dateOfLogId);
+        logTime = DateUtil.convertDateWithMillisecondsToSolrDate(dateOfLogId);
         Date endDate = DateUtils.addMilliseconds(dateOfLogId, 1);
-        endTimeMinusOneMilli = (String) dateUtil
+        endTimeMinusOneMilli = (String) DateUtil
             .convertDateWithMillisecondsToSolrDate(endDate);
       }
 
@@ -1088,7 +1087,7 @@ public class LogsMgr extends MgrBase {
       solrQuery.remove(LogSearchConstants.LOGTIME);
       queryGenerator.setSingleRangeFilter(solrQuery,
           LogSearchConstants.LOGTIME, endTimeMinusOneMilli, endLogTime);
-      queryGenerator.setRowCount(solrQuery, 0);
+      SolrUtil.setRowCount(solrQuery, 0);
       startIndex = countQuery(solrQuery,serviceLogsSolrDao);
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error(e);
@@ -1128,7 +1127,7 @@ public class LogsMgr extends MgrBase {
       logger.error(e);
     }
 
-    throw restErrorUtil.createRESTException("LogId not Found",
+    throw RESTErrorUtil.createRESTException("LogId not Found",
         MessageEnums.ERROR_SYSTEM);
   }
 
@@ -1138,7 +1137,7 @@ public class LogsMgr extends MgrBase {
     List<VNameValue> logsCounts = new ArrayList<VNameValue>();
     try {
 
-      queryGenerator.setFacetRange(solrQuery, LogSearchConstants.LOGTIME,
+      SolrUtil.setFacetRange(solrQuery, LogSearchConstants.LOGTIME,
         from, to, unit);
 
       List<RangeFacet.Count> logLevelCounts = null;
@@ -1205,8 +1204,8 @@ public class LogsMgr extends MgrBase {
         "\\", "");
 
     try {
-      queryGenerator.setJSONFacet(solrQuery, jsonHistogramQuery);
-      queryGenerator.setRowCount(solrQuery,Integer.parseInt(deafalutValue));
+      SolrUtil.setJSONFacet(solrQuery, jsonHistogramQuery);
+      SolrUtil.setRowCount(solrQuery,Integer.parseInt(deafalutValue));
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
       if (response == null){
         return convertObjToString(dataList);
@@ -1259,7 +1258,7 @@ public class LogsMgr extends MgrBase {
 
     } catch (SolrServerException | SolrException | IOException e) {
       logger.error(e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
           .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
 
     }
@@ -1278,7 +1277,7 @@ public class LogsMgr extends MgrBase {
   public String cancelFindRequestByDate(String uniqueId) {
     if (StringUtils.isEmpty(uniqueId)) {
       logger.error("Unique id is Empty");
-      throw restErrorUtil.createRESTException("Unique id is Empty",
+      throw RESTErrorUtil.createRESTException("Unique id is Empty",
         MessageEnums.DATA_NOT_FOUND);
     }
 
@@ -1292,7 +1291,7 @@ public class LogsMgr extends MgrBase {
   public boolean cancelRequest(String uniqueId) {
     if (StringUtils.isBlank(uniqueId)) {
       logger.error("Unique id is Empty");
-      throw restErrorUtil.createRESTException("Unique id is Empty",
+      throw RESTErrorUtil.createRESTException("Unique id is Empty",
         MessageEnums.DATA_NOT_FOUND);
     }
     for (String date : cancelByDate) {
@@ -1318,10 +1317,10 @@ public class LogsMgr extends MgrBase {
       utcOffset = "0";
     }
 
-    if (!dateUtil.isDateValid(from) || !dateUtil.isDateValid(to)) {
+    if (!DateUtil.isDateValid(from) || !DateUtil.isDateValid(to)) {
       logger.error("Not valid date format. Valid format should be"
           + LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z);
-      throw restErrorUtil.createRESTException("Not valid date format. Valid format should be"
+      throw RESTErrorUtil.createRESTException("Not valid date format. Valid format should be"
           + LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z,
           MessageEnums.INVALID_INPUT_DATA);
 
@@ -1332,13 +1331,13 @@ public class LogsMgr extends MgrBase {
       to = to.replace("T", " ");
       to = to.replace(".", ",");
 
-      to = dateUtil.addOffsetToDate(to, Long.parseLong(utcOffset),
+      to = DateUtil.addOffsetToDate(to, Long.parseLong(utcOffset),
           "yyyy-MM-dd HH:mm:ss,SSS");
-      from = dateUtil.addOffsetToDate(from, Long.parseLong(utcOffset),
+      from = DateUtil.addOffsetToDate(from, Long.parseLong(utcOffset),
           "yyyy-MM-dd HH:mm:ss,SSS");
     }
 
-    String fileName = dateUtil.getCurrentDateInString();
+    String fileName = DateUtil.getCurrentDateInString();
     if (searchCriteria.getParamValue("hostLogFile") != null
       && searchCriteria.getParamValue("compLogFile") != null) {
       fileName = searchCriteria.getParamValue("hostLogFile") + "_"
@@ -1349,16 +1348,16 @@ public class LogsMgr extends MgrBase {
     try {
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
       if (response == null) {
-        throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
             .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
       }
       SolrDocumentList docList = response.getResults();
       if (docList == null) {
-        throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
             .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
       }
 
-      VSummary vsummary = bizUtil.buildSummaryForLogFile(docList);
+      VSummary vsummary = BizUtil.buildSummaryForLogFile(docList);
       vsummary.setFormat(format);
       vsummary.setFrom(from);
       vsummary.setTo(to);
@@ -1421,7 +1420,7 @@ public class LogsMgr extends MgrBase {
 
         Date logTimeDateObj = (Date) solrDoc.get(LogSearchConstants.LOGTIME);
         if(logTimeDateObj != null){
-        String logTime = dateUtil.convertSolrDateToNormalDateFormat(
+        String logTime = DateUtil.convertSolrDateToNormalDateFormat(
             logTimeDateObj.getTime(), Long.parseLong(utcOffset));
         solrDoc.remove(LogSearchConstants.LOGTIME);
         solrDoc.addField(LogSearchConstants.LOGTIME, logTime);
@@ -1429,20 +1428,20 @@ public class LogsMgr extends MgrBase {
       }
 
       if (format.toLowerCase(Locale.ENGLISH).equals(".txt")) {
-        textToSave = bizUtil.convertObjectToNormalText(docList);
+        textToSave = BizUtil.convertObjectToNormalText(docList);
       } else if (format.toLowerCase(Locale.ENGLISH).equals(".json")) {
         textToSave = convertObjToString(docList);
       } else {
-        throw restErrorUtil.createRESTException(
+        throw RESTErrorUtil.createRESTException(
             "unsoported format either should be json or text",
             MessageEnums.ERROR_SYSTEM);
       }
-      return fileUtil.saveToFile(textToSave, fileName, vsummary);
+      return FileUtil.saveToFile(textToSave, fileName, vsummary);
 
     } catch (SolrException | SolrServerException | IOException
       | ParseException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
           .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
@@ -1460,7 +1459,7 @@ public class LogsMgr extends MgrBase {
     VNodeList list = new VNodeList();
     try {
 
-      queryGenerator.setFacetPivot(solrQuery, 1, componentLevelHirachy);
+      SolrUtil.setFacetPivot(solrQuery, 1, componentLevelHirachy);
 
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
 
@@ -1498,7 +1497,7 @@ public class LogsMgr extends MgrBase {
       return convertObjToString(list);
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error(e.getMessage() + "SolrQuery"+solrQuery);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
           .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
@@ -1516,9 +1515,9 @@ public class LogsMgr extends MgrBase {
       queryGenerator.setSingleIncludeFilter(solrQuery,
         LogSearchConstants.BUNDLE_ID, bundelId);
 
-      queryGenerator.setMainQuery(solrQuery, null);
+      SolrUtil.setMainQuery(solrQuery, null);
       solrQuery.setSort(LogSearchConstants.LOGTIME, SolrQuery.ORDER.asc);
-      queryGenerator.setRowCount(solrQuery, 1);
+      SolrUtil.setRowCount(solrQuery, 1);
 
       List<VNameValue> vNameValues = new ArrayList<VNameValue>();
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
@@ -1544,11 +1543,11 @@ public class LogsMgr extends MgrBase {
       }
 
       solrQuery.clear();
-      queryGenerator.setMainQuery(solrQuery, null);
+      SolrUtil.setMainQuery(solrQuery, null);
       queryGenerator.setSingleIncludeFilter(solrQuery,
         LogSearchConstants.BUNDLE_ID, bundelId);
       solrQuery.setSort(LogSearchConstants.LOGTIME, SolrQuery.ORDER.desc);
-      queryGenerator.setRowCount(solrQuery, 1);
+      SolrUtil.setRowCount(solrQuery, 1);
 
       solrDocList.clear();
       response = serviceLogsSolrDao.process(solrQuery);
@@ -1595,14 +1594,14 @@ public class LogsMgr extends MgrBase {
   }
 
   public String getServiceLogsFieldsName() {
-    String fieldsNameStrArry[] = PropertiesUtil
+    String fieldsNameStrArry[] = PropertiesHelper
       .getPropertyStringList("logsearch.service.logs.fields");
     if (fieldsNameStrArry.length > 0) {
 
       List<String> uiFieldNames = new ArrayList<String>();
       String temp = null;
       for (String field : fieldsNameStrArry) {
-        temp = ConfigUtil.serviceLogsColumnMapping.get(field
+        temp = ConfigHelper.serviceLogsColumnMapping.get(field
             + LogSearchConstants.SOLR_SUFFIX);
         if (temp == null){
           uiFieldNames.add(field);
@@ -1613,7 +1612,7 @@ public class LogsMgr extends MgrBase {
       return convertObjToString(uiFieldNames);
 
     }
-    throw restErrorUtil.createRESTException(
+    throw RESTErrorUtil.createRESTException(
       "No field name found in property file",
       MessageEnums.DATA_NOT_FOUND);
 
@@ -1622,14 +1621,14 @@ public class LogsMgr extends MgrBase {
   public String getServiceLogsSchemaFieldsName() {
 
     List<String> fieldNames = new ArrayList<String>();
-    String excludeArray[] = PropertiesUtil
+    String excludeArray[] = PropertiesHelper
         .getPropertyStringList("logsearch.solr.service.logs.exclude.columnlist");
 
     HashMap<String, String> uiFieldColumnMapping = new LinkedHashMap<String, String>();
-    ConfigUtil.getSchemaFieldsName(excludeArray, fieldNames,serviceLogsSolrDao);
+    ConfigHelper.getSchemaFieldsName(excludeArray, fieldNames,serviceLogsSolrDao);
 
     for (String fieldName : fieldNames) {
-      String uiField = ConfigUtil.serviceLogsColumnMapping.get(fieldName
+      String uiField = ConfigHelper.serviceLogsColumnMapping.get(fieldName
           + LogSearchConstants.SOLR_SUFFIX);
       if (uiField != null) {
         uiFieldColumnMapping.put(fieldName, uiField);
@@ -1641,7 +1640,7 @@ public class LogsMgr extends MgrBase {
     HashMap<String, String> uiFieldColumnMappingSorted = new LinkedHashMap<String, String>();
     uiFieldColumnMappingSorted.put(LogSearchConstants.SOLR_LOG_MESSAGE, LogSearchConstants.SOLR_LOG_MESSAGE);
 
-    Iterator<Entry<String, String>> it = bizUtil
+    Iterator<Entry<String, String>> it = BizUtil
         .sortHashMapByValues(uiFieldColumnMapping).entrySet().iterator();
     while (it.hasNext()) {
       @SuppressWarnings("rawtypes")
@@ -1674,7 +1673,7 @@ public class LogsMgr extends MgrBase {
         .get(innerField)).get("buckets");
       for (Object temp1 : levelBuckets) {
         SimpleOrderedMap<Object> countValue = (SimpleOrderedMap<Object>) temp1;
-        String value = dateUtil
+        String value = DateUtil
           .convertDateWithMillisecondsToSolrDate((Date) countValue
             .getVal(0));
 
@@ -1724,9 +1723,9 @@ public class LogsMgr extends MgrBase {
     String sequenceId = null;
     try {
       SolrQuery solrQuery = new SolrQuery();
-      queryGenerator.setMainQuery(solrQuery,
+      SolrUtil.setMainQuery(solrQuery,
         queryGenerator.buildFilterQuery(LogSearchConstants.ID, id));
-      queryGenerator.setRowCount(solrQuery, 1);
+      SolrUtil.setRowCount(solrQuery, 1);
       QueryResponse response = serviceLogsSolrDao.process(solrQuery);
       if(response == null){
         return convertObjToString(vSolrLogList);
@@ -1735,7 +1734,7 @@ public class LogsMgr extends MgrBase {
       if (docList != null && !docList.isEmpty()) {
         Date date = (Date) docList.get(0).getFieldValue(
           LogSearchConstants.LOGTIME);
-        logTime = dateUtil.convertDateWithMillisecondsToSolrDate(date);
+        logTime = DateUtil.convertDateWithMillisecondsToSolrDate(date);
         sequenceId = ""
           + docList.get(0).getFieldValue(
           LogSearchConstants.SEQUNCE_ID);
@@ -1744,7 +1743,7 @@ public class LogsMgr extends MgrBase {
         return convertObjToString(vSolrLogList);
       }
     } catch (SolrServerException | SolrException | IOException e) {
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
           .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
     if (LogSearchConstants.SCROLL_TYPE_BEFORE.equals(scrollType)) {
@@ -1798,7 +1797,7 @@ public class LogsMgr extends MgrBase {
   private VSolrLogList whenScrollUp(SearchCriteria searchCriteria,
                                     String logTime, String sequenceId, String maxRows) {
     SolrQuery solrQuery = new SolrQuery();
-    queryGenerator.setMainQuery(solrQuery, null);
+    SolrUtil.setMainQuery(solrQuery, null);
     /*queryGenerator.setSingleExcludeFilter(solrQuery,
         LogSearchConstants.SEQUNCE_ID, sequenceId);*/
     try {
@@ -1815,7 +1814,7 @@ public class LogsMgr extends MgrBase {
 
     queryGenerator.setSingleRangeFilter(solrQuery,
       LogSearchConstants.LOGTIME, "*", logTime);
-    queryGenerator.setRowCount(solrQuery, Integer.parseInt(maxRows));
+    SolrUtil.setRowCount(solrQuery, Integer.parseInt(maxRows));
     String order1 = LogSearchConstants.LOGTIME + " "
       + LogSearchConstants.DESCENDING_ORDER;
     String order2 = LogSearchConstants.SEQUNCE_ID + " "
@@ -1832,7 +1831,7 @@ public class LogsMgr extends MgrBase {
   private VSolrLogList whenScrollDown(SearchCriteria searchCriteria,
                                       String logTime, String sequenceId, String maxRows) {
     SolrQuery solrQuery = new SolrQuery();
-    queryGenerator.setMainQuery(solrQuery, null);
+    SolrUtil.setMainQuery(solrQuery, null);
     queryGenerator.applyLogFileFilter(solrQuery, searchCriteria);
 
     /*queryGenerator.setSingleExcludeFilter(solrQuery,
@@ -1848,7 +1847,7 @@ public class LogsMgr extends MgrBase {
       LogSearchConstants.SEQUNCE_ID, sequenceId, "*");
     queryGenerator.setSingleRangeFilter(solrQuery,
       LogSearchConstants.LOGTIME, logTime, "*");
-    queryGenerator.setRowCount(solrQuery, Integer.parseInt(maxRows));
+    SolrUtil.setRowCount(solrQuery, Integer.parseInt(maxRows));
 
     String order1 = LogSearchConstants.LOGTIME + " "
       + LogSearchConstants.ASCENDING_ORDER;
@@ -1874,13 +1873,13 @@ public class LogsMgr extends MgrBase {
     utc.set(Calendar.MINUTE, 0);
     utc.set(Calendar.MILLISECOND, 001);
     utc.set(Calendar.SECOND, 0);
-    dateUtil.convertDateWithMillisecondsToSolrDate(utc.getTime());
-    String from = dateUtil.convertDateWithMillisecondsToSolrDate(utc.getTime());
+    DateUtil.convertDateWithMillisecondsToSolrDate(utc.getTime());
+    String from = DateUtil.convertDateWithMillisecondsToSolrDate(utc.getTime());
     utc.set(Calendar.MILLISECOND, 999);
     utc.set(Calendar.SECOND, 59);
     utc.set(Calendar.MINUTE, 59);
     utc.set(Calendar.HOUR, 23);
-    String to = dateUtil.convertDateWithMillisecondsToSolrDate(utc.getTime());
+    String to = DateUtil.convertDateWithMillisecondsToSolrDate(utc.getTime());
     queryGenerator.setSingleRangeFilter(solrQuery,
         LogSearchConstants.LOGTIME, from,to);
     String level = LogSearchConstants.FATAL+","+LogSearchConstants.ERROR+","+LogSearchConstants.WARN;
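
The LogsMgr hunks above move a number of query-building and error-handling calls off the autowired queryGenerator/dateUtil/restErrorUtil/bizUtil beans onto static SolrUtil, DateUtil, RESTErrorUtil and BizUtil methods. The bodies of those static helpers are not part of this diff, so the following is only a rough sketch, written against SolrJ's SolrQuery API, of the shape that setMainQuery/setStart/setRowCount/setFl helpers commonly take; the class name, defaults and bodies are illustrative guesses, not the project's code.

    import org.apache.solr.client.solrj.SolrQuery;

    // Illustrative guess at the static helper shape the diff migrates to; not the project's SolrUtil.
    public final class SolrQueryHelperSketch {
      private SolrQueryHelperSketch() {
        throw new UnsupportedOperationException(); // static utility, never instantiated
      }

      // Falls back to a match-all query when no explicit main query is supplied.
      public static void setMainQuery(SolrQuery query, String mainQuery) {
        query.setQuery(mainQuery == null || mainQuery.trim().isEmpty() ? "*:*" : mainQuery);
      }

      public static void setStart(SolrQuery query, int start) {
        query.setStart(Math.max(start, 0));
      }

      public static void setRowCount(SolrQuery query, int rows) {
        query.setRows(Math.max(rows, 0));
      }

      // Restricts the stored fields returned for each matching document.
      public static void setFl(SolrQuery query, String fieldList) {
        query.setFields(fieldList);
      }
    }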

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
index 02e2e69..c0be79d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/MgrBase.java
@@ -60,20 +60,8 @@ public class MgrBase {
   private static final Logger logger = Logger.getLogger(MgrBase.class);
 
   @Autowired
-  protected SolrUtil solrUtil;
-
-  @Autowired
-  protected JSONUtil jsonUtil;
-
-  @Autowired
   protected QueryGeneration queryGenerator;
 
-  @Autowired
-  protected RESTErrorUtil restErrorUtil;
-
-  @Autowired
-  protected DateUtil dateUtil;
-
   private JsonSerializer<Date> jsonDateSerialiazer = null;
   private JsonDeserializer<Date> jsonDateDeserialiazer = null;
 
@@ -142,14 +130,14 @@ public class MgrBase {
 
     } catch (IOException e) {
       logger.error("Unable to read HadoopServiceConfig.json", e);
-      throw restErrorUtil.createRESTException(e.getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(e.getMessage(), MessageEnums.ERROR_SYSTEM);
     }
 
     String hadoopServiceConfig = result.toString();
-    if (jsonUtil.isJSONValid(hadoopServiceConfig)) {
+    if (JSONUtil.isJSONValid(hadoopServiceConfig)) {
       return hadoopServiceConfig;
     }
-    throw restErrorUtil.createRESTException("Improper JSON", MessageEnums.ERROR_SYSTEM);
+    throw RESTErrorUtil.createRESTException("Improper JSON", MessageEnums.ERROR_SYSTEM);
 
   }
   
@@ -174,8 +162,8 @@ public class MgrBase {
     int numberOfLogsOnLastPage = 0;
     VSolrLogList collection = null;
     try {
-      queryGenerator.setStart(lastPageQuery, 0);
-      queryGenerator.setRowCount(lastPageQuery, maxRows);
+      SolrUtil.setStart(lastPageQuery, 0);
+      SolrUtil.setRowCount(lastPageQuery, maxRows);
       collection = getLogAsPaginationProvided(lastPageQuery, solrDoaBase);
       totalLogs = countQuery(lastPageQuery,solrDoaBase);
       if(maxRows != null){
@@ -199,7 +187,7 @@ public class MgrBase {
 
     } catch (SolrException | SolrServerException | IOException | NumberFormatException e) {
       logger.error("Count Query was not executed successfully",e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
     return collection;
   }
@@ -223,7 +211,7 @@ public class MgrBase {
       return collection;
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error during solrQuery=" + solrQuery, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
   
@@ -249,9 +237,9 @@ public class MgrBase {
 
   protected String getFrom(String from) {
     if (StringUtils.isBlank(from)) {
-      Date date =  dateUtil.getTodayFromDate();
+      Date date = DateUtil.getTodayFromDate();
       try {
-        from = dateUtil.convertGivenDateFormatToSolrDateFormat(date);
+        from = DateUtil.convertGivenDateFormatToSolrDateFormat(date);
       } catch (ParseException e) {
         from = "NOW";
       }
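
MgrBase drops its autowired solrUtil, jsonUtil, restErrorUtil and dateUtil fields and calls the corresponding classes statically. The DateUtil conversions used above (convertDateWithMillisecondsToSolrDate, convertGivenDateFormatToSolrDateFormat) are defined outside this patch; as a rough idea of what such a conversion involves, here is a standalone sketch that renders a java.util.Date in the UTC form Solr stores. The format string, class name and method name are assumptions, not the project's actual code.

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.Locale;
    import java.util.TimeZone;

    public final class SolrDateSketch {
      // Solr stores timestamps in UTC, e.g. 2016-09-07T23:37:51.000Z
      private static final String SOLR_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";

      private SolrDateSketch() {
        throw new UnsupportedOperationException();
      }

      public static String toSolrDate(Date date) {
        SimpleDateFormat formatter = new SimpleDateFormat(SOLR_DATE_FORMAT, Locale.ENGLISH);
        formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
        return formatter.format(date);
      }
    }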

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
index 28f806c..59c1bbd 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigMgr.java
@@ -29,6 +29,7 @@ import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.dao.UserConfigSolrDao;
 import org.apache.ambari.logsearch.query.QueryGeneration;
+import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.ambari.logsearch.util.SolrUtil;
 import org.apache.ambari.logsearch.view.VLogfeederFilterWrapper;
@@ -55,21 +56,17 @@ public class UserConfigMgr extends MgrBase {
   @Autowired
   private UserConfigSolrDao userConfigSolrDao;
   @Autowired
-  private SolrUtil solrUtil;
-  @Autowired
-  private RESTErrorUtil restErrorUtil;
-  @Autowired
   private QueryGeneration queryGenerator;
 
   public String saveUserConfig(VUserConfig vHistory) {
 
     SolrInputDocument solrInputDoc = new SolrInputDocument();
     if (!isValid(vHistory)) {
-      throw restErrorUtil.createRESTException("No FilterName Specified", MessageEnums.INVALID_INPUT_DATA);
+      throw RESTErrorUtil.createRESTException("No FilterName Specified", MessageEnums.INVALID_INPUT_DATA);
     }
 
     if (isNotUnique(vHistory) && !vHistory.isOverwrite()) {
-      throw restErrorUtil.createRESTException( "Name '" + vHistory.getFiltername() + "' already exists", MessageEnums.INVALID_INPUT_DATA);
+      throw RESTErrorUtil.createRESTException( "Name '" + vHistory.getFiltername() + "' already exists", MessageEnums.INVALID_INPUT_DATA);
     }
     String loggedInUserName = vHistory.getUserName();
     String filterName = vHistory.getFiltername();
@@ -85,8 +82,8 @@ public class UserConfigMgr extends MgrBase {
     }
     // Check whether the Filter Name exists in solr
     SolrQuery solrQuery = new SolrQuery();
-    queryGenerator.setMainQuery(solrQuery, null);
-    queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.FILTER_NAME, solrUtil.makeSearcableString(filterName));
+    SolrUtil.setMainQuery(solrQuery, null);
+    queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.FILTER_NAME, SolrUtil.makeSearcableString(filterName));
     queryGenerator.setSingleIncludeFilter(solrQuery, LogSearchConstants.USER_NAME, loggedInUserName);
     try {
       QueryResponse queryResponse = userConfigSolrDao.process(solrQuery);
@@ -94,12 +91,12 @@ public class UserConfigMgr extends MgrBase {
         SolrDocumentList documentList = queryResponse.getResults();
         if (documentList != null && !documentList.isEmpty() && !vHistory.isOverwrite()) {
           logger.error("Filtername is already present");
-          throw restErrorUtil.createRESTException("Filtername is already present", MessageEnums.INVALID_INPUT_DATA);
+          throw RESTErrorUtil.createRESTException("Filtername is already present", MessageEnums.INVALID_INPUT_DATA);
         }
       }
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error in checking same filtername config", e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
 
     try {
@@ -107,7 +104,7 @@ public class UserConfigMgr extends MgrBase {
       return convertObjToString(solrInputDoc);
     } catch (SolrException | SolrServerException | IOException e) {
       logger.error("Error saving user config. solrDoc=" + solrInputDoc, e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -117,9 +114,9 @@ public class UserConfigMgr extends MgrBase {
 
     if (filterName != null && rowType != null) {
       SolrQuery solrQuery = new SolrQuery();
-      filterName = solrUtil.makeSearcableString(filterName);
+      filterName = SolrUtil.makeSearcableString(filterName);
       solrQuery.setQuery(LogSearchConstants.COMPOSITE_KEY + ":" + filterName + "-" + rowType);
-      queryGenerator.setRowCount(solrQuery, 0);
+      SolrUtil.setRowCount(solrQuery, 0);
       try {
         Long numFound = userConfigSolrDao.process(solrQuery).getResults().getNumFound();
         if (numFound > 0) {
@@ -143,7 +140,7 @@ public class UserConfigMgr extends MgrBase {
     try {
       userConfigSolrDao.deleteUserConfig(id);
     } catch (SolrException | SolrServerException | IOException e) {
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
   }
 
@@ -155,12 +152,12 @@ public class UserConfigMgr extends MgrBase {
 
     String rowType = (String) searchCriteria.getParamValue(LogSearchConstants.ROW_TYPE);
     if (StringUtils.isBlank(rowType)) {
-      throw restErrorUtil.createRESTException("row type was not specified", MessageEnums.INVALID_INPUT_DATA);
+      throw RESTErrorUtil.createRESTException("row type was not specified", MessageEnums.INVALID_INPUT_DATA);
     }
 
     String userName = (String) searchCriteria.getParamValue(LogSearchConstants.USER_NAME);
     if (StringUtils.isBlank(userName)) {
-      throw restErrorUtil.createRESTException("user name was not specified", MessageEnums.INVALID_INPUT_DATA);
+      throw RESTErrorUtil.createRESTException("user name was not specified", MessageEnums.INVALID_INPUT_DATA);
     }
     String filterName = (String) searchCriteria.getParamValue(LogSearchConstants.FILTER_NAME);
     filterName = StringUtils.isBlank(filterName) ? "*" : "*" + filterName + "*";
@@ -168,11 +165,11 @@ public class UserConfigMgr extends MgrBase {
     try {
 
       SolrQuery userConfigQuery = new SolrQuery();
-      queryGenerator.setMainQuery(userConfigQuery, null);
+      SolrUtil.setMainQuery(userConfigQuery, null);
       queryGenerator.setPagination(userConfigQuery, searchCriteria);
       queryGenerator.setSingleIncludeFilter(userConfigQuery, LogSearchConstants.ROW_TYPE, rowType);
       queryGenerator.setSingleORFilter(userConfigQuery, LogSearchConstants.USER_NAME, userName, LogSearchConstants.SHARE_NAME_LIST, userName);
-      queryGenerator.setSingleIncludeFilter(userConfigQuery, LogSearchConstants.FILTER_NAME, solrUtil.makeSearcableString(filterName));
+      queryGenerator.setSingleIncludeFilter(userConfigQuery, LogSearchConstants.FILTER_NAME, SolrUtil.makeSearcableString(filterName));
 
       if (StringUtils.isBlank(searchCriteria.getSortBy())) {
         searchCriteria.setSortBy(LogSearchConstants.FILTER_NAME);
@@ -214,7 +211,7 @@ public class UserConfigMgr extends MgrBase {
     } catch (SolrException | SolrServerException | IOException e) {
       // do nothing
       logger.error(e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
 
     return convertObjToString(userConfigList);
@@ -233,14 +230,14 @@ public class UserConfigMgr extends MgrBase {
       userFilter = userConfigSolrDao.getUserFilter();
     } catch (SolrServerException | IOException e) {
       logger.error(e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
     return convertObjToString(userFilter);
   }
 
   public String saveUserFiter(String json) {
     if (!StringUtils.isBlank(json)) {
-      VLogfeederFilterWrapper logfeederFilterWrapper = (VLogfeederFilterWrapper) jsonUtil.jsonToObj(json, VLogfeederFilterWrapper.class);
+      VLogfeederFilterWrapper logfeederFilterWrapper = (VLogfeederFilterWrapper) JSONUtil.jsonToObj(json, VLogfeederFilterWrapper.class);
       try {
         if (logfeederFilterWrapper == null) {
           logger.error(json + " is a invalid json");
@@ -248,7 +245,7 @@ public class UserConfigMgr extends MgrBase {
         userConfigSolrDao.saveUserFilter(logfeederFilterWrapper);
       } catch (SolrException | SolrServerException | IOException e) {
         logger.error("user config not able to save", e);
-        throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
       }
     }
     return getUserFilter();
@@ -258,8 +255,8 @@ public class UserConfigMgr extends MgrBase {
     List<String> userList = new ArrayList<String>();
     try {
       SolrQuery userListQuery = new SolrQuery();
-      queryGenerator.setMainQuery(userListQuery, null);
-      queryGenerator.setFacetField(userListQuery, LogSearchConstants.USER_NAME);
+      SolrUtil.setMainQuery(userListQuery, null);
+      SolrUtil.setFacetField(userListQuery, LogSearchConstants.USER_NAME);
       QueryResponse queryResponse = userConfigSolrDao.process(userListQuery);
       if (queryResponse == null) {
         return convertObjToString(userList);
@@ -271,7 +268,7 @@ public class UserConfigMgr extends MgrBase {
       }
     } catch (SolrException | SolrServerException | IOException e) {
       logger.warn("Error getting all users.", e);
-      throw restErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR.getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
     }
     return convertObjToString(userList);
   }
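
getAllUserName above builds its list of users by turning on a facet over the user name field (SolrUtil.setFacetField) and reading back the facet counts. With plain SolrJ the same idea looks roughly like the sketch below; the field handling is illustrative and not tied to the Log Search schema.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.response.FacetField;
    import org.apache.solr.client.solrj.response.QueryResponse;

    public class FacetFieldSketch {
      // Prepares a query that returns no documents, only facet counts for the given field.
      public static SolrQuery facetQuery(String fieldName) {
        SolrQuery query = new SolrQuery("*:*");
        query.setRows(0);
        query.setFacet(true);
        query.addFacetField(fieldName);
        return query;
      }

      // Walks the facet counts and collects the distinct values of the faceted field.
      public static List<String> distinctValues(QueryResponse response, String fieldName) {
        List<String> values = new ArrayList<String>();
        FacetField facet = response.getFacetField(fieldName);
        if (facet != null && facet.getValues() != null) {
          for (FacetField.Count count : facet.getValues()) {
            values.add(count.getName());
          }
        }
        return values;
      }
    }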

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
index 0c8be45..16cf932 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGeneration.java
@@ -25,12 +25,14 @@ import java.util.List;
 import java.util.Locale;
 import java.util.regex.Pattern;
 
+import org.apache.ambari.logsearch.common.ConfigHelper;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
 import org.apache.ambari.logsearch.manager.MgrBase.LogType;
-import org.apache.ambari.logsearch.util.ConfigUtil;
-import org.apache.ambari.logsearch.util.PropertiesUtil;
+import org.apache.ambari.logsearch.util.JSONUtil;
+import org.apache.ambari.logsearch.util.SolrUtil;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.lucene.analysis.core.KeywordTokenizerFactory;
@@ -87,7 +89,7 @@ public class QueryGeneration extends QueryGenerationBase {
       return advSolrQuery;
     }
 
-    setMainQuery(solrQuery, givenQuery);
+    SolrUtil.setMainQuery(solrQuery, givenQuery);
 
     setSingleRangeFilter(solrQuery, LogSearchConstants.LOGTIME, startTime, endTime);
     addFilter(solrQuery, selectedComp, LogSearchConstants.SOLR_COMPONENT, Condition.OR);
@@ -104,15 +106,15 @@ public class QueryGeneration extends QueryGenerationBase {
     setFilterClauseWithFieldName(solrQuery, globalExcludeComp, LogSearchConstants.SOLR_COMPONENT, LogSearchConstants.MINUS_OPERATOR, Condition.AND);
     setFilterClauseWithFieldName(solrQuery, unselectedComp, LogSearchConstants.SOLR_COMPONENT, LogSearchConstants.MINUS_OPERATOR, Condition.AND);
 
-    urlHostName = solrUtil.escapeQueryChars(urlHostName);
+    urlHostName = SolrUtil.escapeQueryChars(urlHostName);
     setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_HOST, urlHostName);
-    urlComponentName = solrUtil.escapeQueryChars(urlComponentName);
+    urlComponentName = SolrUtil.escapeQueryChars(urlComponentName);
     setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_COMPONENT, urlComponentName);
 
     setPagination(solrQuery, searchCriteria);
     setSortOrderDefaultServiceLog(solrQuery, searchCriteria);
     setSingleIncludeFilter(solrQuery, LogSearchConstants.BUNDLE_ID, bundleId);
-    file_name = solrUtil.escapeQueryChars(file_name);
+    file_name = SolrUtil.escapeQueryChars(file_name);
     setSingleIncludeFilter(solrQuery, LogSearchConstants.SOLR_PATH, file_name);
     setUserSpecificFilter(searchCriteria, solrQuery, LogSearchConstants.INCLUDE_QUERY, LogSearchConstants.INCLUDE_QUERY, logType);
     setUserSpecificFilter(searchCriteria, solrQuery, LogSearchConstants.EXCLUDE_QUERY, LogSearchConstants.EXCLUDE_QUERY, logType);
@@ -150,10 +152,10 @@ public class QueryGeneration extends QueryGenerationBase {
     List<String> conditionQuries = new ArrayList<String>();
     List<String> referalConditionQuries = new ArrayList<String>();
     List<String> elments = new ArrayList<String>();
-    List<HashMap<String, Object>> queryList = jsonUtil.jsonToMapObjectList(queryString);
+    List<HashMap<String, Object>> queryList = JSONUtil.jsonToMapObjectList(queryString);
     if (queryList != null && queryList.size() > 0) {
       if (!StringUtils.isBlank(columnQuery) && !columnQuery.equals(queryString) && !paramName.equals(LogSearchConstants.EXCLUDE_QUERY)) {
-        List<HashMap<String, Object>> columnQueryList = jsonUtil.jsonToMapObjectList(columnQuery);
+        List<HashMap<String, Object>> columnQueryList = JSONUtil.jsonToMapObjectList(columnQuery);
         if (columnQueryList != null && columnQueryList.size() > 0) {
           queryList.addAll(columnQueryList);
         }
@@ -255,7 +257,7 @@ public class QueryGeneration extends QueryGenerationBase {
       fieldType = serviceLogsSolrDao.schemaFieldsNameMap.get(key);
       solrDaoBase = serviceLogsSolrDao;
       if (key.equalsIgnoreCase(LogSearchConstants.SOLR_LOG_MESSAGE)) {
-        return solrUtil.escapeForLogMessage(key, str);
+        return SolrUtil.escapeForLogMessage(key, str);
       }
       break;
     default:
@@ -264,7 +266,7 @@ public class QueryGeneration extends QueryGenerationBase {
       fieldType = null;
     }
     if (!StringUtils.isBlank(fieldType)) {
-      if (solrUtil.isSolrFieldNumber(fieldType, solrDaoBase)) {
+      if (SolrUtil.isSolrFieldNumber(fieldType, solrDaoBase)) {
         String value = putEscapeCharacterForNumber(str, fieldType,solrDaoBase);
         if (!StringUtils.isBlank(value)) {
           return key + ":" + value;
@@ -272,9 +274,9 @@ public class QueryGeneration extends QueryGenerationBase {
           return null;
         }
       } else if (checkTokenizer(fieldType, StandardTokenizerFactory.class,solrDaoBase)) {
-        return key + ":" + solrUtil.escapeForStandardTokenizer(str);
+        return key + ":" + SolrUtil.escapeForStandardTokenizer(str);
       } else if (checkTokenizer(fieldType, KeywordTokenizerFactory.class,solrDaoBase)|| "string".equalsIgnoreCase(fieldType)) {
-        return key + ":" + solrUtil.makeSolrSearchStringWithoutAsterisk(str);
+        return key + ":" + SolrUtil.makeSolrSearchStringWithoutAsterisk(str);
       } else if (checkTokenizer(fieldType, PathHierarchyTokenizerFactory.class,solrDaoBase)) {
         return key + ":" + str;
       }
@@ -296,7 +298,7 @@ public class QueryGeneration extends QueryGenerationBase {
 
   private String parseInputValueAsPerFieldType(String str,String fieldType,SolrDaoBase solrDaoBase ) {
     try {
-      HashMap<String, Object> fieldTypeInfoMap= solrUtil.getFieldTypeInfoMap(fieldType,solrDaoBase);
+      HashMap<String, Object> fieldTypeInfoMap= SolrUtil.getFieldTypeInfoMap(fieldType,solrDaoBase);
       String className = (String) fieldTypeInfoMap.get("class");
       if( className.equalsIgnoreCase(TrieDoubleField.class.getSimpleName())){
         return ""+ Double.parseDouble(str);
@@ -314,7 +316,7 @@ public class QueryGeneration extends QueryGenerationBase {
   }
 
   private String getOriginalValue(String name, String value) {
-    String solrValue = PropertiesUtil.getProperty(name);
+    String solrValue = PropertiesHelper.getProperty(name);
     if (StringUtils.isBlank(solrValue)) {
       return value;
     }
@@ -347,10 +349,10 @@ public class QueryGeneration extends QueryGenerationBase {
     String originalKey;
     switch (logType) {
     case AUDIT:
-      originalKey = ConfigUtil.auditLogsColumnMapping.get(key + LogSearchConstants.UI_SUFFIX);
+      originalKey = ConfigHelper.auditLogsColumnMapping.get(key + LogSearchConstants.UI_SUFFIX);
       break;
     case SERVICE:
-      originalKey = ConfigUtil.serviceLogsColumnMapping.get(key + LogSearchConstants.UI_SUFFIX);
+      originalKey = ConfigHelper.serviceLogsColumnMapping.get(key + LogSearchConstants.UI_SUFFIX);
       break;
     default:
       originalKey = null;
@@ -362,7 +364,7 @@ public class QueryGeneration extends QueryGenerationBase {
   }
   
   private boolean checkTokenizer(String fieldType, Class tokenizerFactoryClass, SolrDaoBase solrDaoBase) {
-    HashMap<String, Object> fieldTypeMap = solrUtil.getFieldTypeInfoMap(fieldType,solrDaoBase);
+    HashMap<String, Object> fieldTypeMap = SolrUtil.getFieldTypeInfoMap(fieldType,solrDaoBase);
     HashMap<String, Object> analyzer = (HashMap<String, Object>) fieldTypeMap.get("analyzer");
     if (analyzer != null) {
       HashMap<String, Object> tokenizerMap = (HashMap<String, Object>) analyzer.get("tokenizer");
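
QueryGeneration now calls SolrUtil.escapeQueryChars and the related escaping helpers statically; their bodies are outside this diff. SolrJ itself ships a comparable helper, ClientUtils.escapeQueryChars, which backslash-escapes the characters that are special in the Lucene query syntax. A small usage sketch with made-up values:

    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.util.ClientUtils;

    public class EscapeQueryCharsSketch {
      public static void main(String[] args) {
        String rawHost = "host-01.example.com:8080";             // ':' and '-' are query-syntax characters
        String escaped = ClientUtils.escapeQueryChars(rawHost);  // -> host\-01.example.com\:8080
        SolrQuery query = new SolrQuery("*:*");
        query.addFilterQuery("host:" + escaped);                 // "host" is an illustrative field name
        System.out.println(query);                               // prints the assembled query parameters
      }
    }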

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
index ca6df65..77d4969 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/query/QueryGenerationBase.java
@@ -26,8 +26,6 @@ import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.dao.AuditSolrDao;
 import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao;
-import org.apache.ambari.logsearch.util.JSONUtil;
-import org.apache.ambari.logsearch.util.QueryBase;
 import org.apache.ambari.logsearch.util.SolrUtil;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
@@ -37,21 +35,15 @@ import org.springframework.beans.factory.annotation.Autowired;
 
 import com.google.gson.Gson;
 
-public abstract class QueryGenerationBase extends QueryBase {
+public abstract class QueryGenerationBase {
 
   private static final Logger logger = Logger.getLogger(QueryGenerationBase.class);
-
-  @Autowired
-  protected SolrUtil solrUtil;
   
   @Autowired
   protected AuditSolrDao auditSolrDao;
   
   @Autowired
   protected ServiceLogsSolrDao serviceLogsSolrDao;
-  
-  @Autowired
-  protected JSONUtil jsonUtil;
 
   public static enum Condition {
     OR, AND
@@ -69,9 +61,9 @@ public abstract class QueryGenerationBase extends QueryBase {
       for (String temp : msgList) {
         count += 1;
         if (LogSearchConstants.SOLR_LOG_MESSAGE.equalsIgnoreCase(messageField)) {
-          queryMsg.append(" " + operator + solrUtil.escapeForLogMessage(messageField, temp));
+          queryMsg.append(" " + operator + SolrUtil.escapeForLogMessage(messageField, temp));
         } else {
-          temp = solrUtil.escapeForStandardTokenizer(temp);
+          temp = SolrUtil.escapeForStandardTokenizer(temp);
           if(temp.startsWith("\"") && temp.endsWith("\"")){
             temp = temp.substring(1);
             temp = temp.substring(0, temp.length()-1);
@@ -95,9 +87,9 @@ public abstract class QueryGenerationBase extends QueryBase {
       String[] arrayOfSepratedString = commaSepratedString.split(LogSearchConstants.LIST_SEPARATOR);
       String filterQuery = null;
       if (Condition.OR.equals(condition)) {
-        filterQuery = solrUtil.orList(operator + field, arrayOfSepratedString,"");
+        filterQuery = SolrUtil.orList(operator + field, arrayOfSepratedString,"");
       } else if (Condition.AND.equals(condition)) {
-        filterQuery = solrUtil.andList(operator + field, arrayOfSepratedString,"");
+        filterQuery = SolrUtil.andList(operator + field, arrayOfSepratedString,"");
       }else{
         logger.warn("Not a valid condition :" + condition.name());
       }
@@ -207,15 +199,15 @@ public abstract class QueryGenerationBase extends QueryBase {
     Integer maxRows = null;
     try {
       startIndex = (Integer) searchCriteria.getStartIndex();
-      setStart(solrQuery, startIndex);
+      SolrUtil.setStart(solrQuery, startIndex);
     } catch (ClassCastException e) {
-      setStart(solrQuery, 0);
+      SolrUtil.setStart(solrQuery, 0);
     }
     try {
       maxRows = (Integer) searchCriteria.getMaxRows();
-      setRowCount(solrQuery, maxRows);
+      SolrUtil.setRowCount(solrQuery, maxRows);
     } catch (ClassCastException e) {
-      setRowCount(solrQuery, 10);
+      SolrUtil.setRowCount(solrQuery, 10);
     }
 
     if (startIndex != null && maxRows != null)
@@ -269,9 +261,9 @@ public abstract class QueryGenerationBase extends QueryBase {
       String[] values = paramValue.split(LogSearchConstants.LIST_SEPARATOR);
       switch (condition) {
       case OR:
-        return solrUtil.orList(solrFieldName, values,"");
+        return SolrUtil.orList(solrFieldName, values,"");
       case AND:
-        return solrUtil.andList(solrFieldName, values, "");
+        return SolrUtil.andList(solrFieldName, values, "");
       default:
         logger.error("Invalid condition " + condition.name());
       }
@@ -292,10 +284,10 @@ public abstract class QueryGenerationBase extends QueryBase {
       String query;;
       switch (condition) {
       case OR:
-        query = solrUtil.orList(solrFieldName, arr,"");
+        query = SolrUtil.orList(solrFieldName, arr,"");
         break;
       case AND:
-        query = solrUtil.andList(solrFieldName, arr, "");
+        query = SolrUtil.andList(solrFieldName, arr, "");
         break;
       default:
         query=null;
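
The orList/andList calls above likewise become static SolrUtil methods whose implementations are not shown here. As a guess at the general shape of such list-to-filter builders (purely illustrative, and without whatever escaping the real helpers may apply):

    public final class FilterListSketch {
      private FilterListSketch() {
        throw new UnsupportedOperationException();
      }

      // field:(v1 OR v2 OR ...)
      public static String orList(String field, String[] values) {
        return joinList(field, values, " OR ");
      }

      // field:(v1 AND v2 AND ...)
      public static String andList(String field, String[] values) {
        return joinList(field, values, " AND ");
      }

      private static String joinList(String field, String[] values, String operator) {
        if (values == null || values.length == 0) {
          return "";
        }
        StringBuilder builder = new StringBuilder(field).append(":(");
        for (int i = 0; i < values.length; i++) {
          if (i > 0) {
            builder.append(operator);
          }
          builder.append(values[i]);
        }
        return builder.append(")").toString();
      }
    }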

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java
index a906ceb..7a27e1c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/ServiceLogsREST.java
@@ -32,7 +32,6 @@ import io.swagger.annotations.ApiOperation;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.manager.LogsMgr;
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.ambari.logsearch.view.VCountList;
 import org.apache.ambari.logsearch.view.VNameValueList;
 import org.apache.ambari.logsearch.view.VNodeList;
@@ -54,9 +53,6 @@ public class ServiceLogsREST {
   @Autowired
   LogsMgr logMgr;
 
-  @Autowired
-  RESTErrorUtil restErrorUtil;
-
   @GET
   @Produces({"application/json"})
   @ApiOperation(SEARCH_LOGS_OD)

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
index a79a737..699dc17 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
@@ -36,7 +36,6 @@ import io.swagger.annotations.ApiOperation;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.manager.UserConfigMgr;
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.ambari.logsearch.view.VUserConfig;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Scope;
@@ -52,9 +51,6 @@ import static org.apache.ambari.logsearch.doc.DocConstants.UserConfigOperationDe
 public class UserConfigREST {
 
   @Autowired
-  RESTErrorUtil restErrorUtil;
-
-  @Autowired
   UserConfigMgr userConfigMgr;
 
   @POST

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java
index c644cd5..53e2ca2 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java
@@ -27,9 +27,9 @@ import java.util.TreeMap;
 
 import javax.management.MalformedObjectNameException;
 
+import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.solr.AmbariSolrCloudClient;
 import org.apache.ambari.logsearch.solr.AmbariSolrCloudClientBuilder;
-import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
@@ -172,15 +172,15 @@ public class SolrMetricsLoader extends TimerTask {
 
   public static void startSolrMetricsLoaderTasks() {
     try {
-      String collectorHosts = PropertiesUtil.getProperty("logsearch.solr.metrics.collector.hosts");
+      String collectorHosts = PropertiesHelper.getProperty("logsearch.solr.metrics.collector.hosts");
       if (StringUtils.isEmpty(collectorHosts)) {
         LOG.warn("No Ambari Metrics service is available, no Solr metrics will be loaded!");
         return;
       }
 
-      int solrJmxPort = PropertiesUtil.getIntProperty("logsearch.solr.jmx.port");
+      int solrJmxPort = PropertiesHelper.getIntProperty("logsearch.solr.jmx.port");
 
-      String zkConnectString = PropertiesUtil.getProperty("logsearch.solr.zk_connect_string");
+      String zkConnectString = PropertiesHelper.getProperty("logsearch.solr.zk_connect_string");
       AmbariSolrCloudClient ambariSolrCloudClient = new AmbariSolrCloudClientBuilder()
           .withZkConnectString(zkConnectString)
           .build();
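
PropertiesUtil.getProperty/getIntProperty becomes PropertiesHelper here and in the query classes above. The helper itself sits outside this diff; a minimal sketch of a static accessor of that kind, assuming the settings are loaded once from a logsearch.properties resource on the classpath (the file name, default values and error handling are assumptions):

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    public final class PropertiesAccessSketch {
      private static final Properties PROPS = new Properties();

      static {
        // Load the properties once when the class is first used; assumed resource name.
        try (InputStream in = PropertiesAccessSketch.class.getClassLoader()
            .getResourceAsStream("logsearch.properties")) {
          if (in != null) {
            PROPS.load(in);
          }
        } catch (IOException e) {
          throw new ExceptionInInitializerError(e);
        }
      }

      private PropertiesAccessSketch() {
        throw new UnsupportedOperationException();
      }

      public static String getProperty(String key) {
        return PROPS.getProperty(key);
      }

      public static int getIntProperty(String key) {
        String value = PROPS.getProperty(key);
        return value == null ? 0 : Integer.parseInt(value.trim());
      }
    }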

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
index 814b8ee..bd6cfbb 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/BizUtil.java
@@ -43,13 +43,15 @@ import org.apache.log4j.Logger;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.util.SimpleOrderedMap;
-import org.springframework.stereotype.Component;
 
-@Component
 public class BizUtil {
   private static final Logger logger = Logger.getLogger(BizUtil.class);
 
-  public String convertObjectToNormalText(SolrDocumentList docList) {
+  private BizUtil() {
+    throw new UnsupportedOperationException();
+  }
+  
+  public static String convertObjectToNormalText(SolrDocumentList docList) {
     String textToSave = "";
     HashMap<String, String> blankFieldsMap = new HashMap<String, String>();
     if (docList == null){
@@ -76,7 +78,7 @@ public class BizUtil {
 
     for (String field : fieldsForBlankCaculation) {
       if (!StringUtils.isBlank(field)) {
-      blankFieldsMap.put(field, addBlanksToString(maxLengthOfField - field.length(), field));
+        blankFieldsMap.put(field, StringUtils.rightPad(field, maxLengthOfField - field.length()));
       }
     }
 
@@ -114,7 +116,7 @@ public class BizUtil {
     return textToSave;
   }
 
-  public VSummary buildSummaryForLogFile(SolrDocumentList docList) {
+  public static VSummary buildSummaryForLogFile(SolrDocumentList docList) {
     VSummary vsummary = new VSummary();
     if (CollectionUtils.isEmpty(docList)) {
       return vsummary;
@@ -170,19 +172,8 @@ public class BizUtil {
     return vsummary;
   }
 
-  private String addBlanksToString(int count, String field) {
-    if (StringUtils.isBlank(field)) {
-      return field;
-    }
-    if (count > 0) {
-      return String.format("%-" + count + "s", field);
-    }
-    return field;
-
-  }
-
   @SuppressWarnings({"unchecked", "rawtypes"})
-  public VBarDataList buildSummaryForTopCounts(SimpleOrderedMap<Object> jsonFacetResponse,String innerJsonKey,String outerJsonKey) {
+  public static VBarDataList buildSummaryForTopCounts(SimpleOrderedMap<Object> jsonFacetResponse,String innerJsonKey,String outerJsonKey) {
 
     VBarDataList vBarDataList = new VBarDataList();
 
@@ -240,7 +231,7 @@ public class BizUtil {
     return vBarDataList;
   }
   
-  public HashMap<String, String> sortHashMapByValues(HashMap<String, String> passedMap) {
+  public static HashMap<String, String> sortHashMapByValues(HashMap<String, String> passedMap) {
     if (passedMap == null ) {
       return passedMap;
     }
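
The BizUtil change replaces the hand-rolled addBlanksToString helper with StringUtils.rightPad from commons-lang, which the class already uses, and keeps the previous pad width of maxLengthOfField - field.length(). A quick standalone check that the two approaches produce the same padding for a given width (values are made up):

    import org.apache.commons.lang.StringUtils;

    public class RightPadCheck {
      public static void main(String[] args) {
        String field = "logtime";                                      // illustrative value
        int width = 20;
        String viaFormat = String.format("%-" + width + "s", field);   // the removed approach
        String viaRightPad = StringUtils.rightPad(field, width);       // the replacement
        System.out.println(viaFormat.equals(viaRightPad));             // prints true
      }
    }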

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/CommonUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/CommonUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/CommonUtil.java
index 8ec04f8..1cfe469 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/CommonUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/CommonUtil.java
@@ -23,6 +23,10 @@ import java.security.SecureRandom;
 import org.springframework.security.authentication.encoding.Md5PasswordEncoder;
 
 public class CommonUtil {
+  private CommonUtil() {
+    throw new UnsupportedOperationException();
+  }
+  
   private static SecureRandom secureRandom = new SecureRandom();
   private static int counter = 0;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
deleted file mode 100644
index 88f92a2..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ConfigUtil.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ambari.logsearch.util;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import org.apache.ambari.logsearch.common.LogSearchConstants;
-import org.apache.ambari.logsearch.common.MessageEnums;
-import org.apache.ambari.logsearch.dao.SolrDaoBase;
-import org.apache.ambari.logsearch.manager.MgrBase;
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.log4j.Logger;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONObject;
-
-public class ConfigUtil {
-  private static final Logger logger = Logger.getLogger(MgrBase.class);
-
-  public static HashMap<String, String> serviceLogsColumnMapping = new HashMap<String, String>();
-
-  public static HashMap<String, String> auditLogsColumnMapping = new HashMap<String, String>();
-
-
-  public static void initializeApplicationConfig() {
-    initializeColumnMapping();
-  }
-
-  private static void intializeUISolrColumnMapping(String columnMappingArray[], HashMap<String, String> columnMappingMap) {
-
-    if (columnMappingArray != null && columnMappingArray.length > 0) {
-      for (String columnMapping : columnMappingArray) {
-        String mapping[] = columnMapping.split(":");
-        if (mapping.length > 1) {
-          String solrField = mapping[0];
-          String uiField = mapping[1];
-          
-          columnMappingMap.put(solrField + LogSearchConstants.SOLR_SUFFIX, uiField);
-          columnMappingMap.put(uiField + LogSearchConstants.UI_SUFFIX, solrField);
-        }
-      }
-    }
-  }
-  private static void initializeColumnMapping() {
-    String serviceLogsColumnMappingArray[] = PropertiesUtil.getPropertyStringList("logsearch.solr.service.logs.column.mapping");
-    String auditLogsColumnMappingArray[] = PropertiesUtil.getPropertyStringList("logsearch.solr.audit.logs.column.mapping");
-
-    // Initializing column mapping for Service Logs
-    intializeUISolrColumnMapping(serviceLogsColumnMappingArray, serviceLogsColumnMapping);
-
-    // Initializing column mapping for Audit Logs
-    intializeUISolrColumnMapping(auditLogsColumnMappingArray, auditLogsColumnMapping);
-  }
-
-  public static void extractSchemaFieldsName(String responseString, HashMap<String, String> schemaFieldsNameMap,
-      HashMap<String, String> schemaFieldTypeMap) {
-    try {
-      JSONObject jsonObject = new JSONObject(responseString);
-      JSONObject schemajsonObject = jsonObject.getJSONObject("schema");
-      JSONArray jsonArrayList = schemajsonObject.getJSONArray("fields");
-      JSONArray fieldTypeJsonArray = schemajsonObject
-          .getJSONArray("fieldTypes");
-      if (jsonArrayList == null) {
-        return;
-      }
-      if (fieldTypeJsonArray == null) {
-        return;
-      }
-      HashMap<String, String> _schemaFieldTypeMap = new HashMap<String, String>();
-      HashMap<String, String> _schemaFieldsNameMap = new HashMap<String, String>();
-      for (int i = 0; i < fieldTypeJsonArray.length(); i++) {
-        JSONObject typeObject = fieldTypeJsonArray.getJSONObject(i);
-        String name = typeObject.getString("name");
-        String fieldTypeJson = typeObject.toString();
-        _schemaFieldTypeMap.put(name, fieldTypeJson);
-      }
-
-      for (int i = 0; i < jsonArrayList.length(); i++) {
-        JSONObject explrObject = jsonArrayList.getJSONObject(i);
-        String name = explrObject.getString("name");
-        String type = explrObject.getString("type");
-        if (!name.contains("@") && !name.startsWith("_")
-            && !name.contains("_md5") && !name.contains("_ms")
-            && !name.contains(LogSearchConstants.NGRAM_SUFFIX)
-            && !name.contains("tags") && !name.contains("_str")) {
-          _schemaFieldsNameMap.put(name, type);
-        }
-      }
-      schemaFieldsNameMap.clear();
-      schemaFieldTypeMap.clear();
-      schemaFieldsNameMap.putAll(_schemaFieldsNameMap);
-      schemaFieldTypeMap.putAll(_schemaFieldTypeMap);
-    } catch (Exception e) {
-      logger.error(e + "Credentials not specified in logsearch.properties " + MessageEnums.ERROR_SYSTEM);
-    }
-  }
-
-  @SuppressWarnings("rawtypes")
-  public static void getSchemaFieldsName(String excludeArray[], List<String> fieldNames, SolrDaoBase solrDaoBase) {
-    if (!solrDaoBase.schemaFieldsNameMap.isEmpty()) {
-      Iterator iteratorSechmaFieldsName = solrDaoBase.schemaFieldsNameMap.entrySet().iterator();
-      while (iteratorSechmaFieldsName.hasNext()) {
-        Map.Entry fieldName = (Map.Entry) iteratorSechmaFieldsName.next();
-        String field = "" + fieldName.getKey();
-        if (!isExclude(field, excludeArray)) {
-          fieldNames.add(field);
-        }
-      }
-    }
-  }
-
-  private static boolean isExclude(String name, String excludeArray[]) {
-    if (!ArrayUtils.isEmpty(excludeArray)) {
-      for (String exclude : excludeArray) {
-        if (name.equals(exclude)){
-          return true;
-        }
-      }
-    }
-    return false;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java
index 516d828..0de0dbc 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/DateUtil.java
@@ -24,26 +24,24 @@ import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Date;
-import java.util.GregorianCalendar;
 import java.util.Locale;
 import java.util.TimeZone;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.time.DateUtils;
 import org.apache.log4j.Logger;
-import org.springframework.stereotype.Component;
 
-@Component
 public class DateUtil {
 
   private static final Logger logger = Logger.getLogger(DateUtil.class);
 
   private DateUtil() {
-
+    throw new UnsupportedOperationException();
   }
 
-  public String addOffsetToDate(String date, Long utcOffset, String dateFormat) {
+  public static String addOffsetToDate(String date, Long utcOffset, String dateFormat) {
     if (StringUtils.isBlank(date)) {
       logger.debug("input date is empty or null.");
       return null;
@@ -64,7 +62,7 @@ public class DateUtil {
       }
       SimpleDateFormat formatter = new SimpleDateFormat(dateFormat, Locale.ENGLISH);
       Date startDate = formatter.parse(modifiedDate);
-      long toWithOffset = getTimeWithOffset(startDate, utcOffset);
+      long toWithOffset = startDate.getTime() + TimeUnit.MINUTES.toMillis(utcOffset);
       Calendar calendar = Calendar.getInstance();
       calendar.setTimeInMillis(toWithOffset);
       retDate = formatter.format(calendar.getTime());
@@ -74,25 +72,17 @@ public class DateUtil {
     return retDate;
   }
 
-  private long getTimeWithOffset(Date date, Long utcOffset) {
-    return date.getTime() + TimeUnit.MINUTES.toMillis(utcOffset);
-  }
-
-  public String getCurrentDateInString() {
+  public static String getCurrentDateInString() {
     DateFormat df = new SimpleDateFormat("MM-dd-yyyy HH:mm:ss", Locale.ENGLISH);
     Date today = Calendar.getInstance().getTime();
     return df.format(today);
   }
 
-  public Date getTodayFromDate() {
-    Calendar c = new GregorianCalendar();
-    c.set(Calendar.HOUR_OF_DAY, 0);
-    c.set(Calendar.MINUTE, 0);
-    c.set(Calendar.SECOND, 0);
-    return c.getTime();
+  public static Date getTodayFromDate() {
+    return DateUtils.truncate(new Date(), Calendar.DATE);
   }
 
-  public String convertGivenDateFormatToSolrDateFormat(Date date) throws ParseException {
+  public static String convertGivenDateFormatToSolrDateFormat(Date date) throws ParseException {
     String time = date.toString();
     SimpleDateFormat input = new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy", Locale.ENGLISH);
     SimpleDateFormat output = new SimpleDateFormat(LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z, Locale.ENGLISH);
@@ -103,7 +93,7 @@ public class DateUtil {
     return output.format(d);
   }
 
-  public String convertDateWithMillisecondsToSolrDate(Date date) {
+  public static String convertDateWithMillisecondsToSolrDate(Date date) {
     if (date == null) {
       return "";
     }
@@ -114,7 +104,7 @@ public class DateUtil {
     return formatter.format(date);
   }
 
-  public String convertSolrDateToNormalDateFormat(long d, long utcOffset) throws ParseException {
+  public static String convertSolrDateToNormalDateFormat(long d, long utcOffset) throws ParseException {
     Date date = new Date(d);
     SimpleDateFormat formatter = new SimpleDateFormat(LogSearchConstants.SOLR_DATE_FORMAT, Locale.ENGLISH);
     TimeZone timeZone = TimeZone.getTimeZone("GMT");
@@ -124,7 +114,7 @@ public class DateUtil {
 
   }
 
-  public boolean isDateValid(String value) {
+  public static boolean isDateValid(String value) {
     if (StringUtils.isBlank(value)) {
       return false;
     }
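
After this refactoring the date helpers are plain static calls, and midnight truncation is delegated to Commons Lang instead of a hand-built GregorianCalendar. A short illustrative usage of the now-static helpers; the timestamp, offset and pattern below are example values, and the exact pattern handling inside addOffsetToDate is not shown in this hunk:

    import java.util.Calendar;
    import java.util.Date;
    import org.apache.ambari.logsearch.util.DateUtil;
    import org.apache.commons.lang.time.DateUtils;

    public class DateUtilUsageSketch {
      public static void main(String[] args) {
        // shift a timestamp by +120 minutes (example offset and pattern)
        String shifted = DateUtil.addOffsetToDate("2016-09-08 01:33:57", 120L, "yyyy-MM-dd HH:mm:ss");
        System.out.println(shifted);

        // equivalent of the new getTodayFromDate(): today's date truncated to 00:00:00.000
        Date startOfToday = DateUtils.truncate(new Date(), Calendar.DATE);
        System.out.println(startOfToday);
      }
    }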

http://git-wip-us.apache.org/repos/asf/ambari/blob/42ad4024/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ExternalServerClient.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ExternalServerClient.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ExternalServerClient.java
deleted file mode 100644
index 882a8bd..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/ExternalServerClient.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.util;
-
-import java.util.List;
-import java.util.Map;
-
-import javax.annotation.PostConstruct;
-import javax.ws.rs.client.Invocation;
-import javax.ws.rs.client.WebTarget;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedMap;
-
-import org.apache.ambari.logsearch.web.security.LogsearchAbstractAuthenticationProvider;
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Logger;
-import org.glassfish.jersey.client.JerseyClient;
-import org.glassfish.jersey.client.JerseyClientBuilder;
-import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;
-import org.glassfish.jersey.filter.LoggingFilter;
-import org.springframework.stereotype.Component;
-
-/**
- * Layer to send REST request to External server using jersey client
- */
-@Component
-public class ExternalServerClient {
-  private static Logger LOG = Logger.getLogger(ExternalServerClient.class);
-  private static final ThreadLocal<JerseyClient> localJerseyClient = new ThreadLocal<JerseyClient>(){
-    @Override
-    protected JerseyClient initialValue() {
-      return JerseyClientBuilder.createClient();
-    }
-  };
-  private String hostURL = "http://host:ip";// default
-  private boolean enableLog = false;// default
-
-  @PostConstruct
-  public void initialization() {
-    hostURL = PropertiesUtil.getProperty(
-        LogsearchAbstractAuthenticationProvider.AUTH_METHOD_PROP_START_WITH
-            + "external_auth.host_url", hostURL);
-  }
-
-  /**
-   * Send GET request to an external server
-   */
-  @SuppressWarnings({ "unchecked", "rawtypes" })
-  public Object sendGETRequest(String url, Class klass, MultivaluedMap<String, String> queryParam,
-                               String username, String password)
-      throws Exception {
-    url = hostURL + url;
-    JerseyClient client = localJerseyClient.get();
-    HttpAuthenticationFeature authFeature = HttpAuthenticationFeature.basicBuilder().build();
-
-    client.register(authFeature);
-    if (enableLog) {
-      client.register(LoggingFilter.class);
-    }
-
-    WebTarget target = client.target(url);
-    LOG.debug("URL: " + url);
-    for (Map.Entry<String, List<String>> entry : queryParam.entrySet()) {
-      target = target.queryParam(entry.getKey(), entry.getValue());
-      LOG.debug(
-        String.format("Query parameter: name - %s  ; value - %s ;" + entry.getKey(), StringUtils.join(entry.getValue(),',')));
-    }
-    target
-      .property(HttpAuthenticationFeature.HTTP_AUTHENTICATION_BASIC_USERNAME, username)
-      .property(HttpAuthenticationFeature.HTTP_AUTHENTICATION_BASIC_PASSWORD, password);
-    Invocation.Builder invocationBuilder =  target.request(MediaType.APPLICATION_JSON_TYPE);
-    try {
-      return invocationBuilder.get().readEntity(klass);
-    } catch (Exception e) {
-      throw new Exception(e.getCause());
-    } finally {
-      localJerseyClient.remove();
-    }
-  }
-}


[05/50] [abbrv] ambari git commit: AMBARI-18212. Log entry missing in the preview log modal window (Dharmesh Makwana via oleewere)

Posted by ol...@apache.org.
AMBARI-18212. Log entry missing in the preview log modal window (Dharmesh Makwana via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/00b1ac74
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/00b1ac74
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/00b1ac74

Branch: refs/heads/branch-dev-logsearch
Commit: 00b1ac74e4ff6c1da14481cc78986fd7955c0e4e
Parents: 93fefb4
Author: oleewere <ol...@gmail.com>
Authored: Mon Aug 22 11:00:31 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:33:58 2016 +0200

----------------------------------------------------------------------
 .../src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/00b1ac74/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
index 451fac1..a2c15f4 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/LogsMgr.java
@@ -1886,7 +1886,7 @@ public class LogsMgr extends MgrBase {
     String order1 = LogSearchConstants.LOGTIME + " "
       + LogSearchConstants.DESCENDING_ORDER;
     String order2 = LogSearchConstants.SEQUNCE_ID + " "
-      + LogSearchConstants.ASCENDING_ORDER;
+      + LogSearchConstants.DESCENDING_ORDER;
     List<String> sortOrder = new ArrayList<String>();
     sortOrder.add(order1);
     sortOrder.add(order2);
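
The one-word fix above makes the secondary sequence-id sort descending so it agrees with the descending logtime sort that the preview window pages through; with the orders mismatched, an entry could fall outside the requested page and go missing from the modal. A hedged SolrJ sketch of the resulting sort clause; the literal field names stand in for LogSearchConstants.LOGTIME and LogSearchConstants.SEQUNCE_ID and are assumptions:

    import org.apache.solr.client.solrj.SolrQuery;

    public class PreviewSortSketch {
      public static SolrQuery previewQuery() {
        SolrQuery query = new SolrQuery("*:*");
        query.addSort("logtime", SolrQuery.ORDER.desc);  // newest log lines first
        query.addSort("seq_num", SolrQuery.ORDER.desc);  // break ties in the same, matching order
        return query;
      }
    }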


[38/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema
index f58652c..f77bec0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema
@@ -106,7 +106,6 @@
   <field name="file" type="key_lower_case" multiValued="false"/>
   <field name="log_message" type="key_lower_case" multiValued="false" omitNorms="false"/>
   <field name="logfile_line_number" type="tint" omitNorms="false"/>
-  <!-- <field name="message" type="text_std_token_lower_case" indexed="true" stored="true"/> -->
   <field name="message_md5" type="string" multiValued="false"/>
   <field name="type" type="key_lower_case" multiValued="false"/>
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/hadoop_logs/conf/managed-schema
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/hadoop_logs/conf/managed-schema b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/hadoop_logs/conf/managed-schema
index 3b4cf19..c6f498b 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/hadoop_logs/conf/managed-schema
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/hadoop_logs/conf/managed-schema
@@ -101,7 +101,6 @@
   <field name="logger_name" type="key_lower_case" multiValued="false"/>
   <field name="logtime" type="tdate" multiValued="false"  docValues="true"/>
   <field name="logtype" type="key_lower_case" multiValued="false"/>
-  <!-- <field name="message" type="text_std_token_lower_case" indexed="true" stored="true"/> -->
   <field name="message_md5" type="string" multiValued="false"/>
   <field name="method" type="key_lower_case" multiValued="false" omitNorms="false"/>
   <field name="path" type="key_lower_case" multiValued="false"/>
@@ -117,28 +116,5 @@
   <dynamicField name='key_*' type="key_lower_case" multiValued="false" stored="false"/>
   <dynamicField name="ws_*" type="text_ws" multiValued="false" omitNorms="false" stored="false"/>
   <copyField source="log_message" dest="key_log_message"/>
-  <!-- <copyField source="log_message" dest="ws_log_message"/> -->
 
-  
-  <!-- Copy Fields-->
-  <!-- ngram fields -->
-  <!-- Whenever using a copy field provide following structure "ngram_"<OriginalFieldName> --> 
-<!-- Commenting till we test properly. Now it is not used and it is taking unnecessary cpu, memory and disk space
-  <copyField source="bundle_id" dest="ngram_bundle_id"/>
-  <copyField source="case_id" dest="ngram_case_id"/>
-  <copyField source="cluster" dest="ngram_cluster"/>
-  <copyField source="file" dest="ngram_file"/>
-  <copyField source="host" dest="ngram_host"/>
-  <copyField source="level" dest="ngram_level"/>
-  <copyField source="log_message" dest="ngram_log_message"/>
-  <copyField source="logger_name" dest="ngram_logger_name"/>
-  <copyField source="logtype" dest="ngram_logtype"/>
-  <copyField source="method" dest="ngram_method"/>
-  <copyField source="path" dest="ngram_path"/>
-  <copyField source="thread_name" dest="ngram_thread_name"/>
-  <copyField source="type" dest="ngram_type"/>
-  
--->
-
-  
 </schema>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/README.md
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/README.md b/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/README.md
deleted file mode 100644
index 3cc4915..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/README.md
+++ /dev/null
@@ -1,35 +0,0 @@
-<!--
-{% comment %}
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements.  See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to you under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-{% endcomment %}
--->
-
-This folder contains scripts to configure and run SolrCloud for development. 
-
-This helps in seting up SolrCloud using embedded zookeeper.
-
-##Setup Solr Collections
-```./update_config.sh <path to solr install folder>
-```
-
-
-##Run Solr
-```./restart_solr.sh
-```
-
-##Delete collections
-```./reset_collections.sh
-```
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/reset_collections.sh
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/reset_collections.sh b/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/reset_collections.sh
deleted file mode 100755
index 7c742c0..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/reset_collections.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This script is used to delete all the documents in Solr
-host_port=localhost:8983
-if [ $# -eq 1 ]; then
-    host_port=$1
-fi
-set -x
-curl http://${host_port}/solr/hadoop_logs/update --data '<delete><query>*:*</query></delete>' -H 'Content-type:text/xml; charset=utf-8'
-curl http://${host_port}/solr/hadoop_logs/update --data '<commit/>' -H 'Content-type:text/xml; charset=utf-8'
-
-curl http://${host_port}/solr/audit_logs/update --data '<delete><query>*:*</query></delete>' -H 'Content-type:text/xml; charset=utf-8'
-curl http://${host_port}/solr/audit_logs/update --data '<commit/>' -H 'Content-type:text/xml; charset=utf-8'
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/restart_solr.sh
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/restart_solr.sh b/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/restart_solr.sh
deleted file mode 100755
index bd436bb..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/restart_solr.sh
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-function usage {
-    echo "Usage: $0 <Solr Install Folder>"
-}
-
-if [ $# -ne 1 ]; then
-    usage
-    exit 1
-fi
-
-SOLR_INSTALL=$1
-if [ -x $SOLR_INSTALL/bin/solr ]; then
-    SOLR_BIN=$SOLR_INSTALL/bin/solr
-else
-    echo "ERROR: Invalid Solr install folder $SOLR_INSTALL"
-    usage
-    exit 1
-fi
-
-set -x
-$SOLR_BIN restart -c

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/update_config.sh
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/update_config.sh b/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/update_config.sh
deleted file mode 100755
index 93e3496..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/dev/solrcloud/update_config.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-function usage {
-    echo "Usage: $0 <Solr Install Folder> [zk_hosts]"
-}
-
-if [ $# -lt 1 ]; then
-    usage
-    exit 1
-fi
-
-curr_dir=`pwd`
-cd `dirname $0`; script_dir=`pwd`; cd $curr_dir
-
-SOLR_INSTALL=$1
-if [ -x $SOLR_INSTALL/server/scripts/cloud-scripts/zkcli.sh ]; then
-    ZK_CLI=$SOLR_INSTALL/server/scripts/cloud-scripts/zkcli.sh
-else
-    echo "ERROR: Invalid Solr install folder $SOLR_INSTALL"
-    usage
-    exit 1
-fi
-
-zk_hosts="localhost:9983"
-if [ $# -eq 2 ]; then
-    zk_hosts=$2
-fi
-
-
-CONFIGSET_FOLDER=$script_dir/../../configsets
-
-set -x
-$ZK_CLI -zkhost $zk_hosts -cmd upconfig -confdir $CONFIGSET_FOLDER/audit_logs/conf -confname audit_logs
-$ZK_CLI -zkhost $zk_hosts -cmd upconfig -confdir $CONFIGSET_FOLDER/hadoop_logs/conf -confname hadoop_logs
-$ZK_CLI -zkhost $zk_hosts -cmd upconfig -confdir $CONFIGSET_FOLDER/history/conf -confname history

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
index 15355a1..27b943a 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
@@ -92,7 +92,6 @@ public class LogSearch {
     logger.debug(server.dump());
     logger
         .debug("==============================================================================");
-    ConfigHelper.initializeApplicationConfig();
     server.join();
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ConfigHelper.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ConfigHelper.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ConfigHelper.java
index edb4ffa..a3a71ea 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ConfigHelper.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ConfigHelper.java
@@ -25,48 +25,18 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
-import org.apache.ambari.logsearch.manager.MgrBase;
+import org.apache.ambari.logsearch.manager.ManagerBase;
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.log4j.Logger;
 import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 
 public class ConfigHelper {
-  private static final Logger logger = Logger.getLogger(MgrBase.class);
-
-  public static HashMap<String, String> serviceLogsColumnMapping = new HashMap<String, String>();
-  public static HashMap<String, String> auditLogsColumnMapping = new HashMap<String, String>();
+  private static final Logger logger = Logger.getLogger(ManagerBase.class);
 
   private ConfigHelper() {
     throw new UnsupportedOperationException();
   }
-  
-  public static void initializeApplicationConfig() {
-    String serviceLogsColumnMappingArray[] = PropertiesHelper.getPropertyStringList("logsearch.solr.service.logs.column.mapping");
-    String auditLogsColumnMappingArray[] = PropertiesHelper.getPropertyStringList("logsearch.solr.audit.logs.column.mapping");
-
-    // Initializing column mapping for Service Logs
-    intializeUISolrColumnMapping(serviceLogsColumnMappingArray, serviceLogsColumnMapping);
-
-    // Initializing column mapping for Audit Logs
-    intializeUISolrColumnMapping(auditLogsColumnMappingArray, auditLogsColumnMapping);
-  }
-
-  private static void intializeUISolrColumnMapping(String columnMappingArray[], HashMap<String, String> columnMappingMap) {
-
-    if (columnMappingArray != null && columnMappingArray.length > 0) {
-      for (String columnMapping : columnMappingArray) {
-        String mapping[] = columnMapping.split(":");
-        if (mapping.length > 1) {
-          String solrField = mapping[0];
-          String uiField = mapping[1];
-          
-          columnMappingMap.put(solrField + LogSearchConstants.SOLR_SUFFIX, uiField);
-          columnMappingMap.put(uiField + LogSearchConstants.UI_SUFFIX, solrField);
-        }
-      }
-    }
-  }
 
   public static void extractSchemaFieldsName(String responseString, HashMap<String, String> schemaFieldsNameMap,
       HashMap<String, String> schemaFieldTypeMap) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
index 5235fab..edc78d1 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ExternalServerClient.java
@@ -22,11 +22,13 @@ import java.util.List;
 import java.util.Map;
 
 import javax.annotation.PostConstruct;
+import javax.inject.Inject;
 import javax.ws.rs.client.Invocation;
 import javax.ws.rs.client.WebTarget;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.MultivaluedMap;
 
+import org.apache.ambari.logsearch.conf.AuthConfig;
 import org.apache.ambari.logsearch.web.security.LogsearchAbstractAuthenticationProvider;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
@@ -41,6 +43,7 @@ import org.springframework.stereotype.Component;
  */
 @Component
 public class ExternalServerClient {
+
   private static Logger LOG = Logger.getLogger(ExternalServerClient.class);
   private static final ThreadLocal<JerseyClient> localJerseyClient = new ThreadLocal<JerseyClient>(){
     @Override
@@ -48,15 +51,11 @@ public class ExternalServerClient {
       return JerseyClientBuilder.createClient();
     }
   };
-  private String hostURL = "http://host:ip";// default
-  private boolean enableLog = false;// default
 
-  @PostConstruct
-  public void initialization() {
-    hostURL = PropertiesHelper.getProperty(
-        LogsearchAbstractAuthenticationProvider.AUTH_METHOD_PROP_START_WITH
-            + "external_auth.host_url", hostURL);
-  }
+  @Inject
+  private AuthConfig authConfig;
+
+  private boolean enableLog = false;// default
 
   /**
    * Send GET request to an external server
@@ -65,7 +64,7 @@ public class ExternalServerClient {
   public Object sendGETRequest(String url, Class klass, MultivaluedMap<String, String> queryParam,
                                String username, String password)
       throws Exception {
-    url = hostURL + url;
+    url = authConfig.getExternalAuthHostUrl() + url;
     JerseyClient client = localJerseyClient.get();
     HttpAuthenticationFeature authFeature = HttpAuthenticationFeature.basicBuilder().build();
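
Here the @PostConstruct property lookup is replaced with an injected AuthConfig bean, so the external auth host URL comes from typed configuration rather than a raw PropertiesHelper read. A rough sketch of what such a configuration holder can look like under Spring; the property key and default below are assumptions, only the getExternalAuthHostUrl() accessor is taken from the hunk:

    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.context.annotation.Configuration;

    @Configuration
    public class AuthConfigSketch {
      // illustrative key; the real one is built from the Log Search auth property prefix + "external_auth.host_url"
      @Value("${logsearch.auth.external.host_url:http://localhost:8080}")
      private String externalAuthHostUrl;

      public String getExternalAuthHostUrl() {
        return externalAuthHostUrl;
      }
    }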
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
index 142b29b..36ecb81 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
@@ -118,6 +118,60 @@ public class LogSearchConstants {
   public static final String FACET_RANGE_GAP = "facet.range.gap";
   public static final String FACET_GROUP = "group";
   public static final String FACET_GROUP_MAIN = "group.main";
-  public static final String FACET_GROUP_FIELD = "group.field"; 
-  
+  public static final String FACET_GROUP_FIELD = "group.field";
+
+  // Request params
+  public static final String REQUEST_PARAM_QUERY = "q";
+  public static final String REQUEST_PARAM_XAXIS = "xAxis";
+  public static final String REQUEST_PARAM_YAXIS = "yAxis";
+  public static final String REQUEST_PARAM_STACK_BY = "stackBy";
+  public static final String REQUEST_PARAM_UNIT = "unit";
+  public static final String REQUEST_PARAM_BUNDLE_ID = "bundle_id";
+  public static final String REQUEST_PARAM_START_INDEX = "startIndex";
+  public static final String REQUEST_PARAM_PAGE = "page";
+  public static final String REQUEST_PARAM_PAGE_SIZE = "pageSize";
+  public static final String REQUEST_PARAM_SORT_BY = "sortBy";
+  public static final String REQUEST_PARAM_SORT_TYPE = "sortType";
+  public static final String REQUEST_PARAM_START_TIME = "start_time";
+  public static final String REQUEST_PARAM_END_TIME = "end_time";
+  public static final String REQUEST_PARAM_FROM = "from";
+  public static final String REQUEST_PARAM_TO = "to";
+  public static final String REQUEST_PARAM_FIELD = "field";
+  public static final String REQUEST_PARAM_FORMAT = "format";
+  public static final String REQUEST_PARAM_LAST_PAGE = "lastPage";
+  public static final String REQUEST_PARAM_LOG_TYPE = "logType";
+  public static final String REQUEST_PARAM_COMPONENT = "component";
+  public static final String REQUEST_PARAM_HOST = "host";
+  public static final String REQUEST_PARAM_TAIL_SIZE = "tailSize";
+  public static final String REQUEST_PARAM_COLUMN_QUERY = "columnQuery";
+  public static final String REQUEST_PARAM_I_MESSAGE = "iMessage";
+  public static final String REQUEST_PARAM_G_E_MESSAGE = "gEMessage";
+  public static final String REQUEST_PARAM_MUST_BE = "mustBe";
+  public static final String REQUEST_PARAM_MUST_NOT = "mustNot";
+  public static final String REQUEST_PARAM_INCLUDE_QUERY = "includeQuery";
+  public static final String REQUEST_PARAM_EXCLUDE_QUERY = "excludeQuery";
+  public static final String REQUEST_PARAM_ID = "id";
+  public static final String REQUEST_PARAM_SCROLL_TYPE = "scrollType";
+  public static final String REQUEST_PARAM_NUMBER_ROWS = "numberRows";
+  public static final String REQUEST_PARAM_HOST_LOG_FILE = "host";
+  public static final String REQUEST_PARAM_COMPONENT_LOG_FILE = "component";
+  public static final String REQUEST_PARAM_LEVEL = "level";
+  public static final String REQUEST_PARAM_ADVANCED_SEARCH = "advancedSearch";
+  public static final String REQUEST_PARAM_TREE_PARAMS = "treeParams";
+  public static final String REQUEST_PARAM_E_MESSAGE = "eMessage";
+  public static final String REQUEST_PARAM_G_MUST_NOT = "gMustNot";
+  public static final String REQUEST_PARAM_HOST_NAME = "host_name";
+  public static final String REQUEST_PARAM_COMPONENT_NAME = "component_name";
+  public static final String REQUEST_PARAM_FILE_NAME = "file_name";
+  public static final String REQUEST_PARAM_DATE_RANGE_LABEL = "dateRangeLabel";
+  public static final String REQUEST_PARAM_KEYWORD = "find";
+  public static final String REQUEST_PARAM_SOURCE_LOG_ID = "sourceLogId";
+  public static final String REQUEST_PARAM_KEYWORD_TYPE = "keywordType";
+  public static final String REQUEST_PARAM_TOKEN = "token";
+  public static final String REQUEST_PARAM_USER_ID = "userId";
+  public static final String REQUEST_PARAM_FILTER_NAME = "filterName";
+  public static final String REQUEST_PARAM_ROW_TYPE = "rowType";
+  public static final String REQUEST_PARAM_UTC_OFFSET = "utcOffset";
+
+
 }
\ No newline at end of file
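
The new REQUEST_PARAM_* constants give the REST layer a single place for its query-parameter names. A hedged JAX-RS sketch of how such constants are typically consumed; the resource path, method and return value here are illustrative and not part of the patch:

    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.QueryParam;
    import org.apache.ambari.logsearch.common.LogSearchConstants;

    @Path("/service/logs")
    public class ServiceLogsResourceSketch {

      @GET
      public String listLogs(@QueryParam(LogSearchConstants.REQUEST_PARAM_PAGE) int page,
                             @QueryParam(LogSearchConstants.REQUEST_PARAM_PAGE_SIZE) int pageSize,
                             @QueryParam(LogSearchConstants.REQUEST_PARAM_LEVEL) String level) {
        // a real resource delegates to a manager; this just echoes the bound parameters
        return "page=" + page + ", pageSize=" + pageSize + ", level=" + level;
      }
    }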

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/Marker.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/Marker.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/Marker.java
new file mode 100644
index 0000000..3e088ba
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/Marker.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.common;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Target({ElementType.TYPE})
+@Retention(RetentionPolicy.RUNTIME)
+public @interface Marker {
+}
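
Marker is a bare, runtime-retained, type-level annotation, so annotated classes can be picked out reflectively (for example when wiring or documenting components). A minimal illustration; the annotated class name is made up:

    import org.apache.ambari.logsearch.common.Marker;

    @Marker
    public class MarkedComponentSketch {
      public static void main(String[] args) {
        // runtime retention makes the annotation visible through reflection
        System.out.println(MarkedComponentSketch.class.isAnnotationPresent(Marker.class)); // prints: true
      }
    }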

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/PropertyMapper.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/PropertyMapper.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/PropertyMapper.java
new file mode 100644
index 0000000..90e2114
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/PropertyMapper.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.common;
+
+import com.google.common.base.Splitter;
+import org.springframework.stereotype.Component;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@Component("propertyMapper")
+public class PropertyMapper {
+
+  public Map<String, String> map(String property) {
+    return this.map(property, ",");
+  }
+
+  public List<String> list(String property) {
+    return this.list(property, ",");
+  }
+
+  public Map<String, String> solrUiMap(String property) { return this.solrUiMap(property, ","); }
+
+  private List<String> list(String property, String splitter) {
+    return Splitter.on(splitter).omitEmptyStrings().trimResults().splitToList(property);
+  }
+
+  private Map<String, String> map(String property, String splitter) {
+    return Splitter.on(splitter).omitEmptyStrings().trimResults().withKeyValueSeparator(":").split(property);
+  }
+
+  private Map<String, String> solrUiMap(String property, String splitter) {
+    Map<String, String> result = new HashMap<>();
+    Map<String, String> map = this.map(property, splitter);
+    for (Map.Entry<String, String> propEntry : map.entrySet()) {
+      result.put(propEntry.getKey() + LogSearchConstants.SOLR_SUFFIX, propEntry.getValue());
+      result.put(propEntry.getValue() + LogSearchConstants.UI_SUFFIX, propEntry.getKey());
+    }
+    return result;
+  }
+
+}
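
PropertyMapper mirrors the column-mapping parsing that the removed ConfigUtil/ConfigHelper code did by hand: Guava's Splitter handles trimming and empty entries, and solrUiMap() produces one map that resolves in both directions via the SOLR_SUFFIX/UI_SUFFIX keys. A short usage sketch; the mapping string is an example value in the "solrField:uiField" shape of the logsearch.solr.*.column.mapping properties, not a shipped default:

    import java.util.Map;
    import org.apache.ambari.logsearch.common.LogSearchConstants;
    import org.apache.ambari.logsearch.common.PropertyMapper;

    public class PropertyMapperUsageSketch {
      public static void main(String[] args) {
        PropertyMapper mapper = new PropertyMapper();

        // "solrField:uiField" pairs, comma separated
        Map<String, String> mapping = mapper.solrUiMap("logtime:Log Time,level:Level");

        // Solr field -> UI label
        System.out.println(mapping.get("logtime" + LogSearchConstants.SOLR_SUFFIX)); // Log Time
        // UI label -> Solr field
        System.out.println(mapping.get("Level" + LogSearchConstants.UI_SUFFIX));     // level
      }
    }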

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
deleted file mode 100644
index 6b74144..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
+++ /dev/null
@@ -1,304 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.common;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Set;
-
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.commons.lang.StringEscapeUtils;
-
-public class SearchCriteria {
-  private int startIndex = 0;
-  private int maxRows = Integer.MAX_VALUE;
-  private String sortBy = null;
-  private String sortType = null;
-  private int page = 0;
-
-  private String globalStartTime = null;
-  private String globalEndTime = null;
-
-  private boolean getCount = true;
-  private boolean isDistinct = false;
-  private HashMap<String, Object> paramList = new HashMap<String, Object>();
-  final private Set<String> nullParamList = new HashSet<String>();
-  final private Set<String> notNullParamList = new HashSet<String>();
-
-  private Map<String, Object> urlParamMap = new HashMap<String, Object>();
-
-  public SearchCriteria(HttpServletRequest request) {
-    try {
-      if (request.getParameter("startIndex") != null && (!request.getParameter("startIndex").isEmpty())) {
-        this.startIndex = new Integer(request.getParameter("startIndex"));
-      }
-      if (request.getParameter("page") != null && (!request.getParameter("page").isEmpty())) {
-        this.page = new Integer(request.getParameter("page"));
-      }
-      if (request.getParameter("pageSize") != null && (!request.getParameter("pageSize").isEmpty())) {
-        this.maxRows = new Integer(request.getParameter("pageSize"));
-      } else {
-        this.maxRows = PropertiesHelper.getIntProperty("db.maxResults", 50);
-      }
-    } catch (NumberFormatException e) {
-      // do nothing
-    }
-
-    // Sort fields
-    if (request.getParameter("sortBy") != null && (!request.getParameter("sortBy").isEmpty())) {
-      this.sortBy = "" + request.getParameter("sortBy");
-    }
-    if (request.getParameter("sortType") != null && (!request.getParameter("sortType").isEmpty())) {
-      this.sortType = "" + request.getParameter("sortType");
-    }
-
-    // url params
-    if (request.getParameter("start_time") != null && (!request.getParameter("start_time").isEmpty())) {
-      this.globalStartTime = "" + request.getParameter("start_time");
-      this.urlParamMap.put("globalStartTime", request.getParameter("start_time"));
-    }
-    if (request.getParameter("end_time") != null && (!request.getParameter("end_time").isEmpty())) {
-      this.globalEndTime = "" + request.getParameter("end_time");
-      this.urlParamMap.put("globalEndTime", request.getParameter("end_time"));
-    }
-  }
-
-  public SearchCriteria() {
-    // Auto-generated constructor stub
-  }
-
-  /**
-   * @return the startIndex
-   */
-  public int getStartIndex() {
-    return startIndex;
-  }
-
-  /**
-   * @param startIndex the startIndex to set
-   */
-  public void setStartIndex(int startIndex) {
-    this.startIndex = startIndex;
-  }
-
-  /**
-   * @return the maxRows
-   */
-  public int getMaxRows() {
-    return maxRows;
-  }
-
-  /**
-   * @param maxRows the maxRows to set
-   */
-  public void setMaxRows(int maxRows) {
-    this.maxRows = maxRows;
-  }
-
-  /**
-   * @return the sortType
-   */
-
-  public String getSortType() {
-    return sortType;
-  }
-
-  /**
-   * @param sortType the sortType to set
-   */
-
-  public boolean isGetCount() {
-    return getCount;
-  }
-
-  public void setGetCount(boolean getCount) {
-    this.getCount = getCount;
-  }
-
-  /**
-   * @return the paramList
-   */
-  public HashMap<String, Object> getParamList() {
-    return paramList;
-  }
-
-  /**
-   * @param paramList the paramList to set
-   */
-  public void setParamList(HashMap<String, Object> paramList) {
-    this.paramList = paramList;
-  }
-
-  /**
-   * @param request
-   */
-  public void addRequiredServiceLogsParams(HttpServletRequest request) {
-    this.addParam("advanceSearch", StringEscapeUtils.unescapeXml(request.getParameter("advanceSearch")));
-    this.addParam("q", request.getParameter("q"));
-    this.addParam("treeParams", StringEscapeUtils.unescapeHtml(request.getParameter("treeParams")));
-    this.addParam("level", request.getParameter("level"));
-    this.addParam("gMustNot", request.getParameter("gMustNot"));
-    this.addParam("from", request.getParameter("from"));
-    this.addParam("to", request.getParameter("to"));
-    this.addParam("selectComp", request.getParameter("mustBe"));
-    this.addParam("unselectComp", request.getParameter("mustNot"));
-    this.addParam("iMessage", StringEscapeUtils.unescapeXml(request.getParameter("iMessage")));
-    this.addParam("gEMessage", StringEscapeUtils.unescapeXml(request.getParameter("gEMessage")));
-    this.addParam("eMessage", StringEscapeUtils.unescapeXml(request.getParameter("eMessage")));
-    this.addParam(LogSearchConstants.BUNDLE_ID, request.getParameter(LogSearchConstants.BUNDLE_ID));
-    this.addParam("host_name", request.getParameter("host_name"));
-    this.addParam("component_name", request.getParameter("component_name"));
-    this.addParam("file_name", request.getParameter("file_name"));
-    this.addParam("startDate", request.getParameter("start_time"));
-    this.addParam("endDate", request.getParameter("end_time"));
-    this.addParam("excludeQuery", StringEscapeUtils.unescapeXml(request.getParameter("excludeQuery")));
-    this.addParam("includeQuery", StringEscapeUtils.unescapeXml(request.getParameter("includeQuery")));
-  }
-
-  /**
-   * @param request
-   */
-  public void addRequiredAuditLogsParams(HttpServletRequest request) {
-    this.addParam("q", request.getParameter("q"));
-    this.addParam("columnQuery", StringEscapeUtils.unescapeXml(request.getParameter("columnQuery")));
-    this.addParam("iMessage", StringEscapeUtils.unescapeXml(request.getParameter("iMessage")));
-    this.addParam("gEMessage", StringEscapeUtils.unescapeXml(request.getParameter("gEMessage")));
-    this.addParam("eMessage", StringEscapeUtils.unescapeXml(request.getParameter("eMessage")));
-    this.addParam("includeString", request.getParameter("mustBe"));
-    this.addParam("unselectComp", request.getParameter("mustNot"));
-    this.addParam("excludeQuery", StringEscapeUtils.unescapeXml(request.getParameter("excludeQuery")));
-    this.addParam("includeQuery", StringEscapeUtils.unescapeXml(request.getParameter("includeQuery")));
-    this.addParam("startTime", request.getParameter("from"));
-    this.addParam("endTime", request.getParameter("to"));
-  }
-
-  /**
-   * @param string
-   * @param caId
-   */
-  public void addParam(String name, Object value) {
-    String solrValue = PropertiesHelper.getProperty(name);
-    if (solrValue == null || solrValue.isEmpty()) {
-      paramList.put(name, value);
-    } else {
-      try {
-        String propertyFieldMappings[] = solrValue.split(",");
-        HashMap<String, String> propertyFieldValue = new HashMap<String, String>();
-        for (String temp : propertyFieldMappings) {
-          String arrayValue[] = temp.split(":");
-          propertyFieldValue.put(arrayValue[0].toLowerCase(Locale.ENGLISH), arrayValue[1].toLowerCase(Locale.ENGLISH));
-        }
-        String originalValue = propertyFieldValue.get(value.toString().toLowerCase(Locale.ENGLISH));
-        if (originalValue != null && !originalValue.isEmpty())
-          paramList.put(name, originalValue);
-
-      } catch (Exception e) {
-        //do nothing
-      }
-    }
-  }
-
-  public void setNullParam(String name) {
-    nullParamList.add(name);
-  }
-
-  public void setNotNullParam(String name) {
-    notNullParamList.add(name);
-  }
-
-  public Object getParamValue(String name) {
-    return paramList.get(name);
-  }
-
-  /**
-   * @return the nullParamList
-   */
-  public Set<String> getNullParamList() {
-    return nullParamList;
-  }
-
-  /**
-   * @return the notNullParamList
-   */
-  public Set<String> getNotNullParamList() {
-    return notNullParamList;
-  }
-
-  /**
-   * @return the isDistinct
-   */
-  public boolean isDistinct() {
-    return isDistinct;
-  }
-
-  public String getSortBy() {
-    return sortBy;
-  }
-
-  public void setSortBy(String sortBy) {
-    this.sortBy = sortBy;
-  }
-
-  public void setSortType(String sortType) {
-    this.sortType = sortType;
-  }
-
-  /**
-   * @param isDistinct the isDistinct to set
-   */
-  public void setDistinct(boolean isDistinct) {
-    this.isDistinct = isDistinct;
-  }
-
-  public int getPage() {
-    return page;
-  }
-
-  public void setPage(int page) {
-    this.page = page;
-  }
-
-  public String getGlobalStartTime() {
-    return globalStartTime;
-  }
-
-  public void setGlobalStartTime(String globalStartTime) {
-    this.globalStartTime = globalStartTime;
-  }
-
-  public String getGlobalEndTime() {
-    return globalEndTime;
-  }
-
-  public void setGlobalEndTime(String globalEndTime) {
-    this.globalEndTime = globalEndTime;
-  }
-
-  public Map<String, Object> getUrlParamMap() {
-    return urlParamMap;
-  }
-
-  public void setUrlParamMap(Map<String, Object> urlParamMap) {
-    this.urlParamMap = urlParamMap;
-  }
-
-}
\ No newline at end of file
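
With SearchCriteria gone, the raw HttpServletRequest parameter reads and ad-hoc string escaping move into typed request beans plus the Spring Converter implementations imported by ApplicationConfig further down. A hedged sketch of that converter shape; both bean types here are invented for the example, the real ones live under org.apache.ambari.logsearch.query:

    import org.springframework.core.convert.converter.Converter;

    public class ExampleLogRequestConverter
        implements Converter<ExampleLogRequestConverter.LogRequest, ExampleLogRequestConverter.SearchQuery> {

      // illustrative request/query beans standing in for the real query model classes
      public static class LogRequest  { public int page; public int pageSize; public String level; }
      public static class SearchQuery { public int page; public int pageSize; public String level; }

      @Override
      public SearchQuery convert(LogRequest request) {
        SearchQuery query = new SearchQuery();
        query.page = request.page;
        query.pageSize = request.pageSize;
        query.level = request.level;
        return query;
      }
    }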

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java
new file mode 100644
index 0000000..0ddad65
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApiDocConfig.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.conf;
+
+import io.swagger.jaxrs.config.BeanConfig;
+import io.swagger.jaxrs.listing.ApiListingResource;
+import io.swagger.jaxrs.listing.SwaggerSerializers;
+
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+
+@Configuration
+public class ApiDocConfig {
+
+  @Bean
+  public ApiListingResource apiListingResource() {
+    return new ApiListingResource();
+  }
+
+  @Bean
+  public SwaggerSerializers swaggerSerializers() {
+    return new SwaggerSerializers();
+  }
+
+  @Bean
+  public BeanConfig swaggerConfig() throws UnknownHostException {
+    BeanConfig beanConfig = new BeanConfig();
+    beanConfig.setSchemes(new String[]{"http", "https"});
+    beanConfig.setHost(InetAddress.getLocalHost().getHostAddress() + ":61888"); // TODO: port from property
+    beanConfig.setBasePath("/api/v1");
+    beanConfig.setTitle("Log Search REST API");
+    beanConfig.setDescription("Log aggregation, analysis, and visualization.");
+    beanConfig.setLicense("Apache 2.0");
+    beanConfig.setLicenseUrl("http://www.apache.org/licenses/LICENSE-2.0.html");
+    beanConfig.setScan(true);
+    beanConfig.setVersion("1.0.0");
+    beanConfig.setResourcePackage("org.apache.ambari.logsearch.rest");
+    return beanConfig;
+  }
+}
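
ApiDocConfig publishes the three pieces Swagger needs: the BeanConfig that scans org.apache.ambari.logsearch.rest and describes the API, plus the ApiListingResource and SwaggerSerializers beans that serve the generated listing. A hedged sketch of how those two listing classes are typically registered with a Jersey application; the class below is illustrative, the real registration lives in the Log Search Jersey/Spring wiring:

    import io.swagger.jaxrs.listing.ApiListingResource;
    import io.swagger.jaxrs.listing.SwaggerSerializers;
    import org.glassfish.jersey.server.ResourceConfig;

    public class RestApplicationSketch extends ResourceConfig {
      public RestApplicationSketch() {
        packages("org.apache.ambari.logsearch.rest");  // the package BeanConfig is set to scan
        register(ApiListingResource.class);            // serves the swagger.json / swagger.yaml listing
        register(SwaggerSerializers.class);            // writers for the listing responses
      }
    }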

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApplicationConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApplicationConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApplicationConfig.java
new file mode 100644
index 0000000..72ea942
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/ApplicationConfig.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.conf;
+
+import org.apache.ambari.logsearch.query.converter.AnyGraphRequestConverter;
+import org.apache.ambari.logsearch.query.converter.AuditBarGraphRequestConverter;
+import org.apache.ambari.logsearch.query.converter.AuditLogRequestConverter;
+import org.apache.ambari.logsearch.query.converter.BaseAuditLogRequestConverter;
+import org.apache.ambari.logsearch.query.converter.BaseServiceLogRequestConverter;
+import org.apache.ambari.logsearch.query.converter.FieldAuditLogRequestConverter;
+import org.apache.ambari.logsearch.query.converter.FieldBarGraphRequestConverter;
+import org.apache.ambari.logsearch.query.converter.ServiceAnyGraphRequestConverter;
+import org.apache.ambari.logsearch.query.converter.ServiceExtremeDatesRequestConverter;
+import org.apache.ambari.logsearch.query.converter.ServiceGraphRequestConverter;
+import org.apache.ambari.logsearch.query.converter.ServiceLogExportRequestConverter;
+import org.apache.ambari.logsearch.query.converter.ServiceLogFileRequestConverter;
+import org.apache.ambari.logsearch.query.converter.ServiceLogRequestConverter;
+import org.apache.ambari.logsearch.query.converter.ServiceLogTruncatedRequestConverter;
+import org.apache.ambari.logsearch.query.converter.SimpleQueryRequestConverter;
+import org.apache.ambari.logsearch.query.converter.UserExportRequestConverter;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.ImportResource;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.context.support.ConversionServiceFactoryBean;
+import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
+import org.springframework.core.convert.converter.Converter;
+
+import java.util.HashSet;
+import java.util.Set;
+
+@Configuration
+@ComponentScan("org.apache.ambari.logsearch")
+@PropertySource(value = {"classpath:default.properties", "classpath:logsearch.properties"})
+@ImportResource("META-INF/security-applicationContext.xml")
+public class ApplicationConfig {
+
+  @Bean
+  public static PropertySourcesPlaceholderConfigurer propertyConfigurer() {
+    return new PropertySourcesPlaceholderConfigurer();
+  }
+
+  @Bean(name="conversionService")
+  public ConversionServiceFactoryBean conversionServiceFactoryBean() {
+    ConversionServiceFactoryBean conversionServiceFactoryBean = new ConversionServiceFactoryBean();
+    Set<Converter> converters = new HashSet<>();
+    converters.add(new AnyGraphRequestConverter());
+    converters.add(new AuditBarGraphRequestConverter());
+    converters.add(new AuditLogRequestConverter());
+    converters.add(new BaseAuditLogRequestConverter());
+    converters.add(new BaseServiceLogRequestConverter());
+    converters.add(new FieldAuditLogRequestConverter());
+    converters.add(new FieldBarGraphRequestConverter());
+    converters.add(new SimpleQueryRequestConverter());
+    converters.add(new UserExportRequestConverter());
+    converters.add(new ServiceAnyGraphRequestConverter());
+    converters.add(new ServiceExtremeDatesRequestConverter());
+    converters.add(new ServiceGraphRequestConverter());
+    converters.add(new ServiceLogExportRequestConverter());
+    converters.add(new ServiceLogFileRequestConverter());
+    converters.add(new ServiceLogRequestConverter());
+    converters.add(new ServiceLogTruncatedRequestConverter());
+    conversionServiceFactoryBean.setConverters(converters);
+    return conversionServiceFactoryBean;
+  }
+
+}
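
Each converter registered here implements Spring's Converter<S, T> contract, and the exposed conversionService bean is what request-handling code uses to turn incoming parameters into query objects. The concrete request and criteria types are defined elsewhere, so the example below only illustrates the contract itself with made-up types:

    import org.springframework.core.convert.ConversionService;
    import org.springframework.core.convert.converter.Converter;

    // Illustrative converter with invented types; the Log Search converters follow the same shape.
    class CsvLengthConverter implements Converter<String, Integer> {
      @Override
      public Integer convert(String source) {
        return source.split(",").length;
      }
    }

    // Typical consumption inside a Spring-managed bean:
    //   @Inject private ConversionService conversionService;
    //   Integer n = conversionService.convert("a,b,c", Integer.class); // -> 3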

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/AuthConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/AuthConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/AuthConfig.java
new file mode 100644
index 0000000..3398a83
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/AuthConfig.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.conf;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class AuthConfig {
+
+  @Value("${logsearch.auth.file.enable:true}")
+  boolean authFileEnabled;
+  @Value("${logsearch.auth.ldap.enable:false}")
+  boolean authLdapEnabled;
+  @Value("${logsearch.auth.simple.enable:false}")
+  boolean authSimpleEnabled;
+  @Value("${logsearch.auth.external_auth.host_url:'http://ip:port'}")
+  private String externalAuthHostUrl;
+  @Value("${logsearch.auth.login_url:/api/v1/users/$USERNAME/privileges?fields=*}")
+  private String externalAuthLoginUrl;
+  @Value("${logsearch.login.credentials.file:user_pass.json}")
+  private String credentialsFile;
+
+  public boolean isAuthFileEnabled() {
+    return authFileEnabled;
+  }
+
+  public void setAuthFileEnabled(boolean authFileEnabled) {
+    this.authFileEnabled = authFileEnabled;
+  }
+
+  public boolean isAuthLdapEnabled() {
+    return authLdapEnabled;
+  }
+
+  public void setAuthLdapEnabled(boolean authLdapEnabled) {
+    this.authLdapEnabled = authLdapEnabled;
+  }
+
+  public boolean isAuthSimpleEnabled() {
+    return authSimpleEnabled;
+  }
+
+  public void setAuthSimpleEnabled(boolean authSimpleEnabled) {
+    this.authSimpleEnabled = authSimpleEnabled;
+  }
+
+  public String getCredentialsFile() {
+    return credentialsFile;
+  }
+
+  public void setCredentialsFile(String credentialsFile) {
+    this.credentialsFile = credentialsFile;
+  }
+
+  public String getExternalAuthHostUrl() {
+    return externalAuthHostUrl;
+  }
+
+  public void setExternalAuthHostUrl(String externalAuthHostUrl) {
+    this.externalAuthHostUrl = externalAuthHostUrl;
+  }
+
+  public String getExternalAuthLoginUrl() {
+    return externalAuthLoginUrl;
+  }
+
+  public void setExternalAuthLoginUrl(String externalAuthLoginUrl) {
+    this.externalAuthLoginUrl = externalAuthLoginUrl;
+  }
+}
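
All of the @Value expressions above use the ${key:default} form, so the bean works even when logsearch.properties leaves a key unset; the values come from the default.properties and logsearch.properties sources declared in ApplicationConfig. A minimal sketch of a consumer (UserDao, further down in this patch, reads the credentials file exactly like this):

    import javax.inject.Inject;
    import org.apache.ambari.logsearch.conf.AuthConfig;

    // Illustrative consumer, not part of the patch.
    class AuthConfigConsumer {
      @Inject
      private AuthConfig authConfig;

      void loadUsersIfFileAuthEnabled() {
        if (authConfig.isAuthFileEnabled()) {
          String credentialsFile = authConfig.getCredentialsFile(); // "user_pass.json" unless overridden
          // ... locate the file on the classpath and load the user list
        }
      }
    }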

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrAuditLogConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrAuditLogConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrAuditLogConfig.java
new file mode 100644
index 0000000..ae4dca9
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrAuditLogConfig.java
@@ -0,0 +1,181 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.conf;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Configuration;
+
+import java.util.List;
+import java.util.Map;
+
+@Configuration
+public class SolrAuditLogConfig implements SolrConfig, SolrColumnConfig {
+
+  @Value("${logsearch.solr.audit.logs.url:}")
+  private String solrUrl;
+
+  @Value("${logsearch.solr.audit.logs.zk_connect_string:}")
+  private String zkConnectString;
+
+  @Value("${logsearch.solr.collection.audit.logs:audit_logs}")
+  private String collection;
+
+  @Value("${logsearch.ranger.audit.logs.collection.name:}")
+  private String rangerCollection;
+
+  @Value("${logsearch.solr.audit.logs.config.name:audit_logs}")
+  private String configName;
+
+  @Value("${logsearch.solr.audit.logs.alias.name:audit_logs_alias}")
+  private String aliasNameIn;
+
+  @Value("${logsearch.audit.logs.split.interval.mins:none}")
+  private String splitInterval;
+
+  @Value("${logsearch.collection.audit.logs.numshards:1}")
+  private Integer numberOfShards;
+
+  @Value("${logsearch.collection.audit.logs.replication.factor:1}")
+  private Integer replicationFactor;
+
+  @Value("#{propertyMapper.map('${logsearch.solr.audit.logs.column.mapping}')}")
+  private Map<String, String> columnMapping;
+
+  @Value("#{propertyMapper.list('${logsearch.solr.audit.logs.exclude.columnlist}')}")
+  private List<String> excludeColumnList;
+
+  @Value("#{propertyMapper.solrUiMap('${logsearch.solr.audit.logs.column.mapping}')}")
+  private Map<String, String> solrAndUiColumns;
+
+  @Override
+  public String getSolrUrl() {
+    return solrUrl;
+  }
+
+  @Override
+  public void setSolrUrl(String solrUrl) {
+    this.solrUrl = solrUrl;
+  }
+
+  @Override
+  public String getCollection() {
+    return collection;
+  }
+
+  @Override
+  public void setCollection(String collection) {
+    this.collection = collection;
+  }
+
+  @Override
+  public String getZkConnectString() {
+    return zkConnectString;
+  }
+
+  @Override
+  public void setZkConnectString(String zkConnectString) {
+    this.zkConnectString = zkConnectString;
+  }
+
+  @Override
+  public String getConfigName() {
+    return configName;
+  }
+
+  @Override
+  public void setConfigName(String configName) {
+    this.configName = configName;
+  }
+
+  @Override
+  public Integer getNumberOfShards() {
+    return numberOfShards;
+  }
+
+  @Override
+  public void setNumberOfShards(Integer numberOfShards) {
+    this.numberOfShards = numberOfShards;
+  }
+
+  @Override
+  public Integer getReplicationFactor() {
+    return replicationFactor;
+  }
+
+  @Override
+  public void setReplicationFactor(Integer replicationFactor) {
+    this.replicationFactor = replicationFactor;
+  }
+
+  @Override
+  public String getSplitInterval() {
+    return splitInterval;
+  }
+
+  @Override
+  public void setSplitInterval(String splitInterval) {
+    this.splitInterval = splitInterval;
+  }
+
+  @Override
+  public List<String> getExcludeColumnList() {
+    return excludeColumnList;
+  }
+
+  @Override
+  public void setExcludeColumnList(List<String> excludeColumnList) {
+    this.excludeColumnList = excludeColumnList;
+  }
+
+  @Override
+  public Map<String, String> getColumnMapping() {
+    return columnMapping;
+  }
+
+  @Override
+  public void setColumnMapping(Map<String, String> columnMappings) {
+    this.columnMapping = columnMappings;
+  }
+
+  @Override
+  public Map<String, String> getSolrAndUiColumns() {
+    return solrAndUiColumns;
+  }
+
+  @Override
+  public void setSolrAndUiColumns(Map<String, String> solrAndUiColumns) {
+    this.solrAndUiColumns = solrAndUiColumns;
+  }
+
+  public String getRangerCollection() {
+    return rangerCollection;
+  }
+
+  public void setRangerCollection(String rangerCollection) {
+    this.rangerCollection = rangerCollection;
+  }
+
+  public String getAliasNameIn() {
+    return aliasNameIn;
+  }
+
+  public void setAliasNameIn(String aliasNameIn) {
+    this.aliasNameIn = aliasNameIn;
+  }
+}
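
The #{propertyMapper.map(...)}, #{propertyMapper.list(...)} and #{propertyMapper.solrUiMap(...)} expressions are SpEL calls on a bean named propertyMapper that does not appear in this hunk; they parse the already-resolved property string into a Map or List. As an assumption only (the real bean and its parsing format are not shown here), such a helper could look roughly like the sketch below, with comma-separated entries and colon-separated pairs taken as a guessed convention:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import org.springframework.stereotype.Component;

    @Component("propertyMapper") // hypothetical sketch of the bean the SpEL expressions refer to
    class PropertyMapperSketch {
      public List<String> list(String value) {          // "a,b,c" -> [a, b, c]
        return (value == null || value.isEmpty())
            ? Collections.<String>emptyList() : Arrays.asList(value.split(","));
      }

      public Map<String, String> map(String value) {    // "k1:v1,k2:v2" -> {k1=v1, k2=v2} (format assumed)
        Map<String, String> result = new HashMap<>();
        for (String entry : list(value)) {
          String[] pair = entry.split(":", 2);
          if (pair.length == 2) {
            result.put(pair[0].trim(), pair[1].trim());
          }
        }
        return result;
      }
    }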

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrColumnConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrColumnConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrColumnConfig.java
new file mode 100644
index 0000000..a12b2ce
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrColumnConfig.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.conf;
+
+import java.util.List;
+import java.util.Map;
+
+public interface SolrColumnConfig {
+  Map<String, String> getColumnMapping();
+
+  void setColumnMapping(Map<String, String> columnMapping);
+
+  List<String> getExcludeColumnList();
+
+  void setExcludeColumnList(List<String> excludeColumnList);
+
+  Map<String, String> getSolrAndUiColumns();
+
+  void setSolrAndUiColumns(Map<String, String> solrAndUiColumns);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrConfig.java
new file mode 100644
index 0000000..cfbe097
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrConfig.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.conf;
+
+public interface SolrConfig {
+  String getSolrUrl();
+
+  void setSolrUrl(String solrUrl);
+
+  String getZkConnectString();
+
+  void setZkConnectString(String zkConnectString);
+
+  String getCollection();
+
+  void setCollection(String collection);
+
+  String getConfigName();
+
+  void setConfigName(String configName);
+
+  Integer getNumberOfShards();
+
+  void setNumberOfShards(Integer numberOfShards);
+
+  Integer getReplicationFactor();
+
+  void setReplicationFactor(Integer replicationFactor);
+
+  String getSplitInterval();
+
+  void setSplitInterval(String splitInterval);
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrConnectionConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrConnectionConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrConnectionConfig.java
new file mode 100644
index 0000000..b3dceea
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrConnectionConfig.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.conf;
+
+import org.springframework.beans.factory.annotation.Value;
+
+public abstract class SolrConnectionConfig implements SolrConfig {
+  @Value("${logsearch.solr.url:}")
+  private String solrUrl;
+
+  @Value("${logsearch.solr.zk_connect_string:}")
+  private String zkConnectString;
+
+  @Override
+  public String getSolrUrl() {
+    return solrUrl;
+  }
+
+  @Override
+  public void setSolrUrl(String solrUrl) {
+    this.solrUrl = solrUrl;
+  }
+
+  @Override
+  public String getZkConnectString() {
+    return zkConnectString;
+  }
+
+  @Override
+  public void setZkConnectString(String zkConnectString) {
+    this.zkConnectString = zkConnectString;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrKerberosConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrKerberosConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrKerberosConfig.java
new file mode 100644
index 0000000..7cf79b0
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrKerberosConfig.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.conf;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class SolrKerberosConfig {
+
+  @Value("${logsearch.solr.jaas.file:/usr/lib/ambari-logsearch-portal/logsearch_solr_jaas.conf}")
+  private String jaasFile;
+
+  @Value("${logsearch.solr.kerberos.enable:false}")
+  private boolean enabled;
+
+  public String getJaasFile() {
+    return jaasFile;
+  }
+
+  public void setJaasFile(String jaasFile) {
+    this.jaasFile = jaasFile;
+  }
+
+  public boolean isEnabled() {
+    return enabled;
+  }
+
+  public void setEnabled(boolean enabled) {
+    this.enabled = enabled;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrServiceLogConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrServiceLogConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrServiceLogConfig.java
new file mode 100644
index 0000000..e768402
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrServiceLogConfig.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.conf;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Configuration;
+
+import java.util.List;
+import java.util.Map;
+
+@Configuration
+public class SolrServiceLogConfig extends SolrConnectionConfig implements SolrColumnConfig {
+
+  @Value("${logsearch.solr.collection.service.logs:hadoop_logs}")
+  private String collection;
+
+  @Value("${logsearch.service.logs.split.interval.mins:none}")
+  private String splitInterval;
+
+  @Value("${logsearch.solr.service.logs.config.name:hadoop_logs}")
+  private String configName;
+
+  @Value("${logsearch.collection.service.logs.numshards:1}")
+  private Integer numberOfShards;
+
+  @Value("${logsearch.collection.service.logs.replication.factor:1}")
+  private Integer replicationFactor;
+
+  @Value("#{propertyMapper.list('${logsearch.service.logs.fields}')}")
+  private List<String> fields;
+
+  @Value("#{propertyMapper.map('${logsearch.solr.audit.logs.column.mapping}')}")
+  private Map<String, String> columnMapping;
+
+  @Value("#{propertyMapper.list('${logsearch.solr.audit.logs.exclude.columnlist}')}")
+  private List<String> excludeColumnList;
+
+  @Value("#{propertyMapper.solrUiMap('${logsearch.solr.audit.logs.column.mapping}}')}")
+  private Map<String, String> solrAndUiColumns;
+
+  @Override
+  public String getCollection() {
+    return collection;
+  }
+
+  @Override
+  public void setCollection(String collection) {
+    this.collection = collection;
+  }
+
+  @Override
+  public String getSplitInterval() {
+    return splitInterval;
+  }
+
+  @Override
+  public void setSplitInterval(String splitInterval) {
+    this.splitInterval = splitInterval;
+  }
+
+  @Override
+  public String getConfigName() {
+    return configName;
+  }
+
+  @Override
+  public void setConfigName(String configName) {
+    this.configName = configName;
+  }
+
+  @Override
+  public Integer getNumberOfShards() {
+    return numberOfShards;
+  }
+
+  @Override
+  public void setNumberOfShards(Integer numberOfShards) {
+    this.numberOfShards = numberOfShards;
+  }
+
+  @Override
+  public Integer getReplicationFactor() {
+    return replicationFactor;
+  }
+
+  @Override
+  public void setReplicationFactor(Integer replicationFactor) {
+    this.replicationFactor = replicationFactor;
+  }
+
+  @Override
+  public Map<String, String> getColumnMapping() {
+    return columnMapping;
+  }
+
+  @Override
+  public void setColumnMapping(Map<String, String> columnMapping) {
+    this.columnMapping = columnMapping;
+  }
+
+  @Override
+  public List<String> getExcludeColumnList() {
+    return excludeColumnList;
+  }
+
+  @Override
+  public void setExcludeColumnList(List<String> excludeColumnList) {
+    this.excludeColumnList = excludeColumnList;
+  }
+
+  @Override
+  public Map<String, String> getSolrAndUiColumns() {
+    return solrAndUiColumns;
+  }
+
+  @Override
+  public void setSolrAndUiColumns(Map<String, String> solrAndUiColumns) {
+    this.solrAndUiColumns = solrAndUiColumns;
+  }
+
+  public List<String> getFields() {
+    return fields;
+  }
+
+  public void setFields(List<String> fields) {
+    this.fields = fields;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrUserConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrUserConfig.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrUserConfig.java
new file mode 100644
index 0000000..8ae630a
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/conf/SolrUserConfig.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.conf;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Configuration;
+
+import java.util.List;
+
+@Configuration
+public class SolrUserConfig extends SolrConnectionConfig {
+
+  @Value("${logsearch.solr.collection.history:history}")
+  private String collection;
+
+  @Value("${logsearch.history.split.interval.mins:none}")
+  private String splitInterval;
+
+  @Value("${logsearch.solr.history.config.name:history}")
+  private String configName;
+
+  @Value("${logsearch.collection.history.numshards:1}")
+  private Integer numberOfShards;
+
+  @Value("${logsearch.collection.history.replication.factor:2}")
+  private Integer replicationFactor;
+
+  @Value("#{'${logsearch.logfeeder.include.default.level:FATAL,ERROR,WARN,INFO,DEBUG,TRACE,UNKNOWN}'.split(',')}")
+  private List<String> logLevels;
+
+  @Override
+  public String getCollection() {
+    return collection;
+  }
+
+  @Override
+  public void setCollection(String collection) {
+    this.collection = collection;
+  }
+
+  @Override
+  public String getSplitInterval() {
+    return splitInterval;
+  }
+
+  @Override
+  public void setSplitInterval(String splitInterval) {
+    this.splitInterval = splitInterval;
+  }
+
+  @Override
+  public String getConfigName() {
+    return configName;
+  }
+
+  @Override
+  public void setConfigName(String configName) {
+    this.configName = configName;
+  }
+
+  @Override
+  public Integer getNumberOfShards() {
+    return numberOfShards;
+  }
+
+  @Override
+  public void setNumberOfShards(Integer numberOfShards) {
+    this.numberOfShards = numberOfShards;
+  }
+
+  @Override
+  public Integer getReplicationFactor() {
+    return replicationFactor;
+  }
+
+  @Override
+  public void setReplicationFactor(Integer replicationFactor) {
+    this.replicationFactor = replicationFactor;
+  }
+
+  public List<String> getLogLevels() {
+    return logLevels;
+  }
+
+  public void setLogLevels(List<String> logLevels) {
+    this.logLevels = logLevels;
+  }
+}
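
The logLevels field relies on SpEL's String.split to turn the single comma-separated property into a List<String>. When the property is not overridden, the default resolves to the plain-Java equivalent of:

    import java.util.Arrays;
    import java.util.List;

    class DefaultLogLevelsSketch {               // illustrative only
      public static void main(String[] args) {
        // Equivalent of the SpEL expression when the property keeps its default value:
        List<String> logLevels =
            Arrays.asList("FATAL,ERROR,WARN,INFO,DEBUG,TRACE,UNKNOWN".split(","));
        System.out.println(logLevels);           // [FATAL, ERROR, WARN, INFO, DEBUG, TRACE, UNKNOWN]
      }
    }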

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
index 64aa776..4f5e734 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
@@ -23,9 +23,11 @@ import java.util.Arrays;
 import java.util.Collection;
 
 import javax.annotation.PostConstruct;
+import javax.inject.Inject;
 
 import org.apache.ambari.logsearch.common.PropertiesHelper;
-import org.apache.ambari.logsearch.manager.MgrBase.LogType;
+import org.apache.ambari.logsearch.conf.SolrAuditLogConfig;
+import org.apache.ambari.logsearch.manager.ManagerBase.LogType;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.springframework.stereotype.Component;
@@ -33,23 +35,26 @@ import org.springframework.stereotype.Component;
 @Component
 public class AuditSolrDao extends SolrDaoBase {
 
-  static private Logger logger = Logger.getLogger(AuditSolrDao.class);
-  
+  private static final Logger logger = Logger.getLogger(AuditSolrDao.class);
+
+  @Inject
+  private SolrAuditLogConfig solrAuditLogConfig;
+
   public AuditSolrDao() {
     super(LogType.AUDIT);
   }
 
   @PostConstruct
   public void postConstructor() {
-    String solrUrl = PropertiesHelper.getProperty("logsearch.solr.audit.logs.url");
-    String zkConnectString = PropertiesHelper.getProperty("logsearch.solr.audit.logs.zk_connect_string");
-    String collection = PropertiesHelper.getProperty("logsearch.solr.collection.audit.logs", "audit_logs");
-    String aliasNameIn = PropertiesHelper.getProperty("logsearch.solr.audit.logs.alias.name", "audit_logs_alias");
-    String rangerAuditCollection = PropertiesHelper.getProperty("logsearch.ranger.audit.logs.collection.name");
-    String splitInterval = PropertiesHelper.getProperty("logsearch.audit.logs.split.interval.mins", "none");
-    String configName = PropertiesHelper.getProperty("logsearch.solr.audit.logs.config.name", "audit_logs");
-    int numberOfShards = PropertiesHelper.getIntProperty("logsearch.collection.audit.logs.numshards", 1);
-    int replicationFactor = PropertiesHelper.getIntProperty("logsearch.collection.audit.logs.replication.factor", 1);
+    String solrUrl = solrAuditLogConfig.getSolrUrl();
+    String zkConnectString = solrAuditLogConfig.getZkConnectString();
+    String collection = solrAuditLogConfig.getCollection();
+    String aliasNameIn = solrAuditLogConfig.getAliasNameIn();
+    String rangerAuditCollection = solrAuditLogConfig.getRangerCollection();
+    String splitInterval = solrAuditLogConfig.getSplitInterval();
+    String configName = solrAuditLogConfig.getConfigName();
+    int numberOfShards = solrAuditLogConfig.getNumberOfShards();
+    int replicationFactor = solrAuditLogConfig.getReplicationFactor();
 
     try {
       connectToSolr(solrUrl, zkConnectString, collection);
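
Reading the connection settings from the injected SolrAuditLogConfig bean (instead of PropertiesHelper lookups) also makes the DAO easier to exercise outside a full Spring context, because the config object is a plain POJO with setters. A minimal sketch, assuming a test injects the instance by hand (for example via Spring's ReflectionTestUtils) before calling postConstructor():

    import org.apache.ambari.logsearch.conf.SolrAuditLogConfig;

    class AuditSolrDaoTestSketch {               // illustrative only, not part of the patch
      static SolrAuditLogConfig localTestConfig() {
        SolrAuditLogConfig config = new SolrAuditLogConfig();
        config.setZkConnectString("localhost:2181/solr"); // assumed local test address
        config.setCollection("audit_logs");
        config.setConfigName("audit_logs");
        config.setNumberOfShards(1);
        config.setReplicationFactor(1);
        config.setSplitInterval("none");
        return config;
      }
      // A test could set this instance on an AuditSolrDao field
      // (e.g. ReflectionTestUtils.setField(dao, "solrAuditLogConfig", localTestConfig()))
      // and then call postConstructor() to drive connectToSolr(...) with these values.
    }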

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
index 6e2bb4b..e338b7c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
@@ -20,9 +20,11 @@
 package org.apache.ambari.logsearch.dao;
 
 import javax.annotation.PostConstruct;
+import javax.inject.Inject;
 
 import org.apache.ambari.logsearch.common.PropertiesHelper;
-import org.apache.ambari.logsearch.manager.MgrBase.LogType;
+import org.apache.ambari.logsearch.conf.SolrServiceLogConfig;
+import org.apache.ambari.logsearch.manager.ManagerBase.LogType;
 import org.apache.log4j.Logger;
 import org.springframework.stereotype.Component;
 
@@ -30,6 +32,9 @@ import org.springframework.stereotype.Component;
 public class ServiceLogsSolrDao extends SolrDaoBase {
 
   private static final Logger logger = Logger.getLogger(ServiceLogsSolrDao.class);
+
+  @Inject
+  private SolrServiceLogConfig solrServiceLogConfig;
   
   public ServiceLogsSolrDao() {
     super(LogType.SERVICE);
@@ -38,13 +43,13 @@ public class ServiceLogsSolrDao extends SolrDaoBase {
   @PostConstruct
   public void postConstructor() {
     logger.info("postConstructor() called.");
-    String solrUrl = PropertiesHelper.getProperty("logsearch.solr.url");
-    String zkConnectString = PropertiesHelper.getProperty("logsearch.solr.zk_connect_string");
-    String collection = PropertiesHelper.getProperty("logsearch.solr.collection.service.logs", "hadoop_logs");
-    String splitInterval = PropertiesHelper.getProperty("logsearch.service.logs.split.interval.mins", "none");
-    String configName = PropertiesHelper.getProperty("logsearch.solr.service.logs.config.name", "hadoop_logs");
-    int numberOfShards = PropertiesHelper.getIntProperty("logsearch.collection.service.logs.numshards", 1);
-    int replicationFactor = PropertiesHelper.getIntProperty("logsearch.collection.service.logs.replication.factor", 1);
+    String solrUrl = solrServiceLogConfig.getSolrUrl();
+    String zkConnectString = solrServiceLogConfig.getZkConnectString();
+    String collection = solrServiceLogConfig.getCollection();
+    String splitInterval = solrServiceLogConfig.getSplitInterval();
+    String configName = solrServiceLogConfig.getConfigName();
+    int numberOfShards = solrServiceLogConfig.getNumberOfShards();
+    int replicationFactor = solrServiceLogConfig.getReplicationFactor();
 
     try {
       connectToSolr(solrUrl, zkConnectString, collection);

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
index 8cdb6eb..b325171 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
@@ -30,7 +30,9 @@ import org.apache.ambari.logsearch.common.ConfigHelper;
 import org.apache.ambari.logsearch.common.LogSearchContext;
 import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.common.PropertiesHelper;
-import org.apache.ambari.logsearch.manager.MgrBase.LogType;
+import org.apache.ambari.logsearch.conf.SolrKerberosConfig;
+import org.apache.ambari.logsearch.conf.SolrUserConfig;
+import org.apache.ambari.logsearch.manager.ManagerBase.LogType;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
@@ -58,6 +60,8 @@ import org.apache.solr.common.util.NamedList;
 
 import com.google.common.annotations.VisibleForTesting;
 
+import javax.inject.Inject;
+
 public abstract class SolrDaoBase {
   private static final Logger logger = Logger.getLogger(SolrDaoBase.class);
   private static final Logger logPerformance = Logger.getLogger("org.apache.ambari.logsearch.performance");
@@ -85,6 +89,11 @@ public abstract class SolrDaoBase {
   private String solrDetail = "";
 
   private boolean populateFieldsThreadActive = false;
+
+  @Inject
+  private SolrKerberosConfig solrKerberosConfig;
+  @Inject
+  private SolrUserConfig solrUserConfig;
   
   protected SolrDaoBase(LogType logType) {
     this.logType = logType;
@@ -461,8 +470,8 @@ public abstract class SolrDaoBase {
   }
 
   private void setupSecurity() {
-    String jaasFile = PropertiesHelper.getProperty("logsearch.solr.jaas.file", "/etc/security/keytabs/logsearch_solr.service.keytab");
-    boolean securityEnabled = PropertiesHelper.getBooleanProperty("logsearch.solr.kerberos.enable", false);
+    String jaasFile = solrKerberosConfig.getJaasFile();
+    boolean securityEnabled = solrKerberosConfig.isEnabled();
     if (securityEnabled) {
       System.setProperty("java.security.auth.login.config", jaasFile);
       HttpClientUtil.setConfigurer(new Krb5HttpClientConfigurer());
@@ -512,12 +521,12 @@ public abstract class SolrDaoBase {
     SolrRequest<SchemaResponse> request = new SchemaRequest();
     request.setMethod(METHOD.GET);
     request.setPath("/schema");
-    String historyCollection = PropertiesHelper.getProperty("logsearch.solr.collection.history","history");
+    String historyCollection = solrUserConfig.getCollection();
     if (solrClient != null && !collectionName.equals(historyCollection)) {
       NamedList<Object> namedList = null;
       try {
         namedList = solrClient.request(request);
-        logger.info("populateSchemaFields() collection=" + collectionName + ", fields=" + namedList);
+        logger.debug("populateSchemaFields() collection=" + collectionName + ", fields=" + namedList);
       } catch (SolrException | SolrServerException | IOException e) {
         logger.error("Error occured while popuplating field. collection=" + collectionName, e);
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
index 026c78f..a9fb8d2 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
@@ -21,15 +21,17 @@ package org.apache.ambari.logsearch.dao;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.Arrays;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Scanner;
 import javax.annotation.PostConstruct;
+import javax.inject.Inject;
+
+import org.apache.ambari.logsearch.conf.SolrUserConfig;
 import org.apache.ambari.logsearch.view.VLogfeederFilterWrapper;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
-import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.response.QueryResponse;
@@ -42,7 +44,7 @@ import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 import com.google.gson.JsonParseException;
 
-import org.apache.ambari.logsearch.manager.MgrBase.LogType;
+import org.apache.ambari.logsearch.manager.ManagerBase.LogType;
 import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.log4j.Logger;
 import org.springframework.stereotype.Component;
@@ -52,7 +54,9 @@ import org.springframework.util.CollectionUtils;
 public class UserConfigSolrDao extends SolrDaoBase {
 
   private static final Logger logger = Logger.getLogger(UserConfigSolrDao.class);
-  private static final String DEFAULT_LEVELS = "FATAL,ERROR,WARN,INFO,DEBUG,TRACE";
+
+  @Inject
+  private SolrUserConfig solrUserConfig;
 
   public UserConfigSolrDao() {
     super(LogType.SERVICE);
@@ -60,13 +64,13 @@ public class UserConfigSolrDao extends SolrDaoBase {
 
   @PostConstruct
   public void postConstructor() {
-    String solrUrl = PropertiesHelper.getProperty("logsearch.solr.url");
-    String zkConnectString = PropertiesHelper.getProperty("logsearch.solr.zk_connect_string");
-    String collection = PropertiesHelper.getProperty("logsearch.solr.collection.history", "history");
-    String configName = PropertiesHelper.getProperty("logsearch.solr.history.config.name", "history");
-    int replicationFactor = PropertiesHelper.getIntProperty("logsearch.collection.history.replication.factor", 2);
-    String splitInterval = "none";
-    int numberOfShards = 1;
+    String solrUrl = solrUserConfig.getSolrUrl();
+    String zkConnectString = solrUserConfig.getZkConnectString();
+    String collection = solrUserConfig.getCollection();
+    String configName = solrUserConfig.getConfigName();
+    int replicationFactor = solrUserConfig.getReplicationFactor();
+    String splitInterval = solrUserConfig.getSplitInterval();
+    int numberOfShards = solrUserConfig.getNumberOfShards();
 
     try {
       connectToSolr(solrUrl, zkConnectString, collection);
@@ -117,14 +121,14 @@ public class UserConfigSolrDao extends SolrDaoBase {
     if (!CollectionUtils.isEmpty(documentList)) {
       SolrDocument configDoc = documentList.get(0);
       String configJson = JSONUtil.objToJson(configDoc);
-      HashMap<String, Object> configMap = (HashMap<String, Object>) JSONUtil.jsonToMapObject(configJson);
+      HashMap<String, Object> configMap = JSONUtil.jsonToMapObject(configJson);
       String json = (String) configMap.get(LogSearchConstants.VALUES);
       logfeederFilterWrapper = (VLogfeederFilterWrapper) JSONUtil.jsonToObj(json, VLogfeederFilterWrapper.class);
       logfeederFilterWrapper.setId("" + configDoc.get(LogSearchConstants.ID));
 
     } else {
-      String logfeederDefaultLevels = PropertiesHelper.getProperty("logsearch.logfeeder.include.default.level", DEFAULT_LEVELS);
-      JSONArray levelJsonArray = new JSONArray(Arrays.asList(logfeederDefaultLevels.split(",")));
+      List<String> logfeederDefaultLevels = solrUserConfig.getLogLevels();
+      JSONArray levelJsonArray = new JSONArray(logfeederDefaultLevels);
 
       String hadoopServiceString = getHadoopServiceConfigJSON();
       String key = null;
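
The switch from JSONArray(Arrays.asList(levels.split(","))) to JSONArray(logfeederDefaultLevels) works because Jettison's JSONArray accepts a Collection directly, so the bootstrap filter document it produces is unchanged. For example:

    import java.util.Arrays;
    import org.codehaus.jettison.json.JSONArray;

    class LevelsJsonSketch {                     // illustrative only
      public static void main(String[] args) {
        JSONArray levels = new JSONArray(Arrays.asList("FATAL", "ERROR", "WARN"));
        System.out.println(levels);              // ["FATAL","ERROR","WARN"]
      }
    }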

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
index 39f0e25..a04dee4 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
@@ -24,8 +24,9 @@ import java.util.Arrays;
 import java.util.HashMap;
 
 import javax.annotation.PostConstruct;
+import javax.inject.Inject;
 
-import org.springframework.beans.factory.annotation.Autowired;
+import org.apache.ambari.logsearch.conf.AuthConfig;
 import org.springframework.security.core.GrantedAuthority;
 import org.springframework.stereotype.Repository;
 import org.apache.ambari.logsearch.common.PropertiesHelper;
@@ -50,9 +51,12 @@ public class UserDao {
   private static final String ENC_PASSWORD = "en_password";
   private static final String NAME = "name";
 
-  @Autowired
+  @Inject
   private LogsearchFileAuthenticationProvider fileAuthenticationProvider;
 
+  @Inject
+  private AuthConfig authConfig;
+
   private ArrayList<HashMap<String, String>> userList = null;
 
   @SuppressWarnings("unchecked")
@@ -60,7 +64,7 @@ public class UserDao {
   public void initialization() {
     if (fileAuthenticationProvider.isEnable()) {
       try {
-        String userPassJsonFileName = PropertiesHelper.getProperty("logsearch.login.credentials.file");
+        String userPassJsonFileName = authConfig.getCredentialsFile();
         logger.info("USER PASS JSON  file NAME:" + userPassJsonFileName);
         File jsonFile = FileUtil.getFileFromClasspath(userPassJsonFileName);
         if (jsonFile == null || !jsonFile.exists()) {


[34/50] [abbrv] ambari git commit: AMBARI-18310. Refactor logsearch portal side code (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
new file mode 100644
index 0000000..c4d14a9
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
@@ -0,0 +1,1917 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.manager;
+
+import java.io.IOException;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Collection;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.TimeZone;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+
+import javax.inject.Inject;
+import javax.ws.rs.core.Response;
+
+import org.apache.ambari.logsearch.common.ConfigHelper;
+import org.apache.ambari.logsearch.common.LogSearchConstants;
+import org.apache.ambari.logsearch.common.MessageEnums;
+import org.apache.ambari.logsearch.common.PropertiesHelper;
+import org.apache.ambari.logsearch.conf.SolrServiceLogConfig;
+import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao;
+import org.apache.ambari.logsearch.graph.GraphDataGenerator;
+import org.apache.ambari.logsearch.model.response.BarGraphData;
+import org.apache.ambari.logsearch.model.response.BarGraphDataListResponse;
+import org.apache.ambari.logsearch.model.response.CountData;
+import org.apache.ambari.logsearch.model.response.CountDataListResponse;
+import org.apache.ambari.logsearch.model.response.GraphData;
+import org.apache.ambari.logsearch.model.response.GraphDataListResponse;
+import org.apache.ambari.logsearch.model.response.GroupListResponse;
+import org.apache.ambari.logsearch.model.response.LogData;
+import org.apache.ambari.logsearch.model.response.LogListResponse;
+import org.apache.ambari.logsearch.model.response.LogSearchResponse;
+import org.apache.ambari.logsearch.model.response.NameValueData;
+import org.apache.ambari.logsearch.model.response.NameValueDataListResponse;
+import org.apache.ambari.logsearch.model.response.NodeData;
+import org.apache.ambari.logsearch.model.response.NodeListResponse;
+import org.apache.ambari.logsearch.model.response.ServiceLogData;
+import org.apache.ambari.logsearch.model.response.ServiceLogResponse;
+import org.apache.ambari.logsearch.query.QueryGenerationBase;
+import org.apache.ambari.logsearch.solr.model.SolrComponentTypeLogData;
+import org.apache.ambari.logsearch.solr.model.SolrHostLogData;
+import org.apache.ambari.logsearch.solr.model.SolrServiceLogData;
+import org.apache.ambari.logsearch.util.BizUtil;
+import org.apache.ambari.logsearch.util.DateUtil;
+import org.apache.ambari.logsearch.util.FileUtil;
+import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.ambari.logsearch.util.SolrUtil;
+import org.apache.ambari.logsearch.view.VSummary;
+import org.apache.ambari.logsearch.query.model.SearchCriteria;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.time.DateUtils;
+import org.apache.log4j.Logger;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.response.FacetField;
+import org.apache.solr.client.solrj.response.FacetField.Count;
+import org.apache.solr.client.solrj.response.PivotField;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.client.solrj.response.RangeFacet;
+import org.apache.solr.common.SolrDocument;
+import org.apache.solr.common.SolrDocumentList;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.SimpleOrderedMap;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+import com.google.common.collect.Lists;
+
+@Component
+public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceLogResponse> {
+
+  private static final Logger logger = Logger.getLogger(ServiceLogsManager.class);
+
+  private static List<String> cancelByDate = new CopyOnWriteArrayList<String>();
+
+  private static Map<String, String> mapUniqueId = new ConcurrentHashMap<String, String>();
+
+  private enum CONDITION {
+    OR, AND
+  }
+
+  @Inject
+  private ServiceLogsSolrDao serviceLogsSolrDao;
+  @Inject
+  private GraphDataGenerator graphDataGenerator;
+  @Inject
+  private SolrServiceLogConfig solrServiceLogConfig;
+
+  public ServiceLogResponse searchLogs(SearchCriteria searchCriteria) {
+    String keyword = (String) searchCriteria.getParamValue("keyword");
+    String logId = (String) searchCriteria.getParamValue("sourceLogId");
+    Boolean isLastPage = (Boolean) searchCriteria.getParamValue("isLastPage");
+
+    if (!StringUtils.isBlank(keyword)) {
+      try {
+        return (ServiceLogResponse) getPageByKeyword(searchCriteria);
+      } catch (SolrException | SolrServerException e) {
+        logger.error("Error while getting keyword=" + keyword, e);
+        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+            .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      }
+    } else if (!StringUtils.isBlank(logId)) {
+      try {
+        return (ServiceLogResponse) getPageByLogId(searchCriteria);
+      } catch (SolrException e) {
+        logger.error("Error while getting keyword=" + keyword, e);
+        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+            .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      }
+    } else if (isLastPage) {
+      SolrQuery lastPageQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
+      ServiceLogResponse logResponse = getLastPage(searchCriteria,LogSearchConstants.LOGTIME,serviceLogsSolrDao,lastPageQuery);
+      if(logResponse == null){
+        logResponse = new ServiceLogResponse();
+      }
+      return logResponse;
+    } else {
+      SolrQuery solrQuery = queryGenerator
+          .commonServiceFilterQuery(searchCriteria);
+
+      solrQuery.setParam("event", "/service/logs");
+
+      return getLogAsPaginationProvided(solrQuery,
+          serviceLogsSolrDao);
+    }
+  }
+
+  public GroupListResponse getHosts() {
+    return getFields(LogSearchConstants.SOLR_HOST, SolrHostLogData.class);
+  }
+  
+  private <T extends LogData> GroupListResponse getFields(String field, Class<T> clazz) {
+
+    SolrQuery solrQuery = new SolrQuery();
+    GroupListResponse collection = new GroupListResponse();
+    SolrUtil.setMainQuery(solrQuery, null);
+    SolrUtil.setFacetField(solrQuery,
+        field);
+    SolrUtil.setFacetSort(solrQuery, LogSearchConstants.FACET_INDEX);
+    try {
+      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+      if(response == null){
+        return collection;
+      }
+      FacetField facetField = response
+        .getFacetField(field);
+      if (facetField == null){
+        return collection;
+      }
+      List<Count> fieldList = facetField.getValues();
+      if (fieldList == null){
+        return collection;
+      }
+      SolrDocumentList docList = response.getResults();
+      if(docList == null){
+        return collection;
+      }
+      List<LogData> groupList = getLogDataListByFieldType(clazz, response, fieldList);
+
+      collection.setGroupList(groupList);
+      if(!docList.isEmpty()){
+        collection.setStartIndex((int) docList.getStart());
+        collection.setTotalCount(docList.getNumFound());
+      }
+      return collection;
+    } catch (IOException | SolrServerException | SolrException e) {
+      logger.error(e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+
+  }
+
+  public GroupListResponse getComponents() {
+    return getFields(LogSearchConstants.SOLR_COMPONENT, SolrComponentTypeLogData.class);
+  }
+
+  public GraphDataListResponse getAggregatedInfo(SearchCriteria searchCriteria) {
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
+    String hierarchy = "host,type,level";
+    GraphDataListResponse graphInfo = new GraphDataListResponse();
+    try {
+      SolrUtil.setMainQuery(solrQuery, null);
+      SolrUtil.setFacetPivot(solrQuery, 1, hierarchy);
+      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+      if (response == null) {
+        return graphInfo;
+      }
+
+      List<List<PivotField>> hierarchicalPivotFields = new ArrayList<>();
+      List<GraphData> dataList = new ArrayList<>();
+      NamedList<List<PivotField>> namedList = response.getFacetPivot();
+      if (namedList != null) {
+        hierarchicalPivotFields = namedList.getAll(hierarchy);
+      }
+      if (!hierarchicalPivotFields.isEmpty()) {
+        dataList = buidGraphData(hierarchicalPivotFields.get(0));
+      }
+      if (!dataList.isEmpty()) {
+        graphInfo.setGraphData(dataList);
+      }
+
+      return graphInfo;
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error("Error during solrQuery=" + solrQuery, e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+  }
+
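+  // Recursively converts Solr pivot fields into GraphData nodes, keeping the
+  // facet value and count at every level of the pivot.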
+  public List<GraphData> buidGraphData(List<PivotField> pivotFields) {
+    List<GraphData> logList = new ArrayList<>();
+    if (pivotFields != null) {
+      for (PivotField pivotField : pivotFields) {
+        if (pivotField != null) {
+          GraphData logLevel = new GraphData();
+          logLevel.setName("" + pivotField.getValue());
+          logLevel.setCount(Long.valueOf(pivotField.getCount()));
+          if (pivotField.getPivot() != null) {
+            logLevel.setDataList(buidGraphData(pivotField.getPivot()));
+          }
+          logList.add(logLevel);
+        }
+      }
+    }
+    return logList;
+  }
+
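+  // Returns the per-value facet counts of a single field (level, component or
+  // host) as a flat list of name/count pairs.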
+  public CountDataListResponse getFieldCount(String field){
+    CountDataListResponse collection = new CountDataListResponse();
+    List<CountData> vCounts = new ArrayList<>();
+    SolrQuery solrQuery = new SolrQuery();
+    SolrUtil.setMainQuery(solrQuery, null);
+    if(field == null){
+      return collection;
+    }
+    SolrUtil.setFacetField(solrQuery, field);
+    try {
+      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+      if (response == null){
+        return collection;
+      }
+      FacetField facetFields = response.getFacetField(field);
+      if (facetFields == null){
+        return collection;
+      }
+      List<Count> fieldList = facetFields.getValues();
+
+      if(fieldList == null){
+        return collection;
+      }
+
+      for (Count cnt : fieldList) {
+        if (cnt != null) {
+          CountData vCount = new CountData();
+          vCount.setName(cnt.getName());
+          vCount.setCount(cnt.getCount());
+          vCounts.add(vCount);
+        }
+      }
+
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error("Error during solrQuery=" + solrQuery, e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+
+    collection.setvCounts(vCounts);
+    return collection;
+  }
+  
+  public CountDataListResponse getLogLevelCount() {
+    return getFieldCount(LogSearchConstants.SOLR_LEVEL);
+  }
+
+  public CountDataListResponse getComponentsCount() {
+    return getFieldCount(LogSearchConstants.SOLR_COMPONENT);
+  }
+
+  public CountDataListResponse getHostsCount() {
+    return getFieldCount(LogSearchConstants.SOLR_HOST);
+  }
+
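+  // Builds the two-level tree (e.g. host -> component) from two pivot facets:
+  // the first pivot supplies the parent and child nodes with their counts, the
+  // second pivot supplies the log level counts attached to each parent node.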
+  public List<NodeData> buidTreeData(List<PivotField> pivotFields,
+                                     List<PivotField> pivotFieldHost, SolrQuery query,
+                                     String firstPriority, String secondPriority) {
+    List<NodeData> extensionTree = new ArrayList<>();
+    String hostQuery = null;
+    if (pivotFields != null) {
+      // For Host
+      for (PivotField pivotHost : pivotFields) {
+        if (pivotHost != null) {
+          NodeData hostNode = new NodeData();
+          String name = (pivotHost.getValue() == null ? "" : ""+ pivotHost.getValue());
+          String value = "" + pivotHost.getCount();
+          if(!StringUtils.isBlank(name)){
+            hostNode.setName(name);
+          }
+          if(!StringUtils.isBlank(value)){
+            hostNode.setValue(value);
+          }
+          if(!StringUtils.isBlank(firstPriority)){
+            hostNode.setType(firstPriority);
+          }
+
+          hostNode.setParent(true);
+          hostNode.setRoot(true);
+          PivotField hostPivot = null;
+          for (PivotField searchHost : pivotFieldHost) {
+            if (!StringUtils.isBlank(hostNode.getName())
+                && hostNode.getName().equals(searchHost.getValue())) {
+              hostPivot = searchHost;
+              break;
+            }
+          }
+          List<PivotField> pivotLevelHost = (hostPivot == null) ? null : hostPivot.getPivot();
+          if (pivotLevelHost != null) {
+            Collection<NameValueData> logLevelCount = new ArrayList<>();
+            for (PivotField pivotLevel : pivotLevelHost) {
+              if (pivotLevel != null) {
+                NameValueData vnameValue = new NameValueData();
+                String levelName = (pivotLevel.getValue() == null ? "" : ""
+                    + pivotLevel.getValue());
+                vnameValue.setName(levelName.toUpperCase());
+                vnameValue.setValue("" + pivotLevel.getCount());
+                logLevelCount.add(vnameValue);
+              }
+            }
+            hostNode.setLogLevelCount(logLevelCount);
+          }
+
+          if (hostQuery != null) {
+            query.addFilterQuery(hostQuery);
+          }
+          List<PivotField> pivotComponents = pivotHost.getPivot();
+          // For Components
+          if (pivotComponents != null) {
+            Collection<NodeData> componentNodes = new ArrayList<>();
+            for (PivotField pivotComp : pivotComponents) {
+              if (pivotComp != null) {
+                NodeData compNode = new NodeData();
+                String compName = (pivotComp.getValue() == null ? "" : ""
+                    + pivotComp.getValue());
+                compNode.setName(compName);
+                if (!StringUtils.isBlank(secondPriority)) {
+                  compNode.setType(secondPriority);
+                }
+                compNode.setValue("" + pivotComp.getCount());
+                compNode.setParent(false);
+                compNode.setRoot(false);
+                List<PivotField> pivotLevels = pivotComp.getPivot();
+                if (pivotLevels != null) {
+                  Collection<NameValueData> logLevelCount = new ArrayList<>();
+                  for (PivotField pivotLevel : pivotLevels) {
+                    if (pivotLevel != null) {
+                      NameValueData vnameValue = new NameValueData();
+                      String compLevel = pivotLevel.getValue() == null ? ""
+                          : "" + pivotLevel.getValue();
+                      vnameValue.setName((compLevel).toUpperCase());
+
+                      vnameValue.setValue("" + pivotLevel.getCount());
+                      logLevelCount.add(vnameValue);
+                    }
+                  }
+                  compNode.setLogLevelCount(logLevelCount);
+                }
+                componentNodes.add(compNode);
+              }
+            }
+            hostNode.setChilds(componentNodes);
+          }
+          extensionTree.add(hostNode);
+        }
+      }
+    }
+
+    return extensionTree;
+  }
+
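+  // Returns the host -> component tree with per-node log level counts for the
+  // tree view, optionally filtered by a partial host name.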
+  public NodeListResponse getTreeExtension(SearchCriteria searchCriteria) {
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
+    solrQuery.setParam("event", "/getTreeExtension");
+
+    if (searchCriteria.getSortBy() == null) {
+      searchCriteria.setSortBy(LogSearchConstants.SOLR_HOST);
+      searchCriteria.setSortType(SolrQuery.ORDER.asc.toString());
+    }
+    queryGenerator.setFilterFacetSort(solrQuery, searchCriteria);
+    String hostName = ""
+      + ((searchCriteria.getParamValue("hostName") == null) ? ""
+      : searchCriteria.getParamValue("hostName"));
+    if (!StringUtils.isBlank(hostName)){
+      solrQuery.addFilterQuery(LogSearchConstants.SOLR_HOST + ":*"
+        + hostName + "*");
+    }
+    String firstHirarchy = "host,type,level";
+    String secondHirarchy = "host,level";
+    NodeListResponse list = new NodeListResponse();
+    try {
+
+      SolrUtil.setFacetPivot(solrQuery, 1, firstHirarchy,
+        secondHirarchy);
+
+      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+      List<List<PivotField>> listFirstHirarchicalPivotFields = new ArrayList<List<PivotField>>();
+      NamedList<List<PivotField>> firstNamedList = response
+        .getFacetPivot();
+      if (firstNamedList != null) {
+        listFirstHirarchicalPivotFields = firstNamedList
+          .getAll(firstHirarchy);
+      }
+      List<List<PivotField>> listSecondHirarchicalPivotFields = new ArrayList<List<PivotField>>();
+      NamedList<List<PivotField>> secondNamedList = response
+        .getFacetPivot();
+      if (secondNamedList != null) {
+        listSecondHirarchicalPivotFields = secondNamedList
+          .getAll(secondHirarchy);
+      }
+      List<PivotField> firstHirarchicalPivotFields = new ArrayList<PivotField>();
+      List<PivotField> secondHirarchicalPivotFields = new ArrayList<PivotField>();
+      if (!listFirstHirarchicalPivotFields.isEmpty()) {
+        firstHirarchicalPivotFields = listFirstHirarchicalPivotFields
+          .get(0);
+      }
+      if (!listSecondHirarchicalPivotFields.isEmpty()) {
+        secondHirarchicalPivotFields = listSecondHirarchicalPivotFields
+          .get(0);
+      }
+      List<NodeData> dataList = buidTreeData(firstHirarchicalPivotFields,
+        secondHirarchicalPivotFields, solrQuery,
+        LogSearchConstants.HOST, LogSearchConstants.COMPONENT);
+
+      list.setvNodeList(dataList);
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error("Error during solrQuery=" + solrQuery, e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+
+    return list;
+  }
+
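+  // Returns the hosts (with log level counts) that logged for the given
+  // component; an empty response is returned when no component name is given.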
+  public NodeListResponse getHostListByComponent(SearchCriteria searchCriteria) {
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
+    solrQuery.setParam("event", "/service/hosts/components");
+
+    NodeListResponse list = new NodeListResponse();
+    if (searchCriteria.getSortBy() == null) {
+      searchCriteria.setSortBy(LogSearchConstants.SOLR_HOST);
+      searchCriteria.setSortType(SolrQuery.ORDER.asc.toString());
+    }
+    queryGenerator.setFilterFacetSort(solrQuery, searchCriteria);
+    String componentName = ""
+      + ((searchCriteria.getParamValue("componentName") == null) ? ""
+      : searchCriteria.getParamValue("componentName"));
+    if (!StringUtils.isBlank(componentName)){
+      solrQuery.addFilterQuery(LogSearchConstants.SOLR_COMPONENT + ":"
+        + componentName);
+    } else {
+      return list;
+    }
+
+    String firstHirarchy = "type,host,level";
+    String secondHirarchy = "type,level";
+
+    try {
+      SolrUtil.setFacetPivot(solrQuery, 1, firstHirarchy,
+        secondHirarchy);
+      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+      List<List<PivotField>> firstHirarchicalPivotFields = null;
+      List<List<PivotField>> secondHirarchicalPivotFields = null;
+      NamedList<List<PivotField>> firstNamedList = response
+        .getFacetPivot();
+      if (firstNamedList != null) {
+        firstHirarchicalPivotFields = firstNamedList
+          .getAll(firstHirarchy);
+        secondHirarchicalPivotFields = firstNamedList
+          .getAll(secondHirarchy);
+      }
+
+      if (firstHirarchicalPivotFields == null
+        || secondHirarchicalPivotFields == null) {
+        return list;
+      }
+
+      List<NodeData> dataList = buidTreeData(
+        firstHirarchicalPivotFields.get(0),
+        secondHirarchicalPivotFields.get(0), solrQuery,
+        LogSearchConstants.COMPONENT, LogSearchConstants.HOST);
+      if(dataList == null){
+        return list;
+      }
+
+      list.setvNodeList(dataList);
+      return list;
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error("Error during solrQuery=" + solrQuery, e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+  }
+
+  public NameValueDataListResponse getLogsLevelCount(SearchCriteria sc) {
+    NameValueDataListResponse nameValueList = new NameValueDataListResponse();
+    SolrQuery query = queryGenerator.commonServiceFilterQuery(sc);
+    query.setParam("event", "/service/logs/levels/counts/namevalues");
+    List<NameValueData> logsCounts = getLogLevelFacets(query);
+    nameValueList.setvNameValues(logsCounts);
+
+    return nameValueList;
+  }
+
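+  // Facets on the log level field and returns a count for every supported
+  // level, filling in "0" for levels missing from the facet result.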
+  public List<NameValueData> getLogLevelFacets(SolrQuery query) {
+    String defalutValue = "0";
+    HashMap<String, String> map = new HashMap<String, String>();
+    List<NameValueData> logsCounts = new ArrayList<>();
+    try {
+      SolrUtil.setFacetField(query, LogSearchConstants.SOLR_LEVEL);
+      List<Count> logLevelCounts = getFacetCounts(query,
+          LogSearchConstants.SOLR_LEVEL);
+      if (logLevelCounts == null) {
+        return logsCounts;
+      }
+      for (Count count : logLevelCounts) {
+        map.put(count.getName().toUpperCase(), "" + count.getCount());
+      }
+      for (String level : LogSearchConstants.SUPPORTED_LOG_LEVEL) {
+        NameValueData nameValue = new NameValueData();
+        String value = map.get(level);
+        if (StringUtils.isBlank(value)) {
+          value = defaultValue;
+        }
+        nameValue.setName(level);
+        nameValue.setValue(value);
+        logsCounts.add(nameValue);
+      }
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error("Error during solrQuery=" + query, e);
+    }
+    return logsCounts;
+  }
+
+  // Get the facet counts for the given facet field
+  public List<Count> getFacetCounts(SolrQuery solrQuery, String facetField)
+    throws SolrServerException, IOException, SolrException {
+    List<Count> list = new ArrayList<FacetField.Count>();
+
+    QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+    if(response == null){
+      return list;
+    }
+
+    FacetField field = response.getFacetField(facetField);
+    if (field == null) {
+      return list;
+    }
+    list = field.getValues();
+
+
+    return list;
+  }
+
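+  // Locates the page containing the next (or previous) occurrence of the
+  // keyword: it resolves the log time of the keyword hit relative to the
+  // current page, counts how many matching logs precede it, and then requests
+  // the page starting at that offset.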
+  public LogListResponse getPageByKeyword(SearchCriteria searchCriteria)
+    throws SolrServerException {
+    String defaultChoice = "0";
+
+    String key = (String) searchCriteria.getParamValue("keyword");
+    if(StringUtils.isBlank(key)){
+      throw RESTErrorUtil.createRESTException("Keyword was not given",
+          MessageEnums.DATA_NOT_FOUND);
+    }
+
+    String keyword = SolrUtil.escapeForStandardTokenizer(key);
+
+    if(keyword.startsWith("\"") && keyword.endsWith("\"")){
+      keyword = keyword.substring(1);
+      keyword = keyword.substring(0, keyword.length()-1);
+    }
+    keyword = "*" + keyword + "*";
+
+
+    String keyType = (String) searchCriteria.getParamValue("keywordType");
+    QueryResponse queryResponse = null;
+
+    if (!defaultChoice.equals(keyType)) {
+      try {
+        int currentPageNumber = searchCriteria.getPage();
+        int maxRows = searchCriteria.getMaxRows();
+        String nextPageLogID = "";
+
+        int lastLogIndexNumber = ((currentPageNumber + 1)
+          * maxRows);
+        String nextPageLogTime = "";
+
+
+        // Next Page Start Time Calculation
+        SolrQuery nextPageLogTimeQuery = queryGenerator
+          .commonServiceFilterQuery(searchCriteria);
+        nextPageLogTimeQuery.remove("start");
+        nextPageLogTimeQuery.remove("rows");
+        nextPageLogTimeQuery.setStart(lastLogIndexNumber);
+        nextPageLogTimeQuery.setRows(1);
+
+        queryResponse = serviceLogsSolrDao.process(
+            nextPageLogTimeQuery);
+        if(queryResponse == null){
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+
+        SolrDocumentList docList = queryResponse.getResults();
+        if (docList == null || docList.isEmpty()) {
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+
+        SolrDocument solrDoc = docList.get(0);
+
+        Date logDate = (Date) solrDoc.get(LogSearchConstants.LOGTIME);
+        if(logDate == null){
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+        nextPageLogTime = DateUtil
+          .convertDateWithMillisecondsToSolrDate(logDate);
+        nextPageLogID = ""
+          + solrDoc.get(LogSearchConstants.ID);
+
+        if (StringUtils.isBlank(nextPageLogID)){
+          nextPageLogID = "0";
+        }
+
+        String filterQueryListIds = "";
+        // Remove the same Time Ids
+        SolrQuery listRemoveIds = queryGenerator
+          .commonServiceFilterQuery(searchCriteria);
+        listRemoveIds.remove("start");
+        listRemoveIds.remove("rows");
+        queryGenerator.setSingleIncludeFilter(listRemoveIds,
+          LogSearchConstants.LOGTIME, "\"" + nextPageLogTime + "\"");
+        queryGenerator.setSingleExcludeFilter(listRemoveIds,
+          LogSearchConstants.ID, nextPageLogID);
+        SolrUtil.setFl(listRemoveIds, LogSearchConstants.ID);
+        queryResponse = serviceLogsSolrDao.process(
+            listRemoveIds);
+        if(queryResponse == null){
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+
+        SolrDocumentList docListIds = queryResponse.getResults();
+        if(docListIds ==null){
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+        boolean isFirst = true;
+        for (SolrDocument solrDocId :  docListIds ) {
+          String id = "" + solrDocId.get(LogSearchConstants.ID);
+          if (isFirst) {
+            filterQueryListIds += LogSearchConstants.MINUS_OPERATOR + LogSearchConstants.ID + ":" + id;
+            isFirst = false;
+          } else {
+            filterQueryListIds += " "+CONDITION.AND+" " + LogSearchConstants.MINUS_OPERATOR + LogSearchConstants.ID + ":" + id;
+          }
+        }
+
+        // Keyword Sequence Number Calculation
+        String endTime = (String) searchCriteria.getParamValue("to");
+        String startTime = (String) searchCriteria
+          .getParamValue("from");
+        SolrQuery logTimeThroughRangeQuery = queryGenerator
+          .commonServiceFilterQuery(searchCriteria);
+        logTimeThroughRangeQuery.remove("start");
+        logTimeThroughRangeQuery.remove("rows");
+        logTimeThroughRangeQuery.setRows(1);
+        if (!StringUtils.isBlank(filterQueryListIds)){
+          logTimeThroughRangeQuery.setFilterQueries(filterQueryListIds);
+        }
+
+        String sortByType = searchCriteria.getSortType();
+
+        if (!StringUtils.isBlank(sortByType) && sortByType
+          .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
+
+          queryGenerator.setSingleRangeFilter(logTimeThroughRangeQuery,
+            LogSearchConstants.LOGTIME, nextPageLogTime,
+            endTime);
+          logTimeThroughRangeQuery.set(LogSearchConstants.SORT,
+            LogSearchConstants.LOGTIME + " "
+              + LogSearchConstants.ASCENDING_ORDER);
+
+        } else {
+
+          queryGenerator.setSingleRangeFilter(logTimeThroughRangeQuery,
+            LogSearchConstants.LOGTIME, startTime,
+            nextPageLogTime);
+          logTimeThroughRangeQuery.set(LogSearchConstants.SORT,
+            LogSearchConstants.LOGTIME + " "
+              + LogSearchConstants.DESCENDING_ORDER);
+        }
+        queryGenerator.setSingleIncludeFilter(logTimeThroughRangeQuery,
+          LogSearchConstants.SOLR_KEY_LOG_MESSAGE, keyword);
+
+
+        queryResponse = serviceLogsSolrDao.process(
+            logTimeThroughRangeQuery);
+        if(queryResponse == null){
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+
+        SolrDocumentList documentList = queryResponse.getResults();
+        if(documentList ==null){
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+
+        SolrDocument solrDocument = new SolrDocument();
+        if (!documentList.isEmpty()){
+          solrDocument = documentList.get(0);
+        }
+
+        Date keywordLogDate = (Date) solrDocument.get(LogSearchConstants.LOGTIME);
+        if(keywordLogDate == null){
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+        String originalKeywordDate = DateUtil
+          .convertDateWithMillisecondsToSolrDate(keywordLogDate);
+        String keywordId = "" + solrDocument.get(LogSearchConstants.ID);
+
+        // Getting Range Count from StartTime To Keyword Log Time
+        SolrQuery rangeLogQuery = nextPageLogTimeQuery.getCopy();
+        rangeLogQuery.remove("start");
+        rangeLogQuery.remove("rows");
+
+        if (!StringUtils.isBlank(sortByType) && sortByType
+          .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
+          keywordLogDate = DateUtils.addMilliseconds(keywordLogDate, 1);
+          String keywordDateTime = DateUtil
+            .convertDateWithMillisecondsToSolrDate(keywordLogDate);
+          queryGenerator.setSingleRangeFilter(rangeLogQuery,
+            LogSearchConstants.LOGTIME, startTime,
+            keywordDateTime);
+        } else {
+          keywordLogDate = DateUtils.addMilliseconds(keywordLogDate, -1);
+          String keywordDateTime = DateUtil
+            .convertDateWithMillisecondsToSolrDate(keywordLogDate);
+          queryGenerator.setSingleRangeFilter(rangeLogQuery,
+            LogSearchConstants.LOGTIME, keywordDateTime,
+            endTime);
+        }
+
+
+        long countNumberLogs = countQuery(rangeLogQuery,serviceLogsSolrDao) - 1;
+
+        // Add the documents that share the keyword hit's timestamp (up to the
+        // keyword's own id) to the computed offset
+        try {
+          SolrQuery sameIdQuery = queryGenerator
+            .commonServiceFilterQuery(searchCriteria);
+          queryGenerator.setSingleIncludeFilter(sameIdQuery,
+            LogSearchConstants.LOGTIME, "\"" + originalKeywordDate + "\"");
+          SolrUtil.setFl(sameIdQuery, LogSearchConstants.ID);
+          SolrDocumentList sameQueryDocList = serviceLogsSolrDao.process(sameIdQuery)
+            .getResults();
+          for (SolrDocument solrDocumenent : sameQueryDocList) {
+            String id = (String) solrDocumenent
+              .getFieldValue(LogSearchConstants.ID);
+            countNumberLogs++;
+           
+            if (!StringUtils.isBlank(id) && id.equals(keywordId)){
+              break;
+            }
+          }
+        } catch (SolrException | SolrServerException | IOException e) {
+          logger.error(e);
+        }
+
+        int start = (int) ((countNumberLogs / maxRows) * maxRows);
+        SolrQuery logIdQuery = nextPageLogTimeQuery.getCopy();
+        rangeLogQuery.remove("start");
+        rangeLogQuery.remove("rows");
+        logIdQuery.setStart(start);
+        logIdQuery.setRows(searchCriteria.getMaxRows());
+        LogListResponse logResponse = getLogAsPaginationProvided(logIdQuery, serviceLogsSolrDao);
+        return logResponse;
+
+      } catch (Exception e) {
+        logger.error("Error while getting page by keyword", e);
+      }
+
+    } else {
+      try {
+        int currentPageNumber = searchCriteria.getPage();
+        int maxRows = searchCriteria.getMaxRows();
+
+        if (currentPageNumber == 0) {
+          throw RESTErrorUtil.createRESTException("This is first Page Not",
+            MessageEnums.DATA_NOT_FOUND);
+        }
+
+        int firstLogCurrentPage = (currentPageNumber * maxRows);
+        String lastLogsLogTime = "";
+
+        // Next Page Start Time Calculation
+        SolrQuery lastLogTime = queryGenerator
+          .commonServiceFilterQuery(searchCriteria);
+        lastLogTime.remove("start");
+        lastLogTime.remove("rows");
+
+        lastLogTime.setStart(firstLogCurrentPage);
+        lastLogTime.setRows(1);
+
+        queryResponse = serviceLogsSolrDao.process(
+            lastLogTime);
+        if(queryResponse == null){
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+
+        SolrDocumentList docList = queryResponse.getResults();
+        if (docList == null || docList.isEmpty()) {
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+        SolrDocument solrDoc = docList.get(0);
+
+        Date logDate = (Date) solrDoc.get(LogSearchConstants.LOGTIME);
+        String sortByType = searchCriteria.getSortType();
+        lastLogsLogTime = DateUtil
+          .convertDateWithMillisecondsToSolrDate(logDate);
+        String lastLogsLogId = ""
+          + solrDoc.get(LogSearchConstants.ID);
+
+
+        String filterQueryListIds = "";
+        // Remove the same Time Ids
+        SolrQuery listRemoveIds = queryGenerator
+          .commonServiceFilterQuery(searchCriteria);
+        listRemoveIds.remove("start");
+        listRemoveIds.remove("rows");
+        queryGenerator.setSingleIncludeFilter(listRemoveIds,
+          LogSearchConstants.LOGTIME, "\"" + lastLogsLogTime + "\"");
+        queryGenerator.setSingleExcludeFilter(listRemoveIds,
+          LogSearchConstants.ID, lastLogsLogId);
+        SolrUtil.setFl(listRemoveIds, LogSearchConstants.ID);
+        queryResponse = serviceLogsSolrDao.process(
+            listRemoveIds);
+        if(queryResponse == null){
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+
+        SolrDocumentList docListIds = queryResponse.getResults();
+        if(docListIds == null){
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+        boolean isFirst = true;
+        for (SolrDocument solrDocId : docListIds) {
+          if (solrDocId != null) {
+            String id = "" + solrDocId.get(LogSearchConstants.ID);
+            if (isFirst) {
+              filterQueryListIds += LogSearchConstants.MINUS_OPERATOR + LogSearchConstants.ID + ":" + id;
+              isFirst = false;
+            } else {
+              filterQueryListIds += " "+CONDITION.AND+" " + LogSearchConstants.MINUS_OPERATOR + LogSearchConstants.ID + ":"
+                  + id;
+            }
+          }
+        }
+
+
+        // Keyword LogTime Calculation
+        String endTime = (String) searchCriteria.getParamValue("to");
+        String startTime = (String) searchCriteria
+          .getParamValue("from");
+        SolrQuery logTimeThroughRangeQuery = queryGenerator
+          .commonServiceFilterQuery(searchCriteria);
+        logTimeThroughRangeQuery.remove("start");
+        logTimeThroughRangeQuery.remove("rows");
+        logTimeThroughRangeQuery.setRows(1);
+        queryGenerator.setSingleExcludeFilter(logTimeThroughRangeQuery,
+          LogSearchConstants.ID, lastLogsLogId);
+        if (!StringUtils.isBlank(filterQueryListIds)){
+          logTimeThroughRangeQuery.setFilterQueries(filterQueryListIds);
+        }
+
+        if (!StringUtils.isBlank(sortByType) && sortByType
+          .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
+
+          logTimeThroughRangeQuery.remove(LogSearchConstants.SORT);
+          logTimeThroughRangeQuery.set(LogSearchConstants.SORT,
+            LogSearchConstants.LOGTIME + " "
+              + LogSearchConstants.DESCENDING_ORDER);
+
+
+          queryGenerator.setSingleRangeFilter(
+            logTimeThroughRangeQuery,
+            LogSearchConstants.LOGTIME, startTime,
+            lastLogsLogTime);
+
+        } else {
+
+          logTimeThroughRangeQuery.remove(LogSearchConstants.SORT);
+          logTimeThroughRangeQuery.set(LogSearchConstants.SORT,
+            LogSearchConstants.LOGTIME + " "
+              + LogSearchConstants.ASCENDING_ORDER);
+
+
+          queryGenerator.setSingleRangeFilter(logTimeThroughRangeQuery,
+            LogSearchConstants.LOGTIME, lastLogsLogTime, endTime);
+        }
+        queryGenerator.setSingleIncludeFilter(logTimeThroughRangeQuery,
+          LogSearchConstants.SOLR_KEY_LOG_MESSAGE, keyword);
+
+
+        queryResponse = serviceLogsSolrDao.process(
+            logTimeThroughRangeQuery);
+        if(queryResponse == null){
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+
+        SolrDocumentList documentList = queryResponse.getResults();
+        if(documentList == null){
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+        SolrDocument solrDocument = new SolrDocument();
+        if (!documentList.isEmpty()){
+          solrDocument = documentList.get(0);
+        }
+
+        Date keywordLogDate = (Date) solrDocument.get(LogSearchConstants.LOGTIME);
+        if(keywordLogDate == null){
+          throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+              MessageEnums.ERROR_SYSTEM);
+        }
+        String originalKeywordDate = DateUtil
+          .convertDateWithMillisecondsToSolrDate(keywordLogDate);
+        String keywordId = "" + solrDocument.get(LogSearchConstants.ID);
+
+        // Getting Range Count from StartTime To Keyword Log Time
+        SolrQuery rangeLogQuery = lastLogTime.getCopy();
+        rangeLogQuery.remove("start");
+        rangeLogQuery.remove("rows");
+
+        if (!StringUtils.isBlank(sortByType) && sortByType
+          .equalsIgnoreCase(LogSearchConstants.ASCENDING_ORDER)) {
+       //   keywordLogDate = DateUtil.addMilliSecondsToDate(keywordLogDate, 1);
+          String keywordDateTime = DateUtil
+            .convertDateWithMillisecondsToSolrDate(keywordLogDate);
+          queryGenerator.setSingleRangeFilter(rangeLogQuery,
+            LogSearchConstants.LOGTIME, startTime,
+            keywordDateTime);
+
+
+        } else {
+     //     keywordLogDate = DateUtil.addMilliSecondsToDate(keywordLogDate, -1);
+          String keywordDateTime = DateUtil
+            .convertDateWithMillisecondsToSolrDate(keywordLogDate);
+          queryGenerator.setSingleRangeFilter(rangeLogQuery,
+            LogSearchConstants.LOGTIME, keywordDateTime,
+            endTime);
+        }
+
+
+        long countNumberLogs = countQuery(rangeLogQuery,serviceLogsSolrDao) - 1;
+
+        // Add the documents that share the keyword hit's timestamp (up to the
+        // keyword's own id) to the computed offset
+        try {
+          SolrQuery sameIdQuery = queryGenerator
+            .commonServiceFilterQuery(searchCriteria);
+          queryGenerator.setSingleIncludeFilter(sameIdQuery,
+            LogSearchConstants.LOGTIME, "\"" + originalKeywordDate + "\"");
+          SolrUtil.setFl(sameIdQuery, LogSearchConstants.ID);
+          SolrDocumentList sameQueryDocList = serviceLogsSolrDao.process(sameIdQuery)
+            .getResults();
+          for (SolrDocument solrDocumenent : sameQueryDocList) {
+            if (solrDocumenent != null) {
+              String id = (String) solrDocumenent
+                  .getFieldValue(LogSearchConstants.ID);
+              countNumberLogs++;
+              if (!StringUtils.isBlank(id) && id.equals(keywordId)) {
+                break;
+              }
+            }
+          }
+        } catch (SolrException | SolrServerException | IOException e) {
+          logger.error(e);
+        }
+        int start = (int) ((countNumberLogs / maxRows) * maxRows);
+
+        SolrQuery logIdQuery = lastLogTime.getCopy();
+        rangeLogQuery.remove("start");
+        rangeLogQuery.remove("rows");
+        logIdQuery.setStart(start);
+        logIdQuery.setRows(searchCriteria.getMaxRows());
+        return getLogAsPaginationProvided(logIdQuery, serviceLogsSolrDao);
+      } catch (Exception e) {
+        logger.error("Error while getting page by keyword", e);
+      }
+
+    }
+    throw RESTErrorUtil.createRESTException("The keyword "+"\""+key+"\""+" was not found",
+        MessageEnums.ERROR_SYSTEM);
+  }
+
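+  // Resolves the page containing the log entry with the given id by looking up
+  // its log time, counting the entries between that time and the upper time
+  // bound, and paging to the resulting offset.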
+  private LogSearchResponse getPageByLogId(SearchCriteria searchCriteria) {
+    LogSearchResponse logResponse = new ServiceLogResponse();
+    String endLogTime = (String) searchCriteria.getParamValue("to");
+    if(StringUtils.isBlank(endLogTime)){
+      return logResponse;
+    }
+    long startIndex = 0L;
+
+    String logId = (String) searchCriteria.getParamValue("sourceLogId");
+    if(StringUtils.isBlank(logId)){
+      return logResponse;
+    }
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
+
+    String endTimeMinusOneMilli = "";
+    String logTime = "";
+    try {
+
+      SolrQuery logTimeByIdQuery = new SolrQuery();
+      SolrUtil.setMainQuery(logTimeByIdQuery, null);
+      queryGenerator.setSingleIncludeFilter(logTimeByIdQuery,
+          LogSearchConstants.ID, logId);
+      SolrUtil.setRowCount(logTimeByIdQuery, 1);
+
+      QueryResponse queryResponse = serviceLogsSolrDao
+          .process(logTimeByIdQuery);
+
+      if(queryResponse == null){
+        return new ServiceLogResponse();
+      }
+
+      SolrDocumentList docList = queryResponse.getResults();
+      Date dateOfLogId = null;
+      if (docList != null && !docList.isEmpty()) {
+        SolrDocument dateLogIdDoc = docList.get(0);
+        if(dateLogIdDoc != null){
+          dateOfLogId = (Date) dateLogIdDoc.get(LogSearchConstants.LOGTIME);
+        }
+      }
+
+      if (dateOfLogId != null) {
+        logTime = DateUtil.convertDateWithMillisecondsToSolrDate(dateOfLogId);
+        Date endDate = DateUtils.addMilliseconds(dateOfLogId, 1);
+        endTimeMinusOneMilli = DateUtil.convertDateWithMillisecondsToSolrDate(endDate);
+      }
+
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error(e);
+    }
+
+    try {
+      solrQuery.remove(LogSearchConstants.ID);
+      solrQuery.remove(LogSearchConstants.LOGTIME);
+      queryGenerator.setSingleRangeFilter(solrQuery,
+          LogSearchConstants.LOGTIME, endTimeMinusOneMilli, endLogTime);
+      SolrUtil.setRowCount(solrQuery, 0);
+      startIndex = countQuery(solrQuery,serviceLogsSolrDao);
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error(e);
+    }
+
+    try {
+      SolrQuery sameIdQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
+      queryGenerator.setSingleIncludeFilter(sameIdQuery,
+          LogSearchConstants.LOGTIME, "\"" + logTime + "\"");
+      sameIdQuery.set("fl", LogSearchConstants.ID);
+
+      QueryResponse sameIdResponse = serviceLogsSolrDao.process(sameIdQuery);
+      SolrDocumentList docList = sameIdResponse.getResults();
+
+      for (SolrDocument solrDocumenent : docList) {
+        String id = (String) solrDocumenent
+            .getFieldValue(LogSearchConstants.ID);
+        startIndex++;
+        if (!StringUtils.isBlank(id)) {
+          if (id.equals(logId)) {
+            break;
+          }
+        }
+      }
+
+      SolrQuery logIdQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
+      logIdQuery.remove("rows");
+      logIdQuery.remove("start");
+      int start = (int) ((startIndex / searchCriteria.getMaxRows()) * searchCriteria
+          .getMaxRows());
+      logIdQuery.setStart(start);
+      logIdQuery.setRows(searchCriteria.getMaxRows());
+      logResponse = getLogAsPaginationProvided(logIdQuery,
+          serviceLogsSolrDao);
+      return logResponse;
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error(e);
+    }
+
+    throw RESTErrorUtil.createRESTException("LogId not Found",
+        MessageEnums.ERROR_SYSTEM);
+  }
+
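+  // Runs a range facet on the log time field between the given bounds and
+  // returns one name/value pair per time bucket.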
+  @SuppressWarnings("unchecked")
+  public List<NameValueData> getHistogramCounts(SolrQuery solrQuery,
+                                             String from, String to, String unit) {
+    List<NameValueData> logsCounts = new ArrayList<>();
+    try {
+
+      SolrUtil.setFacetRange(solrQuery, LogSearchConstants.LOGTIME,
+        from, to, unit);
+
+      List<RangeFacet.Count> logLevelCounts = null;
+
+      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+      if(response == null){
+        return logsCounts;
+      }
+      @SuppressWarnings("rawtypes")
+      List<RangeFacet> rangeFacetList = response.getFacetRanges();
+      if (rangeFacetList == null) {
+        return logsCounts;
+
+      }
+
+      @SuppressWarnings("rawtypes")
+      RangeFacet rangeFacet=rangeFacetList.get(0);
+      if (rangeFacet == null) {
+        return logsCounts;
+      }
+      logLevelCounts = rangeFacet.getCounts();
+
+      if(logLevelCounts == null){
+        return logsCounts;
+      }
+      for (RangeFacet.Count logCount : logLevelCounts) {
+        NameValueData nameValue = new NameValueData();
+        nameValue.setName(logCount.getValue());
+        nameValue.setValue("" + logCount.getCount());
+        logsCounts.add(nameValue);
+      }
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error("Error during solrQuery=" + solrQuery, e);
+    }
+    return logsCounts;
+  }
+
+  public List<Count> getFacetCountsByDate(SolrQuery solrQuery,
+                                          String facetField) throws SolrServerException, IOException,
+    SolrException {
+
+    QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+
+    FacetField field = response.getFacetDate(facetField);
+    return field.getValues();
+  }
+
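+  // Builds the level-over-time histogram with a JSON facet query and pads
+  // missing log levels with zero counts so every supported level is present.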
+  @SuppressWarnings("unchecked")
+  public BarGraphDataListResponse getHistogramData(SearchCriteria searchCriteria) {
+    String deafalutValue = "0";
+    BarGraphDataListResponse dataList = new BarGraphDataListResponse();
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
+    solrQuery.set("event", "/audit/logs/histogram");
+    String from = getFrom((String) searchCriteria.getParamValue("from"));
+    String to = getTo((String) searchCriteria.getParamValue("to"));
+    String unit = getUnit((String) searchCriteria.getParamValue("unit"));
+
+    List<BarGraphData> histogramData = new ArrayList<>();
+
+    String jsonHistogramQuery = queryGenerator
+      .buildJSONFacetTermTimeRangeQuery(
+        LogSearchConstants.SOLR_LEVEL,
+        LogSearchConstants.LOGTIME, from, to, unit).replace(
+        "\\", "");
+
+    try {
+      SolrUtil.setJSONFacet(solrQuery, jsonHistogramQuery);
+      SolrUtil.setRowCount(solrQuery, Integer.parseInt(defaultValue));
+      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+      if (response == null){
+        return dataList;
+      }
+      SimpleOrderedMap<Object> jsonFacetResponse = (SimpleOrderedMap<Object>) response
+        .getResponse().get("facets");
+
+      if (jsonFacetResponse == null
+        || jsonFacetResponse.toString().equals("{count=0}")){
+        return dataList;
+      }
+
+      extractValuesFromBuckets(jsonFacetResponse, "x", "y", histogramData);
+
+      Collection<NameValueData> vNameValues = new ArrayList<NameValueData>();
+      List<BarGraphData> graphDatas = new ArrayList<BarGraphData>();
+      for (String level : LogSearchConstants.SUPPORTED_LOG_LEVEL) {
+        boolean isLevelPresent = false;
+        BarGraphData vData1 = null;
+        for (BarGraphData vData2 : histogramData) {
+          String name = vData2.getName();
+          if (level.contains(name)) {
+            isLevelPresent = true;
+            vData1 = vData2;
+            break;
+          }
+          if (vNameValues.isEmpty()) {
+            Collection<NameValueData> vNameValues2 = vData2
+              .getDataCount();
+            for (NameValueData value : vNameValues2) {
+              NameValueData value2 = new NameValueData();
+              value2.setValue(defaultValue);
+              value2.setName(value.getName());
+              vNameValues.add(value2);
+            }
+          }
+        }
+        if (!isLevelPresent) {
+          BarGraphData vBarGraphData = new BarGraphData();
+          vBarGraphData.setName(level);
+          vBarGraphData.setDataCount(vNameValues);
+          graphDatas.add(vBarGraphData);
+        } else {
+          graphDatas.add(vData1);
+        }
+      }
+
+      dataList.setGraphData(graphDatas);
+      return dataList;
+
+    } catch (SolrServerException | SolrException | IOException e) {
+      logger.error(e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+
+    }
+  }
+
+  public void arrangeLevel(String level,
+                           List<BarGraphData> histogramDataLocal,
+                           List<BarGraphData> histogramData) {
+    for (BarGraphData histData : histogramData) {
+      if (histData != null && level.equals(histData.getName())) {
+        histogramDataLocal.add(histData);
+      }
+    }
+  }
+
+  public String cancelFindRequestByDate(String uniqueId) {
+    if (StringUtils.isEmpty(uniqueId)) {
+      logger.error("Unique id is Empty");
+      throw RESTErrorUtil.createRESTException("Unique id is Empty",
+        MessageEnums.DATA_NOT_FOUND);
+    }
+
+    if (cancelByDate.remove(uniqueId)) {
+      mapUniqueId.remove(uniqueId);
+      return "Cancel Request Successfully Procssed ";
+    }
+    return "Cancel Request Unable to Process";
+  }
+
+  public boolean cancelRequest(String uniqueId) {
+    if (StringUtils.isBlank(uniqueId)) {
+      logger.error("Unique id is Empty");
+      throw RESTErrorUtil.createRESTException("Unique id is Empty",
+        MessageEnums.DATA_NOT_FOUND);
+    }
+    for (String date : cancelByDate) {
+      if (uniqueId.equalsIgnoreCase(date)){
+        return false;
+      }
+    }
+    return true;
+  }
+
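+  // Exports the filtered service logs to a text or JSON file, shifting log
+  // times by the client's UTC offset and attaching the include/exclude filter
+  // summary to the generated file.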
+  public Response exportToTextFile(SearchCriteria searchCriteria) {
+    String defaultFormat = "text";
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
+    String from = (String) searchCriteria.getParamValue("from");
+    String to = (String) searchCriteria.getParamValue("to");
+    String utcOffset = (String) searchCriteria.getParamValue("utcOffset");
+    String format = (String) searchCriteria.getParamValue("format");
+
+    format = defaultFormat.equalsIgnoreCase(format) ? ".txt" : ".json";
+    
+    if(StringUtils.isBlank(utcOffset)){
+      utcOffset = "0";
+    }
+
+    if (!DateUtil.isDateValid(from) || !DateUtil.isDateValid(to)) {
+      logger.error("Not valid date format. Valid format should be"
+          + LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z);
+      throw RESTErrorUtil.createRESTException("Not valid date format. Valid format should be"
+          + LogSearchConstants.SOLR_DATE_FORMAT_PREFIX_Z,
+          MessageEnums.INVALID_INPUT_DATA);
+
+    } else {
+      from = from.replace("T", " ");
+      from = from.replace(".", ",");
+
+      to = to.replace("T", " ");
+      to = to.replace(".", ",");
+
+      to = DateUtil.addOffsetToDate(to, Long.parseLong(utcOffset),
+          "yyyy-MM-dd HH:mm:ss,SSS");
+      from = DateUtil.addOffsetToDate(from, Long.parseLong(utcOffset),
+          "yyyy-MM-dd HH:mm:ss,SSS");
+    }
+
+    String fileName = DateUtil.getCurrentDateInString();
+    if (searchCriteria.getParamValue("hostLogFile") != null
+      && searchCriteria.getParamValue("compLogFile") != null) {
+      fileName = searchCriteria.getParamValue("hostLogFile") + "_"
+        + searchCriteria.getParamValue("compLogFile");
+    }
+
+    String textToSave = "";
+    try {
+      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+      if (response == null) {
+        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+            .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      }
+      SolrDocumentList docList = response.getResults();
+      if (docList == null) {
+        throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+            .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+      }
+
+      VSummary vsummary = BizUtil.buildSummaryForLogFile(docList);
+      vsummary.setFormat(format);
+      vsummary.setFrom(from);
+      vsummary.setTo(to);
+
+      String includeString = (String) searchCriteria.getParamValue("iMessage");
+      if (StringUtils.isBlank(includeString)) {
+        includeString = "";
+      }
+
+      String include[] = includeString.split(LogSearchConstants.I_E_SEPRATOR);
+
+      for (String inc : include) {
+        includeString = includeString + ",\"" + inc + "\"";
+      }
+      includeString = includeString.replaceFirst(",", "");
+      if (!StringUtils.isBlank(includeString)) {
+        vsummary.setIncludeString(includeString);
+      }
+
+      String excludeString = null;
+      boolean isNormalExcluded = false;
+
+      excludeString = (String) searchCriteria.getParamValue("eMessage");
+      if (StringUtils.isBlank(excludeString)) {
+        excludeString = "";
+      }
+
+      String exclude[] = excludeString.split(LogSearchConstants.I_E_SEPRATOR);
+      for (String exc : exclude) {
+        excludeString = excludeString + ",\"" + exc + "\"";
+      }
+
+      excludeString = excludeString.replaceFirst(",", "");
+      if (!StringUtils.isBlank(excludeString)) {
+        vsummary.setExcludeString(excludeString);
+        isNormalExcluded = true;
+      }
+
+      String globalExcludeString = (String) searchCriteria
+          .getParamValue("gEMessage");
+      if (StringUtils.isBlank(globalExcludeString)) {
+        globalExcludeString = "";
+      }
+
+      String globalExclude[] = globalExcludeString
+          .split(LogSearchConstants.I_E_SEPRATOR);
+
+      for (String exc : globalExclude) {
+        excludeString = excludeString + ",\"" + exc + "\"";
+      }
+
+      if (!StringUtils.isBlank(excludeString)) {
+        if (!isNormalExcluded) {
+          excludeString = excludeString.replaceFirst(",", "");
+        }
+        vsummary.setExcludeString(excludeString);
+      }
+
+      for (SolrDocument solrDoc : docList) {
+
+        Date logTimeDateObj = (Date) solrDoc.get(LogSearchConstants.LOGTIME);
+        if (logTimeDateObj != null) {
+          String logTime = DateUtil.convertSolrDateToNormalDateFormat(
+              logTimeDateObj.getTime(), Long.parseLong(utcOffset));
+          solrDoc.remove(LogSearchConstants.LOGTIME);
+          solrDoc.addField(LogSearchConstants.LOGTIME, logTime);
+        }
+      }
+
+      if (format.toLowerCase(Locale.ENGLISH).equals(".txt")) {
+        textToSave = BizUtil.convertObjectToNormalText(docList);
+      } else if (format.toLowerCase(Locale.ENGLISH).equals(".json")) {
+        textToSave = convertObjToString(docList);
+      } else {
+        throw RESTErrorUtil.createRESTException(
+            "unsoported format either should be json or text",
+            MessageEnums.ERROR_SYSTEM);
+      }
+      return FileUtil.saveToFile(textToSave, fileName, vsummary);
+
+    } catch (SolrException | SolrServerException | IOException
+      | ParseException e) {
+      logger.error("Error during solrQuery=" + solrQuery, e);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+  }
+
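+  // Returns every component together with its per-level log counts, built from
+  // a type -> level pivot facet.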
+  public NodeListResponse getComponentListWithLevelCounts(SearchCriteria searchCriteria) {
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
+    solrQuery.setParam("event", "/service/logs/components/levels/counts");
+
+    if (searchCriteria.getSortBy() == null) {
+      searchCriteria.setSortBy(LogSearchConstants.SOLR_COMPONENT);
+      searchCriteria.setSortType(SolrQuery.ORDER.asc.toString());
+    }
+    queryGenerator.setFilterFacetSort(solrQuery, searchCriteria);
+    String componentLevelHirachy = "type,level";
+    NodeListResponse list = new NodeListResponse();
+    try {
+
+      SolrUtil.setFacetPivot(solrQuery, 1, componentLevelHirachy);
+
+      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+
+      List<List<PivotField>> listPivotField = new ArrayList<List<PivotField>>();
+      NamedList<List<PivotField>> namedList = response.getFacetPivot();
+      if (namedList != null) {
+        listPivotField = namedList.getAll(componentLevelHirachy);
+      }
+      List<PivotField> secondHirarchicalPivotFields = null;
+      if (listPivotField == null || listPivotField.isEmpty()) {
+        return list;
+      } else {
+        secondHirarchicalPivotFields = listPivotField.get(0);
+      }
+      List<NodeData> datatList = new ArrayList<>();
+      for (PivotField singlePivotField : secondHirarchicalPivotFields) {
+        if (singlePivotField != null) {
+          NodeData comp = new NodeData();
+          comp.setName("" + singlePivotField.getValue());
+          List<PivotField> levelList = singlePivotField.getPivot();
+          List<NameValueData> levelCountList = new ArrayList<>();
+          comp.setLogLevelCount(levelCountList);
+          if(levelList != null){
+            for (PivotField levelPivot : levelList) {
+              NameValueData level = new NameValueData();
+              level.setName(("" + levelPivot.getValue()).toUpperCase());
+              level.setValue("" + levelPivot.getCount());
+              levelCountList.add(level);
+            }
+          }
+          datatList.add(comp);
+        }
+      }
+      list.setvNodeList(datatList);
+      return list;
+    } catch (SolrException | SolrServerException | IOException e) {
+      logger.error(e.getMessage() + "SolrQuery"+solrQuery);
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+  }
+
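+  // Returns the earliest ("From") and latest ("To") log times recorded for the
+  // given bundle id, using two single-row queries sorted in opposite directions.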
+  public NameValueDataListResponse getExtremeDatesForBundelId(SearchCriteria searchCriteria) {
+    SolrQuery solrQuery = new SolrQuery();
+    NameValueDataListResponse nameValueList = new NameValueDataListResponse();
+    try {
+      String bundelId = (String) searchCriteria
+        .getParamValue(LogSearchConstants.BUNDLE_ID);
+      if(StringUtils.isBlank(bundelId)){
+        bundelId = "";
+      }
+
+      queryGenerator.setSingleIncludeFilter(solrQuery,
+        LogSearchConstants.BUNDLE_ID, bundelId);
+
+      SolrUtil.setMainQuery(solrQuery, null);
+      solrQuery.setSort(LogSearchConstants.LOGTIME, SolrQuery.ORDER.asc);
+      SolrUtil.setRowCount(solrQuery, 1);
+
+      List<NameValueData> vNameValues = new ArrayList<>();
+      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+
+      if(response == null){
+        return nameValueList;
+      }
+
+      SolrDocumentList solrDocList = response.getResults();
+      if(solrDocList == null){
+        return nameValueList;
+      }
+      for (SolrDocument solrDoc : solrDocList) {
+
+        Date logTimeAsc = (Date) solrDoc
+          .getFieldValue(LogSearchConstants.LOGTIME);
+        if (logTimeAsc != null) {
+          NameValueData nameValue = new NameValueData();
+          nameValue.setName("From");
+          nameValue.setValue("" + logTimeAsc.getTime());
+          vNameValues.add(nameValue);
+        }
+      }
+
+      solrQuery.clear();
+      SolrUtil.setMainQuery(solrQuery, null);
+      queryGenerator.setSingleIncludeFilter(solrQuery,
+        LogSearchConstants.BUNDLE_ID, bundelId);
+      solrQuery.setSort(LogSearchConstants.LOGTIME, SolrQuery.ORDER.desc);
+      SolrUtil.setRowCount(solrQuery, 1);
+
+      solrDocList.clear();
+      response = serviceLogsSolrDao.process(solrQuery);
+
+      solrDocList = response.getResults();
+      for (SolrDocument solrDoc : solrDocList) {
+        if (solrDoc != null) {
+          Date logTimeDesc = (Date) solrDoc
+              .getFieldValue(LogSearchConstants.LOGTIME);
+
+          if (logTimeDesc != null) {
+            NameValueData nameValue = new NameValueData();
+            nameValue.setName("To");
+            nameValue.setValue("" + logTimeDesc.getTime());
+            vNameValues.add(nameValue);
+          }
+        }
+      }
+      nameValueList.setvNameValues(vNameValues);
+
+
+    } catch (SolrServerException | SolrException | IOException e) {
+      logger.error(e.getMessage() + "SolrQuery"+solrQuery);
+      nameValueList = new NameValueDataListResponse();
+    }
+    return nameValueList;
+  }
+
+  public String getServiceLogsFieldsName() {
+    List<String> fieldsNames = solrServiceLogConfig.getFields();
+    if (fieldsNames.size() > 0) {
+
+      List<String> uiFieldNames = new ArrayList<String>();
+      String temp = null;
+      for (String field : fieldsNames) {
+        temp = solrServiceLogConfig.getSolrAndUiColumns().get(field + LogSearchConstants.SOLR_SUFFIX);
+        if (temp == null){
+          uiFieldNames.add(field);
+        }else{
+          uiFieldNames.add(temp);
+        }
+      }
+      return convertObjToString(uiFieldNames);
+
+    }
+    throw RESTErrorUtil.createRESTException(
+      "No field name found in property file",
+      MessageEnums.DATA_NOT_FOUND);
+
+  }
+
+  public String getServiceLogsSchemaFieldsName() {
+
+    List<String> fieldNames = new ArrayList<String>();
+    String excludeArray[] = Arrays.copyOf(solrServiceLogConfig.getExcludeColumnList().toArray(),
+      solrServiceLogConfig.getExcludeColumnList().size(), String[].class);
+
+    HashMap<String, String> uiFieldColumnMapping = new LinkedHashMap<String, String>();
+    ConfigHelper.getSchemaFieldsName(excludeArray, fieldNames,serviceLogsSolrDao);
+
+    for (String fieldName : fieldNames) {
+      String uiField = solrServiceLogConfig.getSolrAndUiColumns().get(fieldName + LogSearchConstants.SOLR_SUFFIX);
+      if (uiField != null) {
+        uiFieldColumnMapping.put(fieldName, uiField);
+      } else {
+        uiFieldColumnMapping.put(fieldName, fieldName);
+      }
+    }
+
+    HashMap<String, String> uiFieldColumnMappingSorted = new LinkedHashMap<String, String>();
+    uiFieldColumnMappingSorted.put(LogSearchConstants.SOLR_LOG_MESSAGE, LogSearchConstants.SOLR_LOG_MESSAGE);
+
+    Iterator<Entry<String, String>> it = BizUtil
+        .sortHashMapByValues(uiFieldColumnMapping).entrySet().iterator();
+    while (it.hasNext()) {
+      @SuppressWarnings("rawtypes")
+      Map.Entry pair = (Map.Entry) it.next();
+      uiFieldColumnMappingSorted.put("" + pair.getKey(), "" + pair.getValue());
+    }
+
+    return convertObjToString(uiFieldColumnMappingSorted);
+
+  }
+
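+  // Walks the nested JSON facet buckets and turns every outer bucket into a
+  // BarGraphData entry whose data points are the inner (date) buckets.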
+  @SuppressWarnings("unchecked")
+  public void extractValuesFromBuckets(
+    SimpleOrderedMap<Object> jsonFacetResponse, String outerField,
+    String innerField, List<BarGraphData> histogramData) {
+    NamedList<Object> stack = (NamedList<Object>) jsonFacetResponse
+      .get(outerField);
+    ArrayList<Object> stackBuckets = (ArrayList<Object>) stack
+      .get("buckets");
+    for (Object temp : stackBuckets) {
+      BarGraphData vBarGraphData = new BarGraphData();
+
+      SimpleOrderedMap<Object> level = (SimpleOrderedMap<Object>) temp;
+      String name = ((String) level.getVal(0)).toUpperCase();
+      vBarGraphData.setName(name);
+
+      Collection<NameValueData> vNameValues = new ArrayList<NameValueData>();
+      vBarGraphData.setDataCount(vNameValues);
+      ArrayList<Object> levelBuckets = (ArrayList<Object>) ((NamedList<Object>) level
+        .get(innerField)).get("buckets");
+      for (Object temp1 : levelBuckets) {
+        SimpleOrderedMap<Object> countValue = (SimpleOrderedMap<Object>) temp1;
+        String value = DateUtil
+          .convertDateWithMillisecondsToSolrDate((Date) countValue
+            .getVal(0));
+
+        String count = "" + countValue.getVal(1);
+        NameValueData vNameValue = new NameValueData();
+        vNameValue.setName(value);
+        vNameValue.setValue(count);
+        vNameValues.add(vNameValue);
+      }
+      histogramData.add(vBarGraphData);
+    }
+  }
+
+  public BarGraphDataListResponse getAnyGraphData(SearchCriteria searchCriteria) {
+    searchCriteria.addParam("fieldTime", LogSearchConstants.LOGTIME);
+    SolrQuery solrQuery = queryGenerator.commonServiceFilterQuery(searchCriteria);
+    BarGraphDataListResponse result = graphDataGenerator.getAnyGraphData(searchCriteria, serviceLogsSolrDao, solrQuery);
+    if (result == null) {
+      result = new BarGraphDataListResponse();
+    }
+    return result;
+
+  }
+
+  public ServiceLogResponse getAfterBeforeLogs(SearchCriteria searchCriteria) {
+    ServiceLogResponse logResponse = new ServiceLogResponse();
+    List<SolrServiceLogData> docList = null;
+    String id = (String) searchCriteria
+      .getParamValue(LogSearchConstants.ID);
+    if (StringUtils.isBlank(id)) {
+      return logResponse;
+
+    }
+    String maxRows = "";
+
+    maxRows = (String) searchCriteria.getParamValue("numberRows");
+    if (StringUtils.isBlank(maxRows)){
+      maxRows = ""+maxRows;
+    }
+    String scrollType = (String) searchCriteria.getParamValue("scrollType");
+    if(StringUtils.isBlank(scrollType)){
+      scrollType = "";
+    }
+
+    String logTime = null;
+    String sequenceId = null;
+    try {
+      SolrQuery solrQuery = new SolrQuery();
+      SolrUtil.setMainQuery(solrQuery,
+        queryGenerator.buildFilterQuery(LogSearchConstants.ID, id));
+      SolrUtil.setRowCount(solrQuery, 1);
+      QueryResponse response = serviceLogsSolrDao.process(solrQuery);
+      if(response == null){
+        return logResponse;
+      }
+      docList = convertToSolrBeans(response);
+      if (docList != null && !docList.isEmpty()) {
+        Date date = docList.get(0).getLogTime();
+        logTime = DateUtil.convertDateWithMillisecondsToSolrDate(date);
+        sequenceId = ""
+          + docList.get(0).getSeqNum();
+      }
+      if (StringUtils.isBlank(logTime)) {
+        return logResponse;
+      }
+    } catch (SolrServerException | SolrException | IOException e) {
+      throw RESTErrorUtil.createRESTException(MessageEnums.SOLR_ERROR
+          .getMessage().getMessage(), MessageEnums.ERROR_SYSTEM);
+    }
+    if (LogSearchConstants.SCROLL_TYPE_BEFORE.equals(scrollType)) {
+      logResponse = whenScrollUp(searchCriteria, logTime,
+        sequenceId, maxRows);
+
+      List<ServiceLogData> solrDocList = new ArrayList<>();
+      for (ServiceLogData solrDoc : logResponse.getLogList()) {
+        solrDocList.add(solrDoc);
+      }
+      logResponse.setLogList(solrDocList);
+      return logResponse;
+
+    } else if (LogSearchConstants.SCROLL_TYPE_AFTER.equals(scrollType)) {
+      List<ServiceLogData> solrDocList = new ArrayList<>();
+      logResponse = new ServiceLogResponse();
+      for (ServiceLogData solrDoc : whenScrollDown(searchCriteria, logTime,
+          sequenceId, maxRows).getLogList()) {
+        solrDocList.add(solrDoc);
+      }
+      logResponse.setLogList(solrDocList);
+      return logResponse;
+
+    } else {
+      logResponse = new ServiceLogResponse();
+      List<ServiceLogData> initial = new ArrayList<>();
+      List<ServiceLogData> before = whenScrollUp(searchCriteria, logTime,
+        sequenceId, maxRows).getLogList();
+      List<ServiceLogData> after = whenScrollDown(searchCriteria, logTime,
+        sequenceId, maxRows).getLogList();
+      if (before != null && !before.isEmpty()) {
+        for (ServiceLogData solrDoc : Lists.reverse(before)) {
+          initial.add(solrDoc);
+        }
+      }
+
+      initial.add(docList.get(0));
+      if (after != null && !after.isEmpty()) {
+        for (ServiceLogData solrDoc : after) {
+          initial.add(solrDoc);
+        }
+      }
+
+      logResponse.setLogList(initial);
+
+      return logResponse;
+
+    }
+  }
+
+  private ServiceLogResponse whenScrollUp(SearchCriteria searchCriteria,
+                                          String logTime, String sequenceId, String maxRows) {
+    SolrQuery solrQuery = new SolrQuery();
+    SolrUtil.setMainQuery(solrQuery, null);
+    try {
+      int seqNum = Integer.parseInt(sequenceId) - 1;
+      sequenceId = "" + seqNum;
+    } catch (Exception e) {
+      // keep the original sequenceId if it is not numeric
+    }
+    queryGenerator.setSingleRangeFilter(
+      solrQuery,
+      LogSearchConstants.SEQUNCE_ID, "*", sequenceId);
+
+    queryGenerator.applyLogFileFilter(solrQuery, searchCriteria);
+
+    queryGenerator.setSingleRangeFilter(solrQuery,
+      LogSearchConstants.LOGTIME, "*", logTime);
+    SolrUtil.setRowCount(solrQuery, Integer.parseInt(maxRows));
+    String order1 = LogSearchConstants.LOGTIME + " "
+      + LogSearchConstants.DESCENDING_ORDER;
+    String order2 = LogSearchConstants.SEQUNCE_ID + " "
+      + LogSearchConstants.DESCENDING_ORDER;
+    List<String> sortOrder = new ArrayList<String>();
+    sortOrder.add(order1);
+    sortOrder.add(order2);
+    searchCriteria.addParam(LogSearchConstants.SORT, sortOrder);
+    queryGenerator.setMultipleSortOrder(solrQuery, searchCriteria);
+
+    return (ServiceLogResponse) getLogAsPaginationProvided(solrQuery, serviceLogsSolrDao);
+  }
+
+  private ServiceLogResponse whenScrollDown(SearchCriteria searchCriteria,
+                                            String logTime, String sequenceId, String maxRows) {
+    SolrQuery solrQuery = new SolrQuery();
+    SolrUtil.setMainQuery(solrQuery, null);
+    queryGenerator.applyLogFileFilter(solrQuery, searchCriteria);
+
+    try {
+      int seqNum = Integer.parseInt(sequenceId) + 1;
+      sequenceId = "" + seqNum;
+    } catch (Exception e) {
+      // keep the original sequenceId if it is not numeric
+    }
+    queryGenerator.setSingleRangeFilter(
+      solrQuery,
+      LogSearchConstants.SEQUNCE_ID, sequenceId, "*");
+    queryGenerator.setSingleRangeFilter(solrQuery,
+      LogSearchConstants.LOGTIME, logTime, "*");
+    SolrUtil.setRowCount(solrQuery, Integer.parseInt(maxRows));
+
+    String order1 = LogSearchConstants.LOGTIME + " "
+      + LogSearchConstants.ASCENDING_ORDER;
+    String order2 = LogSearchConstants.SEQUNCE_ID + " "
+      + LogSearchConstants.ASCENDING_ORDER;
+    List<String> sortOrder = new ArrayList<String>();
+    sortOrder.add(order1);
+    sortOrder.add(order2);
+    searchCriteria.addParam(LogSearchConstants.SORT, sortOrder);
+    queryGenerator.setMultipleSortOrder(solrQuery, searchCriteria);
+
+    return (ServiceLogResponse) getLogAsPaginationProvided(solrQuery, serviceLogsSolrDao);
+  }
+
+  @Scheduled(cron = "${logsearch.solr.warming.cron}")
+  public void warmingSolrServer(){
+    logger.info("solr warming triggered.");
+    SolrQuery solrQuery = new SolrQuery();
+    TimeZone gmtTimeZone = TimeZone.getTimeZone("GMT");
+    GregorianCalendar utc = new GregorianCalendar(gmtTimeZone);
+    utc.setTimeInMillis(new Date().getTime());
+    // restrict warming to the current UTC day
+    utc.set(Calendar.HOUR_OF_DAY, 0);
+    utc.set(Calendar.MINUTE, 0);
+    utc.set(Calendar.SECOND, 0);
+    utc.set(Calendar.MILLISECOND, 1);
+    String from = DateUtil.convertDateWithMillisecondsToSolrDate(utc.getTime());
+    utc.set(Calendar.HOUR_OF_DAY, 23);
+    utc.set(Calendar.MINUTE, 59);
+    utc.set(Calendar.SECOND, 59);
+    utc.set(Calendar.MILLISECOND, 999);
+    String to = DateUtil.convertDateWithMillisecondsToSolrDate(utc.getTime());
+    queryGenerator.setSingleRangeFilter(solrQuery,
+        LogSearchConstants.LOGTIME, from, to);
+    String level = LogSearchConstants.FATAL+","+LogSearchConstants.ERROR+","+LogSearchConstants.WARN;
+    queryGenerator.setFilterClauseWithFieldName(solrQuery, level,
+        LogSearchConstants.SOLR_LEVEL, "", QueryGenerationBase.Condition.OR);
+    try {
+      serviceLogsSolrDao.process(solrQuery);
+    } catch (SolrServerException | IOException e) {
+      logger.error("Error while warming solr server",e);
+    }
+  }
+
+  @Override
+  protected List<SolrServiceLogData> convertToSolrBeans(QueryResponse response) {
+    return response.getBeans(SolrServiceLogData.class);
+  }
+
+  @Override
+  protected ServiceLogResponse createLogSearchResponse() {
+    return new ServiceLogResponse();
+  }
+
+  private List<LogData> getLogDataListByFieldType(Class clazz, QueryResponse response, List<Count> fieldList) {
+    List<LogData> groupList = getComponentBeans(clazz, response);
+    for (Count count : fieldList) {
+      groupList.add(createNewFieldByType(clazz, count));
+    }
+    return groupList;
+  }
+
+  private <T extends LogData> List<LogData> getComponentBeans(Class<T> clazz, QueryResponse response) {
+    if (clazz.isAssignableFrom(SolrHostLogData.class) || clazz.isAssignableFrom(SolrComponentTypeLogData.class)) {
+      return (List<LogData>) response.getBeans(clazz);
+    } else {
+      throw new UnsupportedOperationException();
+    }
+  }
+
+  private <T extends LogData> LogData createNewFieldByType(Class<T> clazz, Count count) {
+    String name = count.getName();
+    if (clazz.isAssignableFrom(SolrHostLogData.class)) {
+      SolrHostLogData fieldData = new SolrHostLogData();
+      fieldData.setHost(name);
+      return fieldData;
+    }
+    if (clazz.isAssignableFrom(SolrComponentTypeLogData.class)) {
+      SolrComponentTypeLogData fieldData = new SolrComponentTypeLogData();
+      fieldData.setType(name);
+      return fieldData;
+    }
+    throw new UnsupportedOperationException();
+  }
+
+}
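
The getAfterBeforeLogs()/whenScrollUp()/whenScrollDown() methods above implement the "show context around a selected log line" feature as two range-filtered Solr queries anchored on the selected document's log time and sequence number: one query for older entries sorted newest-first, one for newer entries sorted oldest-first. A minimal stand-alone SolrJ sketch of the same idea follows; the Solr URL, collection name ("hadoop_logs") and field names ("logtime", "seq_num") are assumptions for illustration, not taken from the patch.

import java.util.List;

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.common.SolrDocument;

public class ScrollContextSketch {
  public static void main(String[] args) throws Exception {
    // Assumed endpoint and collection; real deployments differ.
    try (SolrClient solr = new HttpSolrClient.Builder("http://localhost:8886/solr/hadoop_logs").build()) {
      String anchorLogtime = "2016-08-18T12:00:00.000Z"; // logtime of the selected document
      long anchorSeqNum = 42L;                           // seq_num of the selected document
      int rows = 10;

      // "before": older entries, newest first (mirrors whenScrollUp)
      SolrQuery before = new SolrQuery("*:*");
      before.addFilterQuery("logtime:[* TO \"" + anchorLogtime + "\"]");
      before.addFilterQuery("seq_num:[* TO " + (anchorSeqNum - 1) + "]");
      before.addSort("logtime", SolrQuery.ORDER.desc);
      before.addSort("seq_num", SolrQuery.ORDER.desc);
      before.setRows(rows);

      // "after": newer entries, oldest first (mirrors whenScrollDown)
      SolrQuery after = new SolrQuery("*:*");
      after.addFilterQuery("logtime:[\"" + anchorLogtime + "\" TO *]");
      after.addFilterQuery("seq_num:[" + (anchorSeqNum + 1) + " TO *]");
      after.addSort("logtime", SolrQuery.ORDER.asc);
      after.addSort("seq_num", SolrQuery.ORDER.asc);
      after.setRows(rows);

      List<SolrDocument> older = solr.query(before).getResults();
      List<SolrDocument> newer = solr.query(after).getResults();
      System.out.println("context: " + older.size() + " older, " + newer.size() + " newer entries");
    }
  }
}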

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/SessionManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/SessionManager.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/SessionManager.java
new file mode 100644
index 0000000..206636a
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/SessionManager.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.manager;
+
+import org.apache.ambari.logsearch.common.LogSearchContext;
+import org.apache.ambari.logsearch.web.model.User;
+import org.apache.log4j.Logger;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.context.SecurityContextHolder;
+import org.springframework.security.web.authentication.WebAuthenticationDetails;
+import org.springframework.stereotype.Component;
+
+@Component
+public class SessionManager {
+
+  private static final Logger logger = Logger.getLogger(SessionManager.class);
+
+  public SessionManager() {
+    logger.debug("SessionManager created");
+  }
+
+  public User processSuccessLogin() {
+    boolean newSessionCreation = true;
+    Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
+    WebAuthenticationDetails details = (WebAuthenticationDetails) authentication.getDetails();
+    String currentLoginId = authentication.getName();
+    LogSearchContext context = LogSearchContext.getContext();
+    User user = context.getUser();
+    if (user != null) {
+      if (validateUser(user, currentLoginId)) {
+        newSessionCreation = false;
+      }
+    }
+    // build and log a fresh session user when the context user is missing or does not match
+    if (newSessionCreation) {
+      user = new User();
+      user.setUsername(currentLoginId);
+      if (details != null) {
+        logger.info("Login Success: loginId=" + currentLoginId + ", sessionId=" + details.getSessionId()
+          + ", requestId=" + details.getRemoteAddress());
+      } else {
+        logger.info("Login Success: loginId=" + currentLoginId + ", msaSessionId=" + ", details is null");
+      }
+
+    }
+
+    return user;
+  }
+
+  private boolean validateUser(User user, String currentUsername) {
+    if (currentUsername.equalsIgnoreCase(user.getUsername())) {
+      return true;
+    } else {
+      logger.info("loginId doesn't match loginId from HTTPSession. Will create new session. loginId="
+        + currentUsername + ", user=" + user, new Exception());
+      return false;
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e9e834bf/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/SessionMgr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/SessionMgr.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/SessionMgr.java
deleted file mode 100644
index c6f7dc5..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/SessionMgr.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.manager;
-
-import org.apache.ambari.logsearch.common.LogSearchContext;
-import org.apache.ambari.logsearch.web.model.User;
-import org.apache.log4j.Logger;
-import org.springframework.security.core.Authentication;
-import org.springframework.security.core.context.SecurityContextHolder;
-import org.springframework.security.web.authentication.WebAuthenticationDetails;
-import org.springframework.stereotype.Component;
-
-@Component
-public class SessionMgr {
-
-  private static final Logger logger = Logger.getLogger(SessionMgr.class);
-
-  public SessionMgr() {
-    logger.debug("SessionManager created");
-  }
-
-  public User processSuccessLogin() {
-    boolean newSessionCreation = true;
-    Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
-    WebAuthenticationDetails details = (WebAuthenticationDetails) authentication.getDetails();
-    String currentLoginId = authentication.getName();
-    LogSearchContext context = LogSearchContext.getContext();
-    User user = context.getUser();
-    if (user != null) {
-      if (validateUser(user, currentLoginId)) {
-        newSessionCreation = false;
-      }
-    }
-    //
-    if (newSessionCreation) {
-      user = new User();
-      user.setUsername(currentLoginId);
-      if (details != null) {
-        logger.info("Login Success: loginId=" + currentLoginId + ", sessionId=" + details.getSessionId()
-          + ", requestId=" + details.getRemoteAddress());
-      } else {
-        logger.info("Login Success: loginId=" + currentLoginId + ", msaSessionId=" + ", details is null");
-      }
-
-    }
-
-    return user;
-  }
-
-  private boolean validateUser(User user, String currentUsername) {
-    if (currentUsername.equalsIgnoreCase(user.getUsername())) {
-      return true;
-    } else {
-      logger.info("loginId doesn't match loginId from HTTPSession. Will create new session. loginId="
-        + currentUsername + ", user=" + user, new Exception());
-      return false;
-    }
-  }
-
-}


[02/50] [abbrv] ambari git commit: AMBARI-18196. Generate REST API docs with Swagger for Log Search (oleewere)

Posted by ol...@apache.org.
AMBARI-18196. Generate REST API docs with Swagger for Log Search (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/326e23d0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/326e23d0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/326e23d0

Branch: refs/heads/branch-dev-logsearch
Commit: 326e23d0be1f8a6705aad3a9cb0df05d6cd4eac6
Parents: e70ba34
Author: oleewere <ol...@gmail.com>
Authored: Thu Aug 18 12:15:52 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:33:57 2016 +0200

----------------------------------------------------------------------
 ambari-logsearch/ambari-logsearch-it/pom.xml    |   1 +
 .../ambari-logsearch-portal/pom.xml             |  32 +++
 .../org/apache/ambari/logsearch/LogSearch.java  |  39 ++-
 .../ambari/logsearch/doc/DocConstants.java      | 135 +++++++++
 .../apache/ambari/logsearch/rest/AuditREST.java | 145 ++++++++++
 .../ambari/logsearch/rest/DashboardREST.java    | 285 +++++++++++++++++++
 .../ambari/logsearch/rest/LogFileREST.java      |  20 ++
 .../ambari/logsearch/rest/PublicREST.java       |   6 +
 .../ambari/logsearch/rest/UserConfigREST.java   |  21 ++
 .../main/webapp/META-INF/applicationContext.xml |  16 ++
 .../META-INF/security-applicationContext.xml    |   3 +-
 .../src/main/webapp/WEB-INF/web.xml             |   5 +-
 .../src/main/webapp/login.html                  |   2 +-
 .../src/main/webapp/scripts/App.js              |   4 +-
 .../src/main/webapp/scripts/utils/Globals.js    |   4 +-
 .../scripts/views/audit/AuditAggregatedView.js  |   2 +-
 .../webapp/scripts/views/tabs/LogFileView.js    |   2 +-
 .../main/webapp/templates/helpers/Helpers.js    |   4 +-
 18 files changed, 705 insertions(+), 21 deletions(-)
----------------------------------------------------------------------
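
The annotations added below follow the usual swagger-annotations 1.5.x pattern: @Api marks a JAX-RS resource, @ApiOperation describes an endpoint, and @ApiImplicitParams enumerates the query parameters that would otherwise be hidden behind the raw HttpServletRequest. As a rough, self-contained illustration of that pattern (the resource name, path and parameter here are invented for the example, not part of the patch):

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;

@Api(value = "example", description = "Example operations")
@Path("example")
public class ExampleREST {

  @GET
  @Path("/search")
  @Produces({"application/json"})
  @ApiOperation("Search example entries")
  @ApiImplicitParams(value = {
    @ApiImplicitParam(value = "Free text query", name = "q", dataType = "string", paramType = "query")
  })
  public String search(@QueryParam("q") String q) {
    // A real resource would delegate to a manager class, as the Log Search REST classes do.
    return "{\"q\":\"" + (q == null ? "" : q) + "\"}";
  }
}

The swagger-jaxrs module reads these annotations at runtime (presumably wired up via the applicationContext.xml change listed above) to build the API listing that the swagger-ui webjar, served under /docs/ by the LogSearch.java change further below, renders in the browser.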


http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-it/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-it/pom.xml b/ambari-logsearch/ambari-logsearch-it/pom.xml
index 075a22a..d918c8a 100644
--- a/ambari-logsearch/ambari-logsearch-it/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-it/pom.xml
@@ -113,6 +113,7 @@
                 <include>**/*Stories.java</include>
                 <include>**/*Story.java</include>
               </includes>
+              <skip>${it.skip}</skip>
             </configuration>
           </execution>
           <execution>

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/pom.xml b/ambari-logsearch/ambari-logsearch-portal/pom.xml
index 7a46fe5..349531e 100755
--- a/ambari-logsearch/ambari-logsearch-portal/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/pom.xml
@@ -35,6 +35,7 @@
     <spring.ldap.version>2.0.4.RELEASE</spring.ldap.version>
     <jersey.version>1.19</jersey.version>
     <jetty-version>9.2.11.v20150529</jetty-version>
+    <swagger.version>1.5.8</swagger.version>
   </properties>
   <profiles>
     <!-- Dev Profile Start -->
@@ -646,5 +647,36 @@
       <artifactId>cglib</artifactId>
       <version>3.2.4</version>
     </dependency>
+    <dependency>
+      <groupId>io.swagger</groupId>
+      <artifactId>swagger-annotations</artifactId>
+      <version>${swagger.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.swagger</groupId>
+      <artifactId>swagger-core</artifactId>
+      <version>${swagger.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>io.swagger</groupId>
+      <artifactId>swagger-jaxrs</artifactId>
+      <version>${swagger.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.ws.rs</groupId>
+          <artifactId>jsr311-api</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>io.swagger</groupId>
+      <artifactId>swagger-models</artifactId>
+      <version>${swagger.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.webjars</groupId>
+      <artifactId>swagger-ui</artifactId>
+      <version>2.1.0</version>
+    </dependency>
   </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
index 819d3b9..d1fbcde 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
@@ -19,6 +19,7 @@
 package org.apache.ambari.logsearch;
 
 import java.io.IOException;
+import java.net.MalformedURLException;
 import java.net.ServerSocket;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -38,6 +39,9 @@ import org.eclipse.jetty.server.SecureRequestCustomizer;
 import org.eclipse.jetty.server.Server;
 import org.eclipse.jetty.server.ServerConnector;
 import org.eclipse.jetty.server.SslConnectionFactory;
+import org.eclipse.jetty.server.handler.HandlerList;
+import org.eclipse.jetty.server.handler.ResourceHandler;
+import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.util.resource.Resource;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
 import org.eclipse.jetty.webapp.WebAppContext;
@@ -79,14 +83,13 @@ public class LogSearch {
   
   public void run(String[] argv) throws Exception {
     Server server = buildSever(argv);
-    URI webResourceBase = findWebResourceBase(server.getClass()
-        .getClassLoader());
-    WebAppContext context = new WebAppContext();
-    context.setBaseResource(Resource.newResource(webResourceBase));
-    context.setContextPath(ROOT_CONTEXT);
-    context.setParentLoaderPriority(true);
-    server.setHandler(context);
+    HandlerList handlers = new HandlerList();
+    handlers.addHandler(createSwaggerContext());
+    handlers.addHandler(createBaseWebappContext());
+
+    server.setHandler(handlers);
     server.start();
+
     logger
         .debug("============================Server Dump=======================================");
     logger.debug(server.dump());
@@ -95,7 +98,7 @@ public class LogSearch {
     ConfigUtil.initializeApplicationConfig();
     server.join();
   }
-  
+
   public Server buildSever(String argv[]) {
     Server server = new Server();
     ServerConnector connector = new ServerConnector(server);
@@ -145,6 +148,26 @@ public class LogSearch {
     return server;
   }
 
+  private WebAppContext createBaseWebappContext() throws MalformedURLException {
+    URI webResourceBase = findWebResourceBase(LogSearch.class.getClassLoader());
+    WebAppContext context = new WebAppContext();
+    context.setBaseResource(Resource.newResource(webResourceBase));
+    context.setContextPath(ROOT_CONTEXT);
+    context.setParentLoaderPriority(true);
+    return context;
+  }
+
+  private ServletContextHandler createSwaggerContext() throws URISyntaxException {
+    ResourceHandler resourceHandler = new ResourceHandler();
+    resourceHandler.setResourceBase(LogSearch.class.getClassLoader()
+      .getResource("META-INF/resources/webjars/swagger-ui/2.1.0")
+      .toURI().toString());
+    ServletContextHandler context = new ServletContextHandler();
+    context.setContextPath("/docs/");
+    context.setHandler(resourceHandler);
+    return context;
+  }
+
   private URI findWebResourceBase(ClassLoader classLoader) {
     URL fileCompleteUrl = Thread.currentThread().getContextClassLoader()
         .getResource(WEB_RESOURCE_FOLDER);
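
With this change the swagger-ui webjar is exposed on its own /docs/ context, registered in the HandlerList ahead of the root web application; note that the hard-coded "2.1.0" in the resource path has to stay in sync with the org.webjars:swagger-ui version declared in the portal pom above. A quick way to check that the context is up once the server has started (host and port are assumptions here, use whatever the portal is actually configured with):

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

public class SwaggerDocsSmokeTest {
  public static void main(String[] args) throws IOException {
    // Assumed host/port for a local Log Search portal instance.
    URL docs = new URL("http://localhost:61888/docs/");
    HttpURLConnection connection = (HttpURLConnection) docs.openConnection();
    connection.setRequestMethod("GET");
    System.out.println("GET " + docs + " -> HTTP " + connection.getResponseCode());
    connection.disconnect();
  }
}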

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
new file mode 100644
index 0000000..c1572b7
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.doc;
+
+public class DocConstants {
+
+  public class CommonDescriptions {
+    public static final String X_AXIS_D = "";
+    public static final String Y_AXIS_D = "";
+    public static final String STACK_BY_D = "";
+    public static final String EXCLUDE_QUERY_D = "";
+    public static final String INCLUDE_QUERY_D = "";
+    public static final String MUST_BE_D = "";
+    public static final String MUST_NOT_D = "";
+    public static final String FROM_D = "";
+    public static final String TO_D = "";
+    public static final String UNIT_D = "";
+    public static final String QUERY_D = "";
+    public static final String COLUMN_QUERY_D = "";
+    public static final String I_MESSAGE_D = "";
+    public static final String G_E_MESSAGE_D = "";
+    public static final String E_MESSAGE_D = "";
+    public static final String IS_LAST_PAGE_D = "";
+    public static final String FIELD_D = "";
+    public static final String FORMAT_D = "";
+  }
+
+  public class AuditOperationDescriptions {
+    public static final String GET_AUDIT_SCHEMA_FIELD_LIST_OD = "";
+    public static final String GET_AUDIT_LOGS_OD = "";
+    public static final String GET_AUDIT_COMPONENTS_OD = "";
+    public static final String GET_AUDIT_LINE_GRAPH_DATA_OD = "";
+    public static final String GET_TOP_AUDIT_USERS_OD = "";
+    public static final String GET_TOP_AUDIT_RESOURCES_OD = "";
+    public static final String GET_TOP_AUDIT_COMPONENTS_OD = "";
+    public static final String GET_LIVE_LOGS_COUNT_OD = "";
+    public static final String GET_REQUEST_USER_LINE_GRAPH_OD = "";
+    public static final String GET_ANY_GRAPH_DATA_OD = "";
+    public static final String EXPORT_USER_TALBE_TO_TEXT_FILE_OD = "";
+    public static final String GET_SERVICE_LOAD_OD = "";
+  }
+
+  public class ServiceDescriptions {
+    public static final String LEVEL_D = "";
+    public static final String ADVANCED_SEARCH_D = "";
+    public static final String TREE_PARAMS_D = "";
+    public static final String START_TIME_D = "";
+    public static final String END_TIME_D = "";
+    public static final String FILE_NAME_D = "";
+    public static final String HOST_NAME_D = "";
+    public static final String COMPONENT_NAME_D = "";
+    public static final String FIND_D = "";
+    public static final String ID_D = "";
+    public static final String HOST_D = "";
+    public static final String COMPONENT_D = "";
+    public static final String KEYWORD_TYPE_D = "";
+    public static final String TOKEN_D = "";
+    public static final String SOURCE_LOG_ID_D = "";
+    public static final String G_MUST_NOT_D = "";
+    public static final String NUMBER_ROWS_D = "";
+    public static final String SCROLL_TYPE_D = "";
+    public static final String UTC_OFFSET_D = "";
+  }
+
+  public class ServiceOperationDescriptions {
+    public static final String SEARCH_LOGS_OD = "";
+    public static final String GET_HOSTS_OD = "";
+    public static final String GET_COMPONENTS_OD = "";
+    public static final String GET_AGGREGATED_INFO_OD = "";
+    public static final String GET_LOG_LEVELS_COUNT_OD = "";
+    public static final String GET_COMPONENTS_COUNT_OD = "";
+    public static final String GET_HOSTS_COUNT_OD = "";
+    public static final String GET_TREE_EXTENSION_OD = "";
+    public static final String GET_HISTOGRAM_DATA_OD = "";
+    public static final String CANCEL_FIND_REQUEST_OD = "";
+    public static final String EXPORT_TO_TEXT_FILE_OD = "";
+    public static final String GET_COMPONENT_LIST_WITH_LEVEL_COUNT_OD = "";
+    public static final String GET_EXTREME_DATES_FOR_BUNDLE_ID_OD = "";
+    public static final String GET_SERVICE_LOGS_FIELD_NAME_OD = "";
+    public static final String GET_ANY_GRAPH_DATA_OD = "";
+    public static final String GET_AFTER_BEFORE_LOGS_OD = "";
+    public static final String GET_HOST_LIST_BY_COMPONENT_OD = "";
+    public static final String GET_SERVICE_LOGS_SCHEMA_FIELD_NAME_OD = "";
+    public static final String GET_HADOOP_SERVICE_CONFIG_JSON_OD = "";
+  }
+
+
+  public class LogFileDescriptions {
+    public static final String HOST_D = "";
+    public static final String COMPONENT_D = "";
+    public static final String LOG_TYPE_D = "";
+  }
+
+  public class LogFileOperationDescriptions {
+    public static final String SEARCH_LOG_FILES_OD = "";
+    public static final String GET_LOG_FILE_TAIL_OD = "";
+  }
+
+  public class PublicOperationDescriptions {
+    public static final String OBTAIN_GENERAL_CONFIG_OD = "Obtain general config";
+  }
+
+  public class UserConfigDescriptions {
+    public static final String USER_ID_D = "";
+    public static final String FILTER_NAME_D = "";
+    public static final String ROW_TYPE_D = "";
+  }
+
+  public class UserConfigOperationDescriptions {
+    public static final String SAVE_USER_CONFIG_OD = "Save user config";
+    public static final String UPDATE_USER_CONFIG_OD = "Update user config";
+    public static final String DELETE_USER_CONFIG_OD = "Delete user config";
+    public static final String GET_USER_CONFIG_OD = "Get user config";
+    public static final String GET_USER_FILTER_OD = "Get user filter";
+    public static final String UPDATE_USER_FILTER_OD = "Update user filter";
+    public static final String GET_USER_FILTER_BY_ID_OD = "Get user filter by id";
+    public static final String GET_ALL_USER_NAMES_OD = "Get all user names";
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditREST.java
index 6d18b4a..5ed49fd 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditREST.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/AuditREST.java
@@ -26,12 +26,20 @@ import javax.ws.rs.Produces;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.Response;
 
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.manager.AuditMgr;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Scope;
 import org.springframework.stereotype.Component;
 
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.*;
+import static org.apache.ambari.logsearch.doc.DocConstants.AuditOperationDescriptions.*;
+
+@Api(value = "audit", description = "Audit operations")
 @Path("audit")
 @Component
 @Scope("request")
@@ -43,6 +51,7 @@ public class AuditREST {
   @GET
   @Path("/getAuditSchemaFieldsName")
   @Produces({"application/json"})
+  @ApiOperation(GET_AUDIT_SCHEMA_FIELD_LIST_OD)
   public String getSolrFieldList(@Context HttpServletRequest request) {
     return auditMgr.getAuditLogsSchemaFieldsName();
   }
@@ -50,6 +59,21 @@ public class AuditREST {
   @GET
   @Path("/getAuditLogs")
   @Produces({"application/json"})
+  @ApiOperation(GET_AUDIT_LOGS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = IS_LAST_PAGE_D, name = "isLastPage", dataType = "boolean", paramType = "query")
+  })
   public String getAuditLogs(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredAuditLogsParams(request);
@@ -60,6 +84,10 @@ public class AuditREST {
   @GET
   @Path("/getAuditComponents")
   @Produces({"application/json"})
+  @ApiOperation(GET_AUDIT_COMPONENTS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+  })
   public String getAuditComponents(@Context HttpServletRequest request) {
 
     SearchCriteria searchCriteria = new SearchCriteria(request);
@@ -70,6 +98,21 @@ public class AuditREST {
   @GET
   @Path("/getAuditLineGraphData")
   @Produces({"application/json"})
+  @ApiOperation(GET_AUDIT_LINE_GRAPH_DATA_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query")
+  })
   public String getAuditLineGraphData(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredAuditLogsParams(request);
@@ -80,6 +123,21 @@ public class AuditREST {
   @GET
   @Path("/getTopAuditUsers")
   @Produces({"application/json"})
+  @ApiOperation(GET_TOP_AUDIT_USERS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query")
+  })
   public String getTopAuditUsers(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredAuditLogsParams(request);
@@ -90,6 +148,21 @@ public class AuditREST {
   @GET
   @Path("/getTopAuditResources")
   @Produces({"application/json"})
+  @ApiOperation(GET_TOP_AUDIT_RESOURCES_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query")
+  })
   public String getTopAuditResources(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredAuditLogsParams(request);
@@ -103,6 +176,22 @@ public class AuditREST {
   @GET
   @Path("/getTopAuditComponents")
   @Produces({"application/json"})
+  @ApiOperation(GET_TOP_AUDIT_COMPONENTS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query")
+  })
   public String getTopAuditComponents(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredAuditLogsParams(request);
@@ -114,6 +203,7 @@ public class AuditREST {
   @GET
   @Path("/getLiveLogsCount")
   @Produces({"application/json"})
+  @ApiOperation(GET_LIVE_LOGS_COUNT_OD)
   public String getLiveLogsCount() {
     return auditMgr.getLiveLogCounts();
   }
@@ -121,6 +211,22 @@ public class AuditREST {
   @GET
   @Path("/getRequestUserLineGraph")
   @Produces({"application/json"})
+  @ApiOperation(GET_REQUEST_USER_LINE_GRAPH_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query")
+  })
   public String getRequestUserLineGraph(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredAuditLogsParams(request);
@@ -132,6 +238,15 @@ public class AuditREST {
   @GET
   @Path("/getAnyGraphData")
   @Produces({"application/json"})
+  @ApiOperation(GET_ANY_GRAPH_DATA_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = X_AXIS_D, name = "xAxis", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = Y_AXIS_D, name = "yAxis", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = STACK_BY_D, name = "stackBy", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query")
+  })
   public String getAnyGraphData(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addParam("xAxis", request.getParameter("xAxis"));
@@ -146,6 +261,22 @@ public class AuditREST {
   @GET
   @Path("/exportUserTableToTextFile")
   @Produces({"application/json"})
+  @ApiOperation(EXPORT_USER_TALBE_TO_TEXT_FILE_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FIELD_D, name = "field", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FORMAT_D, name = "format", dataType = "string", paramType = "query")
+  })
   public Response exportUserTableToTextFile(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredAuditLogsParams(request);
@@ -157,6 +288,20 @@ public class AuditREST {
   @GET
   @Path("/getServiceLoad")
   @Produces({"application/json"})
+  @ApiOperation(GET_SERVICE_LOAD_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+  })
   public String getServiceLoad(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredAuditLogsParams(request);

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/DashboardREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/DashboardREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/DashboardREST.java
index cb87f6f..0144edc 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/DashboardREST.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/DashboardREST.java
@@ -25,6 +25,10 @@ import javax.ws.rs.Produces;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.Response;
 
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.manager.LogsMgr;
@@ -37,6 +41,11 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Scope;
 import org.springframework.stereotype.Component;
 
+import static org.apache.ambari.logsearch.doc.DocConstants.CommonDescriptions.*;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.*;
+import static org.apache.ambari.logsearch.doc.DocConstants.ServiceOperationDescriptions.*;
+
+@Api(value = "dashboard", description = "Dashboard operations")
 @Path("dashboard")
 @Component
 @Scope("request")
@@ -51,6 +60,34 @@ public class DashboardREST {
   @GET
   @Path("/solr/logs_search")
   @Produces({"application/json"})
+  @ApiOperation(SEARCH_LOGS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FIND_D, name = "find", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = SOURCE_LOG_ID_D, name = "sourceLogId", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = KEYWORD_TYPE_D, name = "keywordType", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TOKEN_D, name = "token", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = IS_LAST_PAGE_D, name = "isLastPage", dataType = "boolean", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
   public String searchSolrData(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredServiceLogsParams(request);
@@ -70,6 +107,10 @@ public class DashboardREST {
   @GET
   @Path("/hosts")
   @Produces({"application/json"})
+  @ApiOperation(GET_HOSTS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query")
+  })
   public String getHosts(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addParam("q", request.getParameter("q"));
@@ -79,6 +120,10 @@ public class DashboardREST {
   @GET
   @Path("/components")
   @Produces({"application/json"})
+  @ApiOperation(GET_COMPONENTS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query")
+  })
   public String getComponents(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addParam("q", request.getParameter("q"));
@@ -88,6 +133,27 @@ public class DashboardREST {
   @GET
   @Path("/aggregatedData")
   @Produces({"application/json"})
+  @ApiOperation(GET_AGGREGATED_INFO_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
   public String getAggregatedInfo(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria();
     searchCriteria.addRequiredServiceLogsParams(request);
@@ -97,6 +163,12 @@ public class DashboardREST {
   @GET
   @Path("/levels_count")
   @Produces({"application/json"})
+  @ApiOperation(GET_LOG_LEVELS_COUNT_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
   public VCountList getLogLevelsCount(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria();
     searchCriteria.addParam("q", request.getParameter("q"));
@@ -109,6 +181,12 @@ public class DashboardREST {
   @GET
   @Path("/components_count")
   @Produces({"application/json"})
+  @ApiOperation(GET_COMPONENTS_COUNT_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
   public VCountList getComponentsCount(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria();
     searchCriteria.addParam("q", request.getParameter("q"));
@@ -121,6 +199,14 @@ public class DashboardREST {
   @GET
   @Path("/hosts_count")
   @Produces({"application/json"})
+  @ApiOperation(GET_HOSTS_COUNT_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = QUERY_D, name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query")
+  })
   public VCountList getHostsCount(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria();
     searchCriteria.addParam("q", request.getParameter("q"));
@@ -137,6 +223,29 @@ public class DashboardREST {
   @GET
   @Path("/getTreeExtension")
   @Produces({"application/json"})
+  @ApiOperation(GET_TREE_EXTENSION_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
   public VNodeList getTreeExtension(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredServiceLogsParams(request);
@@ -150,6 +259,29 @@ public class DashboardREST {
   @GET
   @Path("/getLogLevelCounts")
   @Produces({"application/json"})
+  @ApiOperation(GET_LOG_LEVELS_COUNT_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
   public VNameValueList getLogsLevelCount(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredServiceLogsParams(request);
@@ -162,6 +294,30 @@ public class DashboardREST {
   @GET
   @Path("/getHistogramData")
   @Produces({"application/json"})
+  @ApiOperation(GET_HISTOGRAM_DATA_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
   public String getHistogramData(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredServiceLogsParams(request);
@@ -175,6 +331,7 @@ public class DashboardREST {
   @GET
   @Path("/cancelFindRequest")
   @Produces({"application/json"})
+  @ApiOperation(CANCEL_FIND_REQUEST_OD)
   public String cancelFindRequest(@Context HttpServletRequest request) {
     return logMgr.cancelFindRequestByDate(request);
   }
@@ -182,6 +339,31 @@ public class DashboardREST {
   @GET
   @Path("/exportToTextFile")
   @Produces({"application/json"})
+  @ApiOperation(EXPORT_TO_TEXT_FILE_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FORMAT_D, name = "format", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = UTC_OFFSET_D, name = "utcOffset", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
   public Response exportToTextFile(@Context HttpServletRequest request) {
 
     SearchCriteria searchCriteria = new SearchCriteria(request);
@@ -198,6 +380,29 @@ public class DashboardREST {
   @GET
   @Path("/getHostListByComponent")
   @Produces({"application/json"})
+  @ApiOperation(GET_HOST_LIST_BY_COMPONENT_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
   public String getHostListByComponent(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredServiceLogsParams(request);
@@ -212,6 +417,29 @@ public class DashboardREST {
   @GET
   @Path("/getComponentListWithLevelCounts")
   @Produces({"application/json"})
+  @ApiOperation(GET_COMPONENT_LIST_WITH_LEVEL_COUNT_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
   public String getComponentListWithLevelCounts(
     @Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
@@ -225,6 +453,7 @@ public class DashboardREST {
   @GET
   @Path("/solr/getBundleIdBoundaryDates")
   @Produces({"application/json"})
+  @ApiOperation(GET_EXTREME_DATES_FOR_BUNDLE_ID_OD)
   public String getExtremeDatesForBundelId(@Context HttpServletRequest request) {
 
     SearchCriteria searchCriteria = new SearchCriteria(request);
@@ -238,6 +467,7 @@ public class DashboardREST {
   @GET
   @Path("/getServiceLogsFieldsName")
   @Produces({"application/json"})
+  @ApiOperation(GET_SERVICE_LOGS_FIELD_NAME_OD)
   public String getServiceLogsFieldsName() {
     return logMgr.getServiceLogsFieldsName();
   }
@@ -245,6 +475,7 @@ public class DashboardREST {
   @GET
   @Path("/getServiceLogsSchemaFieldsName")
   @Produces({"application/json"})
+  @ApiOperation(GET_SERVICE_LOGS_SCHEMA_FIELD_NAME_OD)
   public String getServiceLogsSchemaFieldsName() {
     return logMgr.getServiceLogsSchemaFieldsName();
   }
@@ -252,6 +483,33 @@ public class DashboardREST {
   @GET
   @Path("/getAnyGraphData")
   @Produces({"application/json"})
+  @ApiOperation(GET_ANY_GRAPH_DATA_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = X_AXIS_D, name = "xAxis", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = Y_AXIS_D, name = "yAxis", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = STACK_BY_D, name = "stackBy", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FROM_D, name = "from", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TO_D, name = "to", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = UNIT_D, name = "unit", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
   public String getAnyGraphData(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredServiceLogsParams(request);
@@ -267,6 +525,32 @@ public class DashboardREST {
   @GET
   @Path("/getAfterBeforeLogs")
   @Produces({"application/json"})
+  @ApiOperation(GET_AFTER_BEFORE_LOGS_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_D,name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ID_D, name = "id", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = SCROLL_TYPE_D, name = "scrollType", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = NUMBER_ROWS_D, name = "numberRows", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = ADVANCED_SEARCH_D, name = "advancedSearch", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = QUERY_D,name = "q", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LEVEL_D, name = "level", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = TREE_PARAMS_D, name = "treeParams", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COLUMN_QUERY_D, name = "columnQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = I_MESSAGE_D, name = "iMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_E_MESSAGE_D, name = "gEMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = E_MESSAGE_D, name = "eMessage", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = G_MUST_NOT_D, name = "gMustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_BE_D, name = "mustBe", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = MUST_NOT_D, name = "mustNot", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_NAME_D, name = "host_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = COMPONENT_NAME_D, name = "component_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = FILE_NAME_D, name = "file_name", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = EXCLUDE_QUERY_D, name = "excludeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = INCLUDE_QUERY_D, name = "includeQuery", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = START_TIME_D, name = "start_time", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = END_TIME_D, name = "end_time", dataType = "string", paramType = "query")
+  })
   public String getAfterBeforeLogs(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addRequiredServiceLogsParams(request);
@@ -284,6 +568,7 @@ public class DashboardREST {
   @GET
   @Path("/getHadoopServiceConfigJSON")
   @Produces({"application/json"})
+  @ApiOperation(GET_HADOOP_SERVICE_CONFIG_JSON_OD)
   public String getHadoopServiceConfigJSON() {
     return logMgr.getHadoopServiceConfigJSON();
   }
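
The hunks above decorate each DashboardREST endpoint with an @ApiOperation summary plus one @ApiImplicitParam per query parameter, because the handlers read their parameters from the raw HttpServletRequest (via SearchCriteria) instead of declaring @QueryParam arguments that Swagger could discover on its own. A minimal, hypothetical resource showing the same shape; the description strings below are placeholders, not entries from the project's DocConstants:

    import io.swagger.annotations.Api;
    import io.swagger.annotations.ApiImplicitParam;
    import io.swagger.annotations.ApiImplicitParams;
    import io.swagger.annotations.ApiOperation;

    import javax.servlet.http.HttpServletRequest;
    import javax.ws.rs.GET;
    import javax.ws.rs.Path;
    import javax.ws.rs.Produces;
    import javax.ws.rs.core.Context;

    @Api(value = "example", description = "Example operations")  // becomes a Swagger tag
    @Path("example")
    public class ExampleREST {

      @GET
      @Path("/items")
      @Produces({"application/json"})
      @ApiOperation("List items matching a query")               // operation summary in swagger.json
      @ApiImplicitParams(value = {
        // Implicit params document query parameters that the handler reads manually
        @ApiImplicitParam(value = "Solr query string", name = "q", dataType = "string", paramType = "query"),
        @ApiImplicitParam(value = "Lower time bound", name = "start_time", dataType = "string", paramType = "query")
      })
      public String getItems(@Context HttpServletRequest request) {
        return "[]"; // the real handlers build a SearchCriteria from the request and delegate to a manager
      }
    }

Once swagger-core scans the org.apache.ambari.logsearch.rest package (see the BeanConfig bean added further down), these annotations are what populate the generated swagger.json.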

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileREST.java
index ef1bb8f..d53cff9 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileREST.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/LogFileREST.java
@@ -24,12 +24,20 @@ import javax.ws.rs.Path;
 import javax.ws.rs.Produces;
 import javax.ws.rs.core.Context;
 
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.manager.LogFileMgr;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Scope;
 import org.springframework.stereotype.Component;
 
+import static org.apache.ambari.logsearch.doc.DocConstants.LogFileDescriptions.*;
+import static org.apache.ambari.logsearch.doc.DocConstants.LogFileOperationDescriptions.*;
+
+@Api(value = "logfile", description = "Logfile operations")
 @Path("logfile")
 @Component
 @Scope("request")
@@ -40,6 +48,12 @@ public class LogFileREST {
 
   @GET
   @Produces({"application/json"})
+  @ApiOperation(SEARCH_LOG_FILES_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LOG_TYPE_D, name = "logType", dataType = "string", paramType = "query")
+  })
   public String searchLogFiles(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addParam("component", request.getParameter("component"));
@@ -51,6 +65,12 @@ public class LogFileREST {
   @GET
   @Path("/getLogFileTail")
   @Produces({"application/json"})
+  @ApiOperation(GET_LOG_FILE_TAIL_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = COMPONENT_D, name = "component", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = HOST_D, name = "host", dataType = "string", paramType = "query"),
+    @ApiImplicitParam(value = LOG_TYPE_D, name = "logType", dataType = "string", paramType = "query")
+  })
   public String getLogFileTail(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria();
     searchCriteria.addParam("host", request.getParameter("host"));

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicREST.java
index 7977703..af48acd 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicREST.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/PublicREST.java
@@ -21,11 +21,16 @@ package org.apache.ambari.logsearch.rest;
 import javax.ws.rs.GET;
 import javax.ws.rs.Path;
 
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
 import org.apache.ambari.logsearch.manager.PublicMgr;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Scope;
 import org.springframework.stereotype.Component;
 
+import static org.apache.ambari.logsearch.doc.DocConstants.PublicOperationDescriptions.OBTAIN_GENERAL_CONFIG_OD;
+
+@Api(value = "public", description = "Public operations")
 @Path("public")
 @Component
 @Scope("request")
@@ -36,6 +41,7 @@ public class PublicREST {
 
   @GET
   @Path("/getGeneralConfig")
+  @ApiOperation(OBTAIN_GENERAL_CONFIG_OD)
   public String getGeneralConfig() {
     return generalMgr.getGeneralConfig();
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
index 40b215c..4b1675f 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/rest/UserConfigREST.java
@@ -29,6 +29,10 @@ import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
 import javax.ws.rs.core.Context;
 
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.SearchCriteria;
 import org.apache.ambari.logsearch.manager.UserConfigMgr;
@@ -38,6 +42,10 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Scope;
 import org.springframework.stereotype.Component;
 
+import static org.apache.ambari.logsearch.doc.DocConstants.UserConfigDescriptions.*;
+import static org.apache.ambari.logsearch.doc.DocConstants.UserConfigOperationDescriptions.*;
+
+@Api(value = "userconfig", description = "User config operations")
 @Path("userconfig")
 @Component
 @Scope("request")
@@ -52,6 +60,7 @@ public class UserConfigREST {
   @POST
   @Path("/saveUserConfig")
   @Produces({"application/json"})
+  @ApiOperation(SAVE_USER_CONFIG_OD)
   public String saveUserConfig(VUserConfig vhist) {
     return userConfigMgr.saveUserConfig(vhist);
   }
@@ -59,12 +68,14 @@ public class UserConfigREST {
   @PUT
   @Path("/updateUserConfig")
   @Produces({"application/json"})
+  @ApiOperation(UPDATE_USER_CONFIG_OD)
   public String updateUserConfig(VUserConfig vhist) {
     return userConfigMgr.updateUserConfig(vhist);
   }
 
   @DELETE
   @Path("/deleteUserConfig/{id}")
+  @ApiOperation(DELETE_USER_CONFIG_OD)
   public void deleteUserConfig(@PathParam("id") String id) {
     userConfigMgr.deleteUserConfig(id);
   }
@@ -72,6 +83,12 @@ public class UserConfigREST {
   @GET
   @Path("/getUserConfig")
   @Produces({"application/json"})
+  @ApiOperation(GET_USER_CONFIG_OD)
+  @ApiImplicitParams(value = {
+    @ApiImplicitParam(value = USER_ID_D, name = "userId", paramType = "query", dataType = "string"),
+    @ApiImplicitParam(value = FILTER_NAME_D, name = "filterName", paramType = "query", dataType = "string"),
+    @ApiImplicitParam(value = ROW_TYPE_D, name = "rowType", paramType = "query", dataType = "string")
+  })
   public String getUserConfig(@Context HttpServletRequest request) {
     SearchCriteria searchCriteria = new SearchCriteria(request);
     searchCriteria.addParam(LogSearchConstants.USER_NAME,
@@ -86,6 +103,7 @@ public class UserConfigREST {
   @GET
   @Path("/user_filter")
   @Produces({"application/json"})
+  @ApiOperation(GET_USER_FILTER_OD)
   public String getUserFilter(@Context HttpServletRequest request) {
     return userConfigMgr.getUserFilter();
   }
@@ -93,6 +111,7 @@ public class UserConfigREST {
   @POST
   @Path("/user_filter")
   @Produces({"application/json"})
+  @ApiOperation(UPDATE_USER_FILTER_OD)
   public String createUserFilter(String json) {
     return userConfigMgr.saveUserFiter(json);
   }
@@ -100,6 +119,7 @@ public class UserConfigREST {
   @PUT
   @Path("/user_filter/{id}")
   @Produces({"application/json"})
+  @ApiOperation(GET_USER_FILTER_BY_ID_OD)
   public String updateUserFilter(String json) {
     return userConfigMgr.saveUserFiter(json);
   }
@@ -107,6 +127,7 @@ public class UserConfigREST {
   @GET
   @Path("/getAllUserName")
   @Produces({"application/json"})
+  @ApiOperation(GET_ALL_USER_NAMES_OD)
   public String getAllUserName() {
     return userConfigMgr.getAllUserName();
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/applicationContext.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/applicationContext.xml b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/applicationContext.xml
index 5e24d88..f334e67 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/applicationContext.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/applicationContext.xml
@@ -49,5 +49,21 @@ http://www.springframework.org/schema/util/spring-util.xsd">
 		</property>
 		<property name="propertiesPersister" ref="xmlPropertyConfigurer" />
 	</bean>
+
+  <bean id="host" class="java.net.InetAddress" factory-method="getLocalHost"/>
+  <bean id="apiListingResource" class="io.swagger.jaxrs.listing.ApiListingResource"/>
+  <bean id="swaggerSerializers" class="io.swagger.jaxrs.listing.SwaggerSerializers" scope="singleton"/>
+  <bean id="beanConfig" class="io.swagger.jaxrs.config.BeanConfig">
+    <property name="schemes" value="http" /> <!-- TODO: set this from property -->
+    <property name="resourcePackage" value="org.apache.ambari.logsearch.rest"/>
+    <property name="version" value="1.0.0"/>
+    <property name="host" value="#{host.hostAddress}:61888"/> <!-- TODO: set port from property -->
+    <property name="basePath" value="/api/v1"/>
+    <property name="title" value="Log Search REST API"/>
+    <property name="description" value="Log aggregation, analysis, and visualization."/>
+    <property name="license" value="Apache 2.0"/>
+    <property name="licenseUrl" value="http://www.apache.org/licenses/LICENSE-2.0.html"/>
+    <property name="scan" value="true"/>
+  </bean>
 	
 </beans>
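
The beanConfig bean above is what makes swagger-core scan the REST package and feed the ApiListingResource registered next to it. For reference, the same wiring expressed directly in Java, with the values copied from the XML; this is an illustrative equivalent, not code from the patch:

    import java.net.InetAddress;
    import io.swagger.jaxrs.config.BeanConfig;

    public class SwaggerSetup {
      public static void configure() throws Exception {
        BeanConfig beanConfig = new BeanConfig();
        beanConfig.setSchemes(new String[] {"http"});                       // TODO in the XML: read from a property
        beanConfig.setHost(InetAddress.getLocalHost().getHostAddress() + ":61888");
        beanConfig.setBasePath("/api/v1");
        beanConfig.setResourcePackage("org.apache.ambari.logsearch.rest");  // package scanned for @Api classes
        beanConfig.setVersion("1.0.0");
        beanConfig.setTitle("Log Search REST API");
        beanConfig.setDescription("Log aggregation, analysis, and visualization.");
        beanConfig.setLicense("Apache 2.0");
        beanConfig.setLicenseUrl("http://www.apache.org/licenses/LICENSE-2.0.html");
        beanConfig.setScan(true);                                           // triggers the scan and caches the Swagger model
      }
    }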

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/security-applicationContext.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/security-applicationContext.xml b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/security-applicationContext.xml
index 96852b5..233fad4 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/security-applicationContext.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/META-INF/security-applicationContext.xml
@@ -31,8 +31,9 @@ http://www.springframework.org/schema/security/spring-security-4.0.xsd">
     <security:http pattern="/libs/**" security="none" />
     <security:http pattern="/images/**" security="none" />
     <security:http pattern="/templates/**" security="none" />
-    <security:http pattern="/service/public/**" security="none" />
     <security:http pattern="/favicon.ico" security="none" />
+    <security:http pattern="/api/v1/public/**" security="none" />
+    <security:http pattern="/api/v1/swagger.json" security="none"/>
   	
 	<security:http disable-url-rewriting="true" use-expressions="true" create-session="always" entry-point-ref="authenticationProcessingFilterEntryPoint">
 		<csrf disabled="true"/> 

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
index dc8519a..aee16bf 100755
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/WEB-INF/web.xml
@@ -65,8 +65,7 @@
 	</servlet>
 	  <servlet-mapping>
 		<servlet-name>REST service</servlet-name>
-		<url-pattern>/service/*</url-pattern>
+		<url-pattern>/api/v1/*</url-pattern>
 	</servlet-mapping>
-	
-	
+
 </web-app>
\ No newline at end of file
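
With the servlet mapping moved from /service/* to /api/v1/* and the security exemptions for /api/v1/public/** and /api/v1/swagger.json added above, the generated specification should be reachable without logging in. A small illustrative check; the host and port are assumed from the BeanConfig defaults, not taken from the patch:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class SwaggerJsonCheck {
      public static void main(String[] args) throws Exception {
        // swagger.json is served by ApiListingResource under the JAX-RS base path
        URL url = new URL("http://localhost:61888/api/v1/swagger.json");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
          String line;
          while ((line = reader.readLine()) != null) {
            System.out.println(line); // prints the generated API description
          }
        }
      }
    }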

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/login.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/login.html b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/login.html
index ff546ef..44f1aeb 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/login.html
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/login.html
@@ -35,7 +35,7 @@
 				var passDiv = $('#passCont');
 
 		        $.ajax({
-					url : "/service/public/getGeneralConfig",
+					url : "/api/v1/public/getGeneralConfig",
 					type: "GET",
 					async: true,
 					dataType: 'json',

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/App.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/App.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/App.js
index ef9b662..24cbd5a 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/App.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/App.js
@@ -60,7 +60,7 @@ define(['backbone','utils/LangSupport', 'backbone.marionette'],function(Backbone
             context: this,
             url: tempUrl,
             success: function(response){
-                    serverUrl += "http://" + response.parameters['logsearch.server.url']+"/service/";
+                    serverUrl += "http://" + response.parameters['logsearch.server.url']+"/api/v1/";
             },
             error: function(response){
                     console.log("Error while getting log search server url");
@@ -70,7 +70,7 @@ define(['backbone','utils/LangSupport', 'backbone.marionette'],function(Backbone
     
         App.baseUrl = serverUrl//Utils.getLogSearchHostDetails();
     } else {
-        App.baseUrl = "service/";
+        App.baseUrl = "api/v1/";
     }
 
     // Add initialize hooks

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Globals.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Globals.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Globals.js
index 212846b..b6e8d7d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Globals.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/utils/Globals.js
@@ -39,8 +39,8 @@ define(['require','App'], function(require, App){
 			totalCount : 50
 	};
 
-	//Globals.baseURL = '../';
-	Globals.baseURL = App.baseUrl;
+	Globals.baseURL = '../api/v1/';
+	//Globals.baseURL = App.baseUrl;
 
 	Globals.AppTabs = {
 			DASHBOARD 			: { value:1, valStr: 'Dashboard'},

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditAggregatedView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditAggregatedView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditAggregatedView.js
index acca51c..c04aaf9 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditAggregatedView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditAggregatedView.js
@@ -408,7 +408,7 @@ define(['require',
 			obj.utcOffset = moment().utcOffset();
 			obj.startIndex =  this.topUsers.state.currentPage * this.topUsers.state.pageSize;
 			var params = $.param(_.extend({},this.topUsers.queryParams,obj));
-			var url = "service/audit/exportUserTableToTextFile?"+ params;
+			var url = "api/v1/audit/exportUserTableToTextFile?"+ params;
 			window.open(url);
 			this.onDialogClosed();
 		}

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
index 6932355..4af4670 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/LogFileView.js
@@ -203,7 +203,7 @@ define(['require',
                 //				{startIndex : this.logFileCollection.state.currentPage * this.logFileCollection.state.pageSize},obj),
                 //				'component','from','to','host','level','unit','startIndex','pageSize','format','utcOffset'));
                 var params = $.param(_.extend({}, this.logFileCollection.queryParams, obj));
-                var url = "service/dashboard/exportToTextFile?" + params;
+                var url = "api/v1/dashboard/exportToTextFile?" + params;
                 window.open(url);
                 this.onDialogClosed();
             },

http://git-wip-us.apache.org/repos/asf/ambari/blob/326e23d0/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/helpers/Helpers.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/helpers/Helpers.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/helpers/Helpers.js
index 2117aa3..f4f1d32 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/helpers/Helpers.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/templates/helpers/Helpers.js
@@ -292,9 +292,9 @@
 		var path;
 			if(_.has(options,'profileImageGId')){
 				if(_.isUndefined(size)) 
-					path = "service/content/multimedia/image/"+options.profileImageGId;
+					path = "api/v1/content/multimedia/image/"+options.profileImageGId;
 				else
-					path = "service/content/multimedia/image/"+options.profileImageGId+"/small";
+					path = "api/v1/content/multimedia/image/"+options.profileImageGId+"/small";
 			}else{
 				path = "styles/images/s-avatar.png";
 			}


[18/50] [abbrv] ambari git commit: AMBARI-18227. Add unit tests for Log Search components and refactor them as needed - Vol 1. (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
AMBARI-18227. Add unit tests for Log Search components and refactor them as needed - Vol 1. (Miklos Gergely via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0dbc40ba
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0dbc40ba
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0dbc40ba

Branch: refs/heads/branch-dev-logsearch
Commit: 0dbc40bae044ede10bef4b495002f709f67ba0ec
Parents: 4562dcd
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Mon Aug 22 18:32:39 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:33:59 2016 +0200

----------------------------------------------------------------------
 .../ambari-logsearch-portal/pom.xml             |  16 +
 .../org/apache/ambari/logsearch/LogSearch.java  |   9 +-
 .../logsearch/common/LogSearchConstants.java    |  11 -
 .../logsearch/common/LogSearchContext.java      |  62 +++
 .../logsearch/common/LogsearchContextUtil.java  |  60 ---
 .../logsearch/common/ManageStartEndTime.java    |  60 +--
 .../ambari/logsearch/common/MessageEnums.java   |  43 +-
 .../ambari/logsearch/common/RequestContext.java | 143 ------
 .../ambari/logsearch/common/SearchCriteria.java |  76 +--
 .../logsearch/common/UserSessionInfo.java       |  46 --
 .../ambari/logsearch/dao/AuditSolrDao.java      |  53 +--
 .../logsearch/dao/ServiceLogsSolrDao.java       |  31 +-
 .../ambari/logsearch/dao/SolrDaoBase.java       | 462 +++++++------------
 .../ambari/logsearch/dao/UserConfigSolrDao.java |  80 ++--
 .../apache/ambari/logsearch/dao/UserDao.java    | 137 ++----
 .../logsearch/dao/UserInfoAttributes.java       |  28 --
 .../logsearch/graph/GraphDataGenerator.java     | 167 +++----
 .../logsearch/graph/GraphDataGeneratorBase.java | 203 ++------
 .../ambari/logsearch/manager/AuditMgr.java      | 323 ++++---------
 .../ambari/logsearch/manager/LogFileMgr.java    |  71 +--
 .../ambari/logsearch/manager/LogsMgr.java       | 148 +++---
 .../ambari/logsearch/manager/MgrBase.java       |  95 ++--
 .../ambari/logsearch/manager/PublicMgr.java     |   2 +-
 .../ambari/logsearch/manager/SessionMgr.java    |  38 +-
 .../ambari/logsearch/manager/UserConfigMgr.java | 139 ++----
 .../ambari/logsearch/query/QueryGeneration.java | 274 ++++-------
 .../logsearch/query/QueryGenerationBase.java    | 292 +++---------
 .../ambari/logsearch/rest/ServiceLogsREST.java  |  10 +-
 .../context/LogsearchContextHolder.java         |  41 --
 .../context/LogsearchSecurityContext.java       |  55 ---
 .../apache/ambari/logsearch/util/BizUtil.java   |  94 ++--
 .../ambari/logsearch/util/CommonUtil.java       |  49 +-
 .../ambari/logsearch/util/ConfigUtil.java       |  40 +-
 .../apache/ambari/logsearch/util/DateUtil.java  | 141 +-----
 .../apache/ambari/logsearch/util/FileUtil.java  |  56 +--
 .../apache/ambari/logsearch/util/JSONUtil.java  | 136 +-----
 .../util/LogsearchPropertiesConfiguration.java  |  89 ----
 .../ambari/logsearch/util/PropertiesUtil.java   |  16 +-
 .../apache/ambari/logsearch/util/QueryBase.java |  16 +-
 .../ambari/logsearch/util/RESTErrorUtil.java    |   2 +-
 .../apache/ambari/logsearch/util/SolrUtil.java  |  75 +--
 .../ambari/logsearch/util/StringUtil.java       |  33 --
 ...LogsearchSecurityContextFormationFilter.java |  28 +-
 ...rchExternalServerAuthenticationProvider.java |   9 +-
 .../LogsearchFileAuthenticationProvider.java    |  16 +-
 .../LogsearchSimpleAuthenticationProvider.java  |   8 +-
 .../common/LogSearchContextUtilTest.java        |  51 ++
 .../common/ManageStartEndTimeTest.java          |  35 ++
 .../ambari/logsearch/dao/AuditSolrDaoTest.java  |  68 +++
 .../logsearch/dao/ServiceLogsSolrDaoTest.java   |  66 +++
 .../ambari/logsearch/dao/SolrDaoBaseTest.java   | 286 ++++++++++++
 .../logsearch/dao/UserConfigSolrDaoTest.java    | 129 ++++++
 .../ambari/logsearch/dao/UserDaoTest.java       |  58 +++
 .../src/test/resources/HadoopServiceConfig.json |  17 +
 .../src/test/resources/applicationContext.xml   |  53 +++
 .../applicationContext_testManagers.xml         |  53 +++
 .../src/test/resources/logsearch.properties     |  32 ++
 .../src/test/resources/user_pass.json           |   8 +
 58 files changed, 1967 insertions(+), 2872 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/pom.xml b/ambari-logsearch/ambari-logsearch-portal/pom.xml
index a886b37..8d345d9 100755
--- a/ambari-logsearch/ambari-logsearch-portal/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-portal/pom.xml
@@ -466,6 +466,17 @@
     </profile>
   </profiles>
   <dependencies>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.easymock</groupId>
+      <artifactId>easymock</artifactId>
+      <version>3.4</version>
+      <scope>test</scope>
+    </dependency>
     <!-- Spring dependencies -->
     <dependency>
       <groupId>org.springframework</groupId>
@@ -477,6 +488,11 @@
       <artifactId>spring-context</artifactId>
       <version>${spring.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.springframework</groupId>
+      <artifactId>spring-test</artifactId>
+      <version>${spring.version}</version>
+    </dependency>
     <!-- Spring Security -->
     <dependency>
       <groupId>org.springframework.security</groupId>
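
The new junit and easymock test dependencies back the DAO and common-class tests listed in the diffstat above. A minimal, self-contained example of the expect/replay/verify style that EasyMock 3.4 supports; it is illustrative only and not one of the project's tests:

    import static org.easymock.EasyMock.createMock;
    import static org.easymock.EasyMock.expect;
    import static org.easymock.EasyMock.replay;
    import static org.easymock.EasyMock.verify;
    import static org.junit.Assert.assertEquals;

    import org.junit.Test;

    public class GreeterTest {

      interface Greeter {
        String greet(String name);
      }

      @Test
      public void testGreet() {
        Greeter greeter = createMock(Greeter.class);            // record phase: create the mock
        expect(greeter.greet("logsearch")).andReturn("hello");  // define the expected call and its result
        replay(greeter);                                        // switch to replay phase

        assertEquals("hello", greeter.greet("logsearch"));      // exercise the collaborator

        verify(greeter);                                        // assert the expected call actually happened
      }
    }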

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
index d1fbcde..fcebcea 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
@@ -24,7 +24,6 @@ import java.net.ServerSocket;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
-import java.util.Timer;
 
 import org.apache.ambari.logsearch.common.ManageStartEndTime;
 import org.apache.ambari.logsearch.solr.metrics.SolrMetricsLoader;
@@ -46,7 +45,6 @@ import org.eclipse.jetty.util.resource.Resource;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
 import org.eclipse.jetty.webapp.WebAppContext;
 
-
 public class LogSearch {
   private static final Logger logger = Logger.getLogger(LogSearch.class);
 
@@ -71,8 +69,7 @@ public class LogSearch {
  
   public static void main(String[] argv) {
     LogSearch logSearch = new LogSearch();
-    Timer timer = new Timer();
-    timer.schedule(new ManageStartEndTime(), 0, 40000);
+    ManageStartEndTime.manage();
     try {
       logSearch.run(argv);
     } catch (Throwable e) {
@@ -149,7 +146,7 @@ public class LogSearch {
   }
 
   private WebAppContext createBaseWebappContext() throws MalformedURLException {
-    URI webResourceBase = findWebResourceBase(LogSearch.class.getClassLoader());
+    URI webResourceBase = findWebResourceBase();
     WebAppContext context = new WebAppContext();
     context.setBaseResource(Resource.newResource(webResourceBase));
     context.setContextPath(ROOT_CONTEXT);
@@ -168,7 +165,7 @@ public class LogSearch {
     return context;
   }
 
-  private URI findWebResourceBase(ClassLoader classLoader) {
+  private URI findWebResourceBase() {
     URL fileCompleteUrl = Thread.currentThread().getContextClassLoader()
         .getResource(WEB_RESOURCE_FOLDER);
     if (fileCompleteUrl != null) {
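
In the first hunk above, main() no longer wires up a java.util.Timer by hand; that scheduling is folded into a static ManageStartEndTime.manage() call. The body of manage() is not part of this diff, so the sketch below only illustrates the kind of helper such a call implies; the 40-second period is carried over from the removed lines, everything else is assumed:

    import java.util.Timer;
    import java.util.TimerTask;

    // Illustrative stand-in, not the actual ManageStartEndTime class from the patch.
    public class StartEndTimeScheduler extends TimerTask {

      public static void manage() {
        Timer timer = new Timer();                              // the removed code used a plain Timer as well
        timer.schedule(new StartEndTimeScheduler(), 0, 40000);  // run immediately, then every 40 seconds
      }

      @Override
      public void run() {
        // periodically adjust the shared start/end time window (details not shown in the diff)
      }
    }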

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
index 5dcdac1..142b29b 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
@@ -33,7 +33,6 @@ public class LogSearchConstants {
 
   // Application Constants
   public static final String HOST = "H";
-  public static final String SERVICE = "S";
   public static final String COMPONENT = "C";
   public static final String SCROLL_TYPE_AFTER = "after";
   public static final String SCROLL_TYPE_BEFORE = "before";
@@ -44,8 +43,6 @@ public class LogSearchConstants {
   public static final String VALUES = "jsons";
   public static final String FILTER_NAME = "filtername";
   public static final String ROW_TYPE = "rowtype";
-  public static final String USER_CONFIG_DASHBOARD = "dashboard";
-  public static final String USER_CONFIG_HISTORY = "history";
   public static final String COMPOSITE_KEY = "composite_filtername-username";
   public static final String SHARE_NAME_LIST = "share_username_list";
 
@@ -73,17 +70,11 @@ public class LogSearchConstants {
   public static final String MINUS_OPERATOR = "-";
   public static final String NO_OPERATOR = "";
 
-
   //operation
   public static final String EXCLUDE_QUERY = "excludeQuery";
   public static final String INCLUDE_QUERY = "includeQuery";
   public static final String COLUMN_QUERY = "columnQuery";
 
-  //URL PARAMS
-  public static final String GLOBAL_START_TIME = "globalStartTime";
-  public static final String GLOBAL_END_TIME = "globalEndTime";
-
-
   // Seprator's
   public static final String I_E_SEPRATOR = "\\|i\\:\\:e\\|";
 
@@ -128,7 +119,5 @@ public class LogSearchConstants {
   public static final String FACET_GROUP = "group";
   public static final String FACET_GROUP_MAIN = "group.main";
   public static final String FACET_GROUP_FIELD = "group.field"; 
-  public static final String FACET_LIMIT = "facet.limit";
   
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchContext.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchContext.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchContext.java
new file mode 100644
index 0000000..b4b52b3
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchContext.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.common;
+
+import java.io.Serializable;
+
+import org.apache.ambari.logsearch.web.model.User;
+
+public class LogSearchContext implements Serializable {
+  
+  private static final long serialVersionUID = 1L;
+  
+  private User user;
+
+  public User getUser() {
+    return user;
+  }
+
+  public void setUser(User user) {
+    this.user = user;
+  }
+
+  //------------------------------------------------------------------------------------------------------
+  
+  private static final ThreadLocal<LogSearchContext> contextThreadLocal = new ThreadLocal<LogSearchContext>();
+
+  public static LogSearchContext getContext() {
+    return contextThreadLocal.get();
+  }
+
+  public static void setContext(LogSearchContext context) {
+    contextThreadLocal.set(context);
+  }
+
+  public static void resetContext() {
+    contextThreadLocal.remove();
+  }
+
+  public static String getCurrentUsername() {
+    LogSearchContext context = LogSearchContext.getContext();
+    if (context != null && context.getUser() != null) {
+        return context.getUser().getUsername();
+    }
+    return null;
+  }
+}
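
The new LogSearchContext replaces the old LogsearchContextUtil/LogsearchSecurityContext pair with a single ThreadLocal holder, so any code running on the request thread (for example SolrDaoBase, whose import is switched later in this patch) can ask for the current username without the context being passed around explicitly. Below is a minimal sketch of how such a context is typically bound per request via a servlet filter; the filter class and the resolveUser() lookup are illustrative assumptions, not part of this commit.

import java.io.IOException;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;

import org.apache.ambari.logsearch.common.LogSearchContext;
import org.apache.ambari.logsearch.web.model.User;

// Illustrative filter (not part of this commit): binds a LogSearchContext to the request thread
// so that LogSearchContext.getCurrentUsername() works anywhere downstream on the same thread.
public class LogSearchContextBindingFilter implements Filter {

  @Override
  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
      throws IOException, ServletException {
    LogSearchContext context = new LogSearchContext();
    context.setUser(resolveUser(request));  // assumption: the security layer supplies the User
    LogSearchContext.setContext(context);
    try {
      chain.doFilter(request, response);
    } finally {
      LogSearchContext.resetContext();      // clear the ThreadLocal so pooled threads do not leak the user
    }
  }

  // Placeholder: how the authenticated user is actually resolved is outside the scope of this sketch.
  private User resolveUser(ServletRequest request) {
    return null;
  }

  @Override
  public void init(FilterConfig filterConfig) {
  }

  @Override
  public void destroy() {
  }
}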

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogsearchContextUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogsearchContextUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogsearchContextUtil.java
deleted file mode 100644
index 617f2fd..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogsearchContextUtil.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.common;
-
-import org.apache.ambari.logsearch.security.context.LogsearchContextHolder;
-import org.apache.ambari.logsearch.security.context.LogsearchSecurityContext;
-
-public class LogsearchContextUtil {
-
-  /**
-   * Singleton class
-   */
-  private LogsearchContextUtil() {
-  }
-
-  public static String getCurrentUsername() {
-    LogsearchSecurityContext context = LogsearchContextHolder.getSecurityContext();
-    if (context != null) {
-      UserSessionInfo userSession = context.getUserSession();
-      if (userSession != null) {
-        return userSession.getUsername();
-      }
-    }
-    return null;
-  }
-
-  public static UserSessionInfo getCurrentUserSession() {
-    UserSessionInfo userSession = null;
-    LogsearchSecurityContext context = LogsearchContextHolder.getSecurityContext();
-    if (context != null) {
-      userSession = context.getUserSession();
-    }
-    return userSession;
-  }
-
-  public static RequestContext getCurrentRequestContext() {
-    LogsearchSecurityContext context = LogsearchContextHolder.getSecurityContext();
-    if (context != null) {
-      return context.getRequestContext();
-    }
-    return null;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java
index 94b7159..b6aa2d0 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/ManageStartEndTime.java
@@ -20,54 +20,36 @@
 package org.apache.ambari.logsearch.common;
 
 import java.util.Date;
-import java.util.GregorianCalendar;
+import java.util.Timer;
 import java.util.TimerTask;
 
-import org.apache.log4j.Logger;
+import org.apache.commons.lang.time.DateUtils;
 
 public class ManageStartEndTime extends TimerTask {
-  static Logger logger = Logger.getLogger(ManageStartEndTime.class);
-
-  public static Date startDate = new Date();
-
-  public static Date endDate = new Date();
-
-  public ManageStartEndTime() {
-    intailizeStartEndTime();
+  private static final int UPDATE_TIME_IN_SECONDS = 40;
+
+  private static Date startDate;
+  private static Date endDate;
+  
+  public static void manage() {
+    Timer timer = new Timer();
+    timer.schedule(new ManageStartEndTime(), 0, UPDATE_TIME_IN_SECONDS * 1000);
+  }
+  
+  private ManageStartEndTime() {
+    endDate = new Date();
+    startDate = DateUtils.addHours(endDate, -1);
   }
 
   @Override
-  public void run() {
-    if (startDate == null){
-      intailizeStartEndTime();
-    }else{
-      adjustStartEndTime();
+  public synchronized void run() {
+    synchronized (ManageStartEndTime.class) {
+      startDate = DateUtils.addSeconds(startDate, UPDATE_TIME_IN_SECONDS);
+      endDate = DateUtils.addHours(startDate, 1);
     }
   }
 
-  private void adjustStartEndTime() {
-    startDate = addSecondsToDate(startDate, 40);
-    endDate = addHoursToDate(startDate, 1);
-  }
-
-  private Date addSecondsToDate(Date date, int i) {
-    GregorianCalendar greorianCalendar = new GregorianCalendar();
-    greorianCalendar.setTime(date);
-    greorianCalendar.add(GregorianCalendar.SECOND, i);
-    return greorianCalendar.getTime();
+  public static synchronized Date[] getStartEndTime() {
+    return new Date[] {startDate, endDate};
   }
-
-  private Date addHoursToDate(Date date, int i) {
-    GregorianCalendar greorianCalendar = new GregorianCalendar();
-    greorianCalendar.setTime(date);
-    greorianCalendar.add(GregorianCalendar.HOUR_OF_DAY, i);
-    return greorianCalendar.getTime();
-  }
-
-  private void intailizeStartEndTime() {
-
-    endDate = new Date();
-    startDate = addHoursToDate(endDate, -1);
-  }
-
 }
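
ManageStartEndTime now keeps a sliding one-hour window instead of two freely mutable public dates: manage() schedules the task every 40 seconds, and each run advances the start by 40 seconds and recomputes the end as start plus one hour, all behind a synchronized accessor. A minimal sketch of how a caller consumes it (the timings in the comments follow the constants in the class):

import java.util.Date;

import org.apache.ambari.logsearch.common.ManageStartEndTime;

public class SlidingWindowExample {
  public static void main(String[] args) throws InterruptedException {
    ManageStartEndTime.manage();                        // schedule once at startup, fires every 40 s

    Date[] window = ManageStartEndTime.getStartEndTime();
    System.out.println("start=" + window[0] + " end=" + window[1]);    // end - start is one hour

    Thread.sleep(45_000);                               // wait for at least one timer tick
    Date[] shifted = ManageStartEndTime.getStartEndTime();
    System.out.println("start=" + shifted[0] + " end=" + shifted[1]);  // both moved ~40 s forward
  }
}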

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java
index 59e698f..786cf99 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/MessageEnums.java
@@ -23,33 +23,30 @@ import org.apache.ambari.logsearch.view.VMessage;
 public enum MessageEnums {
 
   // Common Errors
-  DATA_NOT_FOUND("logsearch.error.data_not_found", "Data not found"), OPER_NOT_ALLOWED_FOR_STATE(
-    "logsearch.error.oper_not_allowed_for_state",
-    "Operation not allowed in current state"), OPER_NOT_ALLOWED_FOR_ENTITY(
-    "logsearch.error.oper_not_allowed_for_state",
-    "Operation not allowed for entity"), OPER_NO_PERMISSION(
-    "logsearch.error.oper_no_permission",
-    "User doesn't have permission to perform this operation"), DATA_NOT_UPDATABLE(
-    "logsearch.error.data_not_updatable", "Data not updatable"), ERROR_CREATING_OBJECT(
-    "logsearch.error.create_object", "Error creating object"), ERROR_DUPLICATE_OBJECT(
-    "logsearch.error.duplicate_object", "Error creating duplicate object"), ERROR_SYSTEM(
-    "logsearch.error.system", "System Error. Please try later."),
-    SOLR_ERROR("logsearch.solr.error","Something went wrong, For more details check the logs or configuration."),
+  DATA_NOT_FOUND("logsearch.error.data_not_found", "Data not found"),
+  OPER_NOT_ALLOWED_FOR_STATE("logsearch.error.oper_not_allowed_for_state", "Operation not allowed in current state"),
+  OPER_NOT_ALLOWED_FOR_ENTITY("logsearch.error.oper_not_allowed_for_state", "Operation not allowed for entity"),
+  OPER_NO_PERMISSION("logsearch.error.oper_no_permission", "User doesn't have permission to perform this operation"),
+  DATA_NOT_UPDATABLE("logsearch.error.data_not_updatable", "Data not updatable"),
+  ERROR_CREATING_OBJECT("logsearch.error.create_object", "Error creating object"),
+  ERROR_DUPLICATE_OBJECT("logsearch.error.duplicate_object", "Error creating duplicate object"),
+  ERROR_SYSTEM("logsearch.error.system", "System Error. Please try later."),
+  SOLR_ERROR("logsearch.solr.error","Something went wrong, For more details check the logs or configuration."),
 
   // Common Validations
-  INVALID_PASSWORD("logsearch.validation.invalid_password", "Invalid password"), INVALID_INPUT_DATA(
-    "logsearch.validation.invalid_input_data", "Invalid input data"), NO_INPUT_DATA(
-    "logsearch.validation.no_input_data", "Input data is not provided"), INPUT_DATA_OUT_OF_BOUND(
-    "logsearch.validation.data_out_of_bound", "Input data if out of bound"), NO_NAME(
-    "logsearch.validation.no_name", "Name is not provided"), NO_OR_INVALID_COUNTRY_ID(
-    "logsearch.validation.no_country_id", "Valid Country Id was not provided"), NO_OR_INVALID_CITY_ID(
-    "logsearch.validation.no_city_id", "Valid City Id was not provided"), NO_OR_INVALID_STATE_ID(
-    "logsearch.validation.no_state_id", "Valid State Id was not provided");
+  INVALID_PASSWORD("logsearch.validation.invalid_password", "Invalid password"),
+  INVALID_INPUT_DATA("logsearch.validation.invalid_input_data", "Invalid input data"),
+  NO_INPUT_DATA("logsearch.validation.no_input_data", "Input data is not provided"),
+  INPUT_DATA_OUT_OF_BOUND("logsearch.validation.data_out_of_bound", "Input data is out of bound"),
+  NO_NAME("logsearch.validation.no_name", "Name is not provided"),
+  NO_OR_INVALID_COUNTRY_ID("logsearch.validation.no_country_id", "Valid Country Id was not provided"),
+  NO_OR_INVALID_CITY_ID("logsearch.validation.no_city_id", "Valid City Id was not provided"),
+  NO_OR_INVALID_STATE_ID("logsearch.validation.no_state_id", "Valid State Id was not provided");
 
-  String rbKey;
-  String messageDesc;
+  private String rbKey;
+  private String messageDesc;
 
-  MessageEnums(String rbKey, String messageDesc) {
+  private MessageEnums(String rbKey, String messageDesc) {
     this.rbKey = rbKey;
     this.messageDesc = messageDesc;
   }
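
The MessageEnums hunk is a pure reflow: one constant per line, private fields, and an explicitly private constructor, with each constant still pairing a resource-bundle key with a default English description. That pattern is easy to miss in the old comma-chained layout, so here is a small self-contained illustration of how such a key/description pair is typically resolved; the bundle name and the resolve() helper are assumptions for illustration, not Log Search API.

import java.util.MissingResourceException;
import java.util.ResourceBundle;

// Self-contained illustration of the "rbKey + default description" enum pattern.
public enum ExampleMessages {
  DATA_NOT_FOUND("logsearch.error.data_not_found", "Data not found");

  private final String rbKey;
  private final String messageDesc;

  ExampleMessages(String rbKey, String messageDesc) {
    this.rbKey = rbKey;
    this.messageDesc = messageDesc;
  }

  // Look the key up in a bundle if one is available, otherwise fall back to the built-in text.
  public String resolve(String bundleName) {
    try {
      return ResourceBundle.getBundle(bundleName).getString(rbKey);
    } catch (MissingResourceException e) {
      return messageDesc;
    }
  }
}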

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/RequestContext.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/RequestContext.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/RequestContext.java
deleted file mode 100644
index 83ed3bd..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/RequestContext.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.common;
-
-import java.io.Serializable;
-
-public class RequestContext implements Serializable {
-  private static final long serialVersionUID = -7083383106845193385L;
-  String ipAddress = null;
-  String msaCookie = null;
-  String userAgent = null;
-  String requestURL = null;
-  String serverRequestId = null;
-  String clientTimeOffset;
-  boolean isSync = true;
-  long startTime = System.currentTimeMillis();
-
-  /**
-   * @return the ipAddress
-   */
-  public String getIpAddress() {
-    return ipAddress;
-  }
-
-  /**
-   * @param ipAddress the ipAddress to set
-   */
-  public void setIpAddress(String ipAddress) {
-    this.ipAddress = ipAddress;
-  }
-
-  /**
-   * @return the msaCookie
-   */
-  public String getMsaCookie() {
-    return msaCookie;
-  }
-
-  /**
-   * @param msaCookie the msaCookie to set
-   */
-  public void setMsaCookie(String msaCookie) {
-    this.msaCookie = msaCookie;
-  }
-
-  /**
-   * @return the userAgent
-   */
-  public String getUserAgent() {
-    return userAgent;
-  }
-
-  /**
-   * @param userAgent the userAgent to set
-   */
-  public void setUserAgent(String userAgent) {
-    this.userAgent = userAgent;
-  }
-
-  /**
-   * @return the serverRequestId
-   */
-  public String getServerRequestId() {
-    return serverRequestId;
-  }
-
-  /**
-   * @param serverRequestId the serverRequestId to set
-   */
-  public void setServerRequestId(String serverRequestId) {
-    this.serverRequestId = serverRequestId;
-  }
-
-  /**
-   * @return the isSync
-   */
-  public boolean isSync() {
-    return isSync;
-  }
-
-  /**
-   * @param isSync the isSync to set
-   */
-  public void setSync(boolean isSync) {
-    this.isSync = isSync;
-  }
-
-  /**
-   * @return the requestURL
-   */
-  public String getRequestURL() {
-    return requestURL;
-  }
-
-  /**
-   * @param requestURL the requestURL to set
-   */
-  public void setRequestURL(String requestURL) {
-    this.requestURL = requestURL;
-  }
-
-  /**
-   * @return the startTime
-   */
-  public long getStartTime() {
-    return startTime;
-  }
-
-  /**
-   * @param startTime the startTime to set
-   */
-  public void setStartTime(long startTime) {
-    this.startTime = startTime;
-  }
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see java.lang.Object#toString()
-   */
-  @Override
-  public String toString() {
-    return "RequestContext [ipAddress=" + ipAddress + ", msaCookie=" + msaCookie + ", userAgent=" + userAgent
-      + ", requestURL=" + requestURL + ",serverRequestId=" + serverRequestId + ", isSync=" + isSync
-      + ", startTime=" + startTime + "]";
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
index d0facbc..2cf2139 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/SearchCriteria.java
@@ -49,44 +49,35 @@ public class SearchCriteria {
 
   public SearchCriteria(HttpServletRequest request) {
     try {
-      if (request.getParameter("startIndex") != null
-        && (!request.getParameter("startIndex").isEmpty())) {
-        this.startIndex = new Integer(
-          request.getParameter("startIndex"));
+      if (request.getParameter("startIndex") != null && (!request.getParameter("startIndex").isEmpty())) {
+        this.startIndex = new Integer(request.getParameter("startIndex"));
       }
-      if (request.getParameter("page") != null
-        && (!request.getParameter("page").isEmpty())) {
+      if (request.getParameter("page") != null && (!request.getParameter("page").isEmpty())) {
         this.page = new Integer(request.getParameter("page"));
       }
-      if (request.getParameter("pageSize") != null
-        && (!request.getParameter("pageSize").isEmpty())) {
+      if (request.getParameter("pageSize") != null && (!request.getParameter("pageSize").isEmpty())) {
         this.maxRows = new Integer(request.getParameter("pageSize"));
       } else {
-        this.maxRows = PropertiesUtil.getIntProperty("db.maxResults",
-          50);
+        this.maxRows = PropertiesUtil.getIntProperty("db.maxResults", 50);
       }
     } catch (NumberFormatException e) {
       // do nothing
     }
 
     // Sort fields
-    if (request.getParameter("sortBy") != null
-      && (!request.getParameter("sortBy").isEmpty())) {
+    if (request.getParameter("sortBy") != null && (!request.getParameter("sortBy").isEmpty())) {
       this.sortBy = "" + request.getParameter("sortBy");
     }
-    if (request.getParameter("sortType") != null
-      && (!request.getParameter("sortType").isEmpty())) {
+    if (request.getParameter("sortType") != null && (!request.getParameter("sortType").isEmpty())) {
       this.sortType = "" + request.getParameter("sortType");
     }
 
     // url params
-    if (request.getParameter("start_time") != null
-      && (!request.getParameter("start_time").isEmpty())) {
+    if (request.getParameter("start_time") != null && (!request.getParameter("start_time").isEmpty())) {
       this.globalStartTime = "" + request.getParameter("start_time");
       this.urlParamMap.put("globalStartTime", request.getParameter("start_time"));
     }
-    if (request.getParameter("end_time") != null
-      && (!request.getParameter("end_time").isEmpty())) {
+    if (request.getParameter("end_time") != null && (!request.getParameter("end_time").isEmpty())) {
       this.globalEndTime = "" + request.getParameter("end_time");
       this.urlParamMap.put("globalEndTime", request.getParameter("end_time"));
     }
@@ -164,31 +155,24 @@ public class SearchCriteria {
   public void addRequiredServiceLogsParams(HttpServletRequest request) {
     this.addParam("advanceSearch", StringEscapeUtils.unescapeXml(request.getParameter("advanceSearch")));
     this.addParam("q", request.getParameter("q"));
-    this.addParam("treeParams", StringEscapeUtils
-      .unescapeHtml(request.getParameter("treeParams")));
+    this.addParam("treeParams", StringEscapeUtils.unescapeHtml(request.getParameter("treeParams")));
     this.addParam("level", request.getParameter("level"));
     this.addParam("gMustNot", request.getParameter("gMustNot"));
     this.addParam("from", request.getParameter("from"));
     this.addParam("to", request.getParameter("to"));
     this.addParam("selectComp", request.getParameter("mustBe"));
     this.addParam("unselectComp", request.getParameter("mustNot"));
-    this.addParam("iMessage", StringEscapeUtils.unescapeXml(request
-      .getParameter("iMessage")));
-    this.addParam("gEMessage", StringEscapeUtils
-      .unescapeXml(request.getParameter("gEMessage")));
-    this
-      .addParam("eMessage", StringEscapeUtils.unescapeXml(request
-        .getParameter("eMessage")));
+    this.addParam("iMessage", StringEscapeUtils.unescapeXml(request.getParameter("iMessage")));
+    this.addParam("gEMessage", StringEscapeUtils.unescapeXml(request.getParameter("gEMessage")));
+    this.addParam("eMessage", StringEscapeUtils.unescapeXml(request.getParameter("eMessage")));
     this.addParam(LogSearchConstants.BUNDLE_ID, request.getParameter(LogSearchConstants.BUNDLE_ID));
     this.addParam("host_name", request.getParameter("host_name"));
     this.addParam("component_name", request.getParameter("component_name"));
     this.addParam("file_name", request.getParameter("file_name"));
     this.addParam("startDate", request.getParameter("start_time"));
     this.addParam("endDate", request.getParameter("end_time"));
-    this.addParam("excludeQuery", StringEscapeUtils.unescapeXml(
-      request.getParameter("excludeQuery")));
-    this.addParam("includeQuery", StringEscapeUtils.unescapeXml(
-      request.getParameter("includeQuery")));
+    this.addParam("excludeQuery", StringEscapeUtils.unescapeXml(request.getParameter("excludeQuery")));
+    this.addParam("includeQuery", StringEscapeUtils.unescapeXml(request.getParameter("includeQuery")));
   }
 
   /**
@@ -196,20 +180,14 @@ public class SearchCriteria {
    */
   public void addRequiredAuditLogsParams(HttpServletRequest request) {
     this.addParam("q", request.getParameter("q"));
-    this.addParam("columnQuery", StringEscapeUtils
-      .unescapeXml(request.getParameter("columnQuery")));
-    this.addParam("iMessage", StringEscapeUtils.unescapeXml(request
-      .getParameter("iMessage")));
-    this.addParam("gEMessage", StringEscapeUtils
-      .unescapeXml(request.getParameter("gEMessage")));
-    this.addParam("eMessage", StringEscapeUtils.unescapeXml(request
-      .getParameter("eMessage")));
+    this.addParam("columnQuery", StringEscapeUtils.unescapeXml(request.getParameter("columnQuery")));
+    this.addParam("iMessage", StringEscapeUtils.unescapeXml(request.getParameter("iMessage")));
+    this.addParam("gEMessage", StringEscapeUtils.unescapeXml(request.getParameter("gEMessage")));
+    this.addParam("eMessage", StringEscapeUtils.unescapeXml(request.getParameter("eMessage")));
     this.addParam("includeString", request.getParameter("mustBe"));
     this.addParam("unselectComp", request.getParameter("mustNot"));
-    this.addParam("excludeQuery", StringEscapeUtils.unescapeXml(
-      request.getParameter("excludeQuery")));
-    this.addParam("includeQuery", StringEscapeUtils.unescapeXml(
-      request.getParameter("includeQuery")));
+    this.addParam("excludeQuery", StringEscapeUtils.unescapeXml(request.getParameter("excludeQuery")));
+    this.addParam("includeQuery", StringEscapeUtils.unescapeXml(request.getParameter("includeQuery")));
     this.addParam("startTime", request.getParameter("from"));
     this.addParam("endTime", request.getParameter("to"));
   }
@@ -220,9 +198,9 @@ public class SearchCriteria {
    */
   public void addParam(String name, Object value) {
     String solrValue = PropertiesUtil.getProperty(name);
-    if (solrValue == null || solrValue.isEmpty()){
+    if (solrValue == null || solrValue.isEmpty()) {
       paramList.put(name, value);
-    }else {
+    } else {
       try {
         String propertyFieldMappings[] = solrValue.split(",");
         HashMap<String, String> propertyFieldValue = new HashMap<String, String>();
@@ -253,14 +231,6 @@ public class SearchCriteria {
   }
 
   /**
-   * @param string
-   * @param caId
-   */
-  public Object removeParam(String name) {
-    return paramList.remove(name);
-  }
-
-  /**
    * @return the nullParamList
    */
   public Set<String> getNullParamList() {
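
Throughout SearchCriteria the commit only joins the wrapped request.getParameter(...) null/empty guards back onto single lines; the logic is unchanged. Since the rest of the patch standardizes on commons-lang, the same guard can also be written with StringUtils.isNotEmpty, which the following sketch shows for the startIndex parameter; this simplification is an illustration, not part of the commit.

import javax.servlet.http.HttpServletRequest;

import org.apache.commons.lang.StringUtils;

// Illustration only: the repeated "param != null && !param.isEmpty()" guard, written with commons-lang.
public class ParamGuardExample {

  static Integer readStartIndex(HttpServletRequest request) {
    String raw = request.getParameter("startIndex");
    if (StringUtils.isNotEmpty(raw)) {
      try {
        return Integer.valueOf(raw);
      } catch (NumberFormatException e) {
        // SearchCriteria swallows malformed numbers the same way
      }
    }
    return null;
  }
}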

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/UserSessionInfo.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/UserSessionInfo.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/UserSessionInfo.java
deleted file mode 100644
index 78fea31..0000000
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/UserSessionInfo.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.common;
-
-import java.io.Serializable;
-
-import org.apache.ambari.logsearch.web.model.User;
-
-public class UserSessionInfo implements Serializable {
-
-  private static final long serialVersionUID = 1L;
-
-  private User user;
-
-  public User getUser() {
-    return user;
-  }
-
-  public void setUser(User user) {
-    this.user = user;
-  }
-
-  public String getUsername() {
-    if (user != null) {
-      return user.getUsername();
-    }
-    return null;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
index 5a56ad7..a6f77e9 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
@@ -19,13 +19,14 @@
 
 package org.apache.ambari.logsearch.dao;
 
-import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 
 import javax.annotation.PostConstruct;
 
-import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
+import org.apache.ambari.logsearch.manager.MgrBase.LogType;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.springframework.stereotype.Component;
 
@@ -35,50 +36,36 @@ public class AuditSolrDao extends SolrDaoBase {
   static private Logger logger = Logger.getLogger(AuditSolrDao.class);
   
   public AuditSolrDao() {
-    super(LOG_TYPE.AUDIT);
+    super(LogType.AUDIT);
   }
 
   @PostConstruct
   public void postConstructor() {
     String solrUrl = PropertiesUtil.getProperty("logsearch.solr.audit.logs.url");
     String zkConnectString = PropertiesUtil.getProperty("logsearch.solr.audit.logs.zk_connect_string");
-    String collection = PropertiesUtil.getProperty(
-      "logsearch.solr.collection.audit.logs", "audit_logs");
-    String aliasNameIn = PropertiesUtil.getProperty(
-        "logsearch.solr.audit.logs.alias.name", "audit_logs_alias");
-    String rangerAuditCollection = PropertiesUtil.getProperty(
-        "logsearch.ranger.audit.logs.collection.name");
-    String splitInterval = PropertiesUtil.getProperty(
-      "logsearch.audit.logs.split.interval.mins", "none");
-    String configName = PropertiesUtil.getProperty(
-      "logsearch.solr.audit.logs.config.name", "audit_logs");
-    int numberOfShards = PropertiesUtil.getIntProperty(
-      "logsearch.collection.audit.logs.numshards", 1);
-    int replicationFactor = PropertiesUtil.getIntProperty(
-      "logsearch.collection.audit.logs.replication.factor", 1);
+    String collection = PropertiesUtil.getProperty("logsearch.solr.collection.audit.logs", "audit_logs");
+    String aliasNameIn = PropertiesUtil.getProperty("logsearch.solr.audit.logs.alias.name", "audit_logs_alias");
+    String rangerAuditCollection = PropertiesUtil.getProperty("logsearch.ranger.audit.logs.collection.name");
+    String splitInterval = PropertiesUtil.getProperty("logsearch.audit.logs.split.interval.mins", "none");
+    String configName = PropertiesUtil.getProperty("logsearch.solr.audit.logs.config.name", "audit_logs");
+    int numberOfShards = PropertiesUtil.getIntProperty("logsearch.collection.audit.logs.numshards", 1);
+    int replicationFactor = PropertiesUtil.getIntProperty("logsearch.collection.audit.logs.replication.factor", 1);
 
     try {
       connectToSolr(solrUrl, zkConnectString, collection);
-      boolean createAlias = false;
-      if (aliasNameIn != null && rangerAuditCollection != null
-          && rangerAuditCollection.trim().length() > 0) {
-        createAlias = true;
-      }
+      
+      boolean createAlias = (aliasNameIn != null && !StringUtils.isBlank(rangerAuditCollection));
       boolean needToPopulateSchemaField = !createAlias;
-      setupCollections(splitInterval, configName, numberOfShards,
-          replicationFactor, needToPopulateSchemaField);
-      if(createAlias) {
-        Collection<String> collectionsIn = new ArrayList<String>();
-        collectionsIn.add(collection);
-        collectionsIn.add(rangerAuditCollection.trim());
+      
+      setupCollections(splitInterval, configName, numberOfShards, replicationFactor, needToPopulateSchemaField);
+      
+      if (createAlias) {
+        Collection<String> collectionsIn = Arrays.asList(collection, rangerAuditCollection.trim());
         setupAlias(aliasNameIn, collectionsIn);
       }
     } catch (Exception e) {
-      logger.error(
-        "Error while connecting to Solr for audit logs : solrUrl="
-          + solrUrl + ", zkConnectString=" + zkConnectString
-          + ", collection=" + collection, e);
+      logger.error("Error while connecting to Solr for audit logs : solrUrl=" + solrUrl + ", zkConnectString=" +
+          zkConnectString + ", collection=" + collection, e);
     }
   }
-
 }
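
The alias decision in AuditSolrDao now leans on commons-lang: StringUtils.isBlank treats null, the empty string, and whitespace-only strings as blank, so !StringUtils.isBlank(rangerAuditCollection) covers the old null check plus trim().length() > 0 in one call, and Arrays.asList replaces the hand-built two-element ArrayList (the fixed-size list is fine here because setupAlias only reads it). A tiny self-contained check of that equivalence, using placeholder sample strings:

import org.apache.commons.lang.StringUtils;

public class IsBlankEquivalence {
  public static void main(String[] args) {
    String[] samples = {null, "", "   ", "audit_logs"};
    for (String s : samples) {
      boolean oldCheck = (s != null && s.trim().length() > 0);
      boolean newCheck = !StringUtils.isBlank(s);
      System.out.println(s + " -> old=" + oldCheck + ", new=" + newCheck);  // the two values always match
    }
  }
}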

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
index 5aba6d1..af6d62d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
@@ -21,7 +21,7 @@ package org.apache.ambari.logsearch.dao;
 
 import javax.annotation.PostConstruct;
 
-import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
+import org.apache.ambari.logsearch.manager.MgrBase.LogType;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.log4j.Logger;
 import org.springframework.stereotype.Component;
@@ -29,10 +29,10 @@ import org.springframework.stereotype.Component;
 @Component
 public class ServiceLogsSolrDao extends SolrDaoBase {
 
-  static private Logger logger = Logger.getLogger(ServiceLogsSolrDao.class);
+  private static final Logger logger = Logger.getLogger(ServiceLogsSolrDao.class);
   
   public ServiceLogsSolrDao() {
-    super(LOG_TYPE.SERVICE);
+    super(LogType.SERVICE);
   }
 
   @PostConstruct
@@ -40,27 +40,18 @@ public class ServiceLogsSolrDao extends SolrDaoBase {
     logger.info("postConstructor() called.");
     String solrUrl = PropertiesUtil.getProperty("logsearch.solr.url");
     String zkConnectString = PropertiesUtil.getProperty("logsearch.solr.zk_connect_string");
-    String collection = PropertiesUtil.getProperty("logsearch.solr.collection.service.logs",
-      "hadoop_logs");
-    String splitInterval = PropertiesUtil.getProperty(
-      "logsearch.service.logs.split.interval.mins", "none");
-    String configName = PropertiesUtil.getProperty(
-      "logsearch.solr.service.logs.config.name", "hadoop_logs");
-    int numberOfShards = PropertiesUtil.getIntProperty(
-      "logsearch.collection.service.logs.numshards", 1);
-    int replicationFactor = PropertiesUtil.getIntProperty(
-      "logsearch.collection.service.logs.replication.factor", 1);
+    String collection = PropertiesUtil.getProperty("logsearch.solr.collection.service.logs", "hadoop_logs");
+    String splitInterval = PropertiesUtil.getProperty("logsearch.service.logs.split.interval.mins", "none");
+    String configName = PropertiesUtil.getProperty("logsearch.solr.service.logs.config.name", "hadoop_logs");
+    int numberOfShards = PropertiesUtil.getIntProperty("logsearch.collection.service.logs.numshards", 1);
+    int replicationFactor = PropertiesUtil.getIntProperty("logsearch.collection.service.logs.replication.factor", 1);
 
     try {
       connectToSolr(solrUrl, zkConnectString, collection);
-      setupCollections(splitInterval, configName, numberOfShards,
-        replicationFactor,true);
+      setupCollections(splitInterval, configName, numberOfShards, replicationFactor, true);
     } catch (Exception e) {
-      logger.error(
-        "error while connecting to Solr for service logs : solrUrl="
-          + solrUrl + ", zkConnectString=" + zkConnectString
-          + ", collection=" + collection, e);
+      logger.error("error while connecting to Solr for service logs : solrUrl=" + solrUrl + ", zkConnectString=" +
+          zkConnectString + ", collection=" + collection, e);
     }
   }
-
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0dbc40ba/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
index cda5e26..2129f9e 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
@@ -24,16 +24,16 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.List;
-import org.apache.ambari.logsearch.common.LogsearchContextUtil;
+
+import org.apache.ambari.logsearch.common.LogSearchContext;
 import org.apache.ambari.logsearch.common.MessageEnums;
-import org.apache.ambari.logsearch.manager.MgrBase.LOG_TYPE;
+import org.apache.ambari.logsearch.manager.MgrBase.LogType;
 import org.apache.ambari.logsearch.util.ConfigUtil;
 import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.PropertiesUtil;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
-import org.apache.ambari.logsearch.util.StringUtil;
+import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrQuery;
@@ -58,104 +58,87 @@ import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.NamedList;
 import org.springframework.beans.factory.annotation.Autowired;
 
+import com.google.common.annotations.VisibleForTesting;
+
 public abstract class SolrDaoBase {
-  static private Logger logger = Logger.getLogger(SolrDaoBase.class);
+  private static final Logger logger = Logger.getLogger(SolrDaoBase.class);
+  private static final Logger logPerformance = Logger.getLogger("org.apache.ambari.logsearch.performance");
   
   public HashMap<String, String> schemaFieldsNameMap = new HashMap<String, String>();
   public HashMap<String, String> schemaFieldTypeMap = new HashMap<String, String>();
-  
-  private static Logger logPerformance = Logger
-    .getLogger("org.apache.ambari.logsearch.performance");
 
   private static final String ROUTER_FIELD = "_router_field_";
- 
-  protected LOG_TYPE logType;
+  
+  private static final int SETUP_RETRY_SECOND = 30;
+  private static final int SETUP_UPDATE_SECOND = 10*60; //10 min
+  private static final int ALIAS_SETUP_RETRY_SECOND = 30*60;
 
-  @Autowired
-  StringUtil stringUtil;
+  private LogType logType;
 
   @Autowired
-  JSONUtil jsonUtil;
-  
+  protected JSONUtil jsonUtil;
   @Autowired
-  RESTErrorUtil restErrorUtil;
-
-  String collectionName = null;
+  protected RESTErrorUtil restErrorUtil;
+
+  @VisibleForTesting
+  protected String collectionName = null;
+  @VisibleForTesting
+  protected SolrClient solrClient = null;
+  @VisibleForTesting
+  protected CloudSolrClient solrClouldClient = null;
+  @VisibleForTesting
+  protected boolean isZkConnectString = false;
   
-  String aliasName = null;
-  Collection<String> aliasCollectionList = new ArrayList<String>();
-
-  private SolrClient solrClient = null;
-  CloudSolrClient solrClouldClient = null;
-
-  boolean isSolrCloud = true;
-  String solrDetail = "";
-
-  boolean isSolrInitialized = false;
-
-  private boolean setup_status = false;
+  private String solrDetail = "";
 
   private boolean populateFieldsThreadActive = false;
-
-  int SETUP_RETRY_SECOND = 30;
-  int SETUP_UPDATE_SECOND = 10*60; //10 min
-  int ALIAS_SETUP_RETRY_SECOND = 30*60; //30 minutes
   
-  private boolean isZkConnectString=false;//by default its false
-  
-  //set logtype
-  public SolrDaoBase(LOG_TYPE logType) {
+  protected SolrDaoBase(LogType logType) {
     this.logType = logType;
   }
 
-  public SolrClient connectToSolr(String url, String zkConnectString,
-                                  String collection) throws Exception {
+  protected SolrClient connectToSolr(String url, String zkConnectString, String collection) throws Exception {
     this.collectionName = collection;
-    solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + collection
-      + ", url=" + url;
+    solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + collection + ", url=" + url;
 
     logger.info("connectToSolr() " + solrDetail);
-    if (stringUtil.isEmpty(collection)) {
-      throw new Exception("For solr, collection name is mandatory. "
-        + solrDetail);
+    if (StringUtils.isBlank(collection)) {
+      throw new Exception("For solr, collection name is mandatory. " + solrDetail);
     }
+    
     setupSecurity();
-    if (!stringUtil.isEmpty(zkConnectString)) {
-      isZkConnectString=true;
+    
+    if (solrClient != null) {
+      return solrClient;
+    }
+      
+    if (!StringUtils.isBlank(zkConnectString)) {
+      isZkConnectString = true;
       solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + collection;
-      logger.info("Using zookeepr. " + solrDetail);
+      logger.info("Using zookeeper. " + solrDetail);
       solrClouldClient = new CloudSolrClient(zkConnectString);
       solrClouldClient.setDefaultCollection(collection);
       solrClient = solrClouldClient;
       int waitDurationMS = 3 * 60 * 1000;
       checkSolrStatus(waitDurationMS);
     } else {
-      if (stringUtil.isEmpty(url)) {
-        throw new Exception("Both zkConnectString and URL are empty. zkConnectString="
-          + zkConnectString + ", collection=" + collection + ", url="
-          + url);
+      if (StringUtils.isBlank(url)) {
+        throw new Exception("Both zkConnectString and URL are empty. zkConnectString=" + zkConnectString + ", " +
+            "collection=" + collection + ", url=" + url);
       }
       solrDetail = "collection=" + collection + ", url=" + url;
       String collectionURL = url + "/" + collection;
       logger.info("Connecting to  solr : " + collectionURL);
       solrClient = new HttpSolrClient(collectionURL);
-
     }
-    // populateSchemaFields(collection);
-    return solrClient;
-  }
-
-  public SolrClient getSolrClient() {
     return solrClient;
   }
-
+  
   /**
    * This will try to get the collections from the Solr. Ping doesn't work if
    * collection is not given
-   *
-   * @param waitDurationMS
    */
-  public boolean checkSolrStatus(int waitDurationMS) {
+  protected boolean checkSolrStatus(int waitDurationMS) {
     boolean status = false;
     try {
       long beginTimeMS = System.currentTimeMillis();
@@ -166,10 +149,7 @@ public abstract class SolrDaoBase {
         try {
           List<String> collectionList = getCollections();
           if (collectionList != null) {
-            logger.info("checkSolrStatus(): Solr getCollections() is success. solr="
-              + solrDetail
-              + ", collectionList="
-              + collectionList);
+            logger.info("checkSolrStatus(): Solr getCollections() is success. solr=" + solrDetail + ", collectionList=" + collectionList);
             status = true;
             break;
           }
@@ -177,17 +157,12 @@ public abstract class SolrDaoBase {
           logger.error("Error while doing Solr check", ex);
         }
         if (System.currentTimeMillis() - beginTimeMS > waitDurationMS) {
-          logger.error("Solr is not reachable even after "
-            + (System.currentTimeMillis() - beginTimeMS)
-            + " ms. If you are using alias, then you might have to restart LogSearch after Solr is up and running. solr="
-            + solrDetail);
+          logger.error("Solr is not reachable even after " + (System.currentTimeMillis() - beginTimeMS) + " ms. " +
+              "If you are using alias, then you might have to restart LogSearch after Solr is up and running. solr=" + solrDetail);
           break;
         } else {
-          logger.warn("Solr is not not reachable yet. getCollections() attempt count="
-            + pingCount
-            + ". Will sleep for "
-            + waitIntervalMS
-            + " ms and try again." + " solr=" + solrDetail);
+          logger.warn("Solr is not not reachable yet. getCollections() attempt count=" + pingCount + ". " +
+              "Will sleep for " + waitIntervalMS + " ms and try again." + " solr=" + solrDetail);
         }
         Thread.sleep(waitIntervalMS);
 
@@ -198,37 +173,34 @@ public abstract class SolrDaoBase {
     return status;
   }
 
-  public void setupAlias(final String aliasNameIn, final Collection<String> collectionListIn ) throws Exception {
-    if( aliasNameIn == null || collectionListIn== null || collectionListIn.size() == 0 || solrClouldClient == null) {
-      logger.info("Will not create alias " + aliasNameIn + " for "
-        + (collectionListIn==null?null: collectionListIn.toString()) + ", solrCloudClient=" + solrClouldClient);
+  protected void setupAlias(final String aliasNameIn, final Collection<String> collectionListIn ) throws Exception {
+    if (aliasNameIn == null || collectionListIn == null || collectionListIn.size() == 0 || solrClouldClient == null) {
+      logger.info("Will not create alias " + aliasNameIn + " for " +
+          (collectionListIn == null ? null: collectionListIn.toString()) + ", solrCloudClient=" + solrClouldClient);
       return;
     }
-    logger.info("setupAlias " + aliasNameIn + " for " + (collectionListIn==null?null: collectionListIn.toString()));
-    aliasName = aliasNameIn;
-    aliasCollectionList = collectionListIn;
-
+    
+    logger.info("setupAlias " + aliasNameIn + " for " + (collectionListIn == null ? null: collectionListIn.toString()));
     // Start a background thread to do setup
     Thread setupThread = new Thread("setup_alias_" + aliasNameIn) {
       @Override
       public void run() {
-        logger.info("Started monitoring thread to check availability of Solr server. alias="
-            + aliasNameIn + ", collections=" + collectionListIn.toString());
+        logger.info("Started monitoring thread to check availability of Solr server. alias=" + aliasNameIn +
+            ", collections=" + collectionListIn.toString());
         int retryCount = 0;
         while (true) {
           try {
-            int count = createAlias(aliasNameIn,collectionListIn);
+            int count = createAlias(aliasNameIn, collectionListIn);
             if (count > 0) {
               solrClouldClient.setDefaultCollection(aliasNameIn);
-              if( count == collectionListIn.size()) {
-                logger.info("Setup for alias " + aliasNameIn
-                    + " is successful. Exiting setup retry thread. Collections=" + collectionListIn);
+              if (count == collectionListIn.size()) {
+                logger.info("Setup for alias " + aliasNameIn + " is successful. Exiting setup retry thread. " +
+                    "Collections=" + collectionListIn);
                 populateSchemaFields();
                 break;
               }
             } else {
-              logger.warn("Not able to create alias="
-                  + aliasNameIn + ", retryCount=" + retryCount);
+              logger.warn("Not able to create alias=" + aliasNameIn + ", retryCount=" + retryCount);
             }
           } catch (Exception e) {
             logger.error("Error setting up alias=" + aliasNameIn, e);
@@ -236,8 +208,7 @@ public abstract class SolrDaoBase {
           try {
             Thread.sleep(ALIAS_SETUP_RETRY_SECOND * 1000);
           } catch (InterruptedException sleepInterrupted) {
-            logger.info("Sleep interrupted while setting up alias "
-                + aliasNameIn);
+            logger.info("Sleep interrupted while setting up alias " + aliasNameIn);
             break;
           }
           retryCount++;
@@ -245,93 +216,64 @@ public abstract class SolrDaoBase {
       }
     };
     setupThread.setDaemon(true);
-    setupThread.start();     
+    setupThread.start();
   }
   
-  /**
-   * @param aliasNameIn
-   * @param collectionListIn
-   * @return
-   * @throws IOException 
-   * @throws SolrServerException 
-   */
-  protected int createAlias(String aliasNameIn,
-      Collection<String> collectionListIn) throws SolrServerException, IOException {
-    List<String> collections = getCollections();
-    List<String> collectionToAdd = new ArrayList<String>();
-    for (String col : collections) {
-      if( collectionListIn.contains(col)) {
-        collectionToAdd.add(col);
-      }
-    }
+  private int createAlias(String aliasNameIn, Collection<String> collectionListIn) throws SolrServerException, IOException {
+    List<String> collectionToAdd = getCollections();
+    collectionToAdd.retainAll(collectionListIn);
+    
     String collectionsCSV = null;
-    if( collectionToAdd.size() > 0 ) {
-      for (String col : collectionToAdd) {
-        if(collectionsCSV == null) {
-          collectionsCSV = col;
-        } else {
-          collectionsCSV = collectionsCSV + ","  + col;
-        }
-      }
+    if (!collectionToAdd.isEmpty()) {
+      collectionsCSV = StringUtils.join(collectionToAdd, ',');
       CollectionAdminRequest.CreateAlias aliasCreateRequest = new CollectionAdminRequest.CreateAlias(); 
       aliasCreateRequest.setAliasName(aliasNameIn);
       aliasCreateRequest.setAliasedCollections(collectionsCSV);
       CollectionAdminResponse createResponse = aliasCreateRequest.process(solrClouldClient);
       if (createResponse.getStatus() != 0) {
-        logger.error("Error creating alias. alias="
-        + aliasNameIn + ", collectionList=" + collectionsCSV
-        + ", solrDetail=" + solrDetail + ", response="
-        + createResponse);
+        logger.error("Error creating alias. alias=" + aliasNameIn + ", collectionList=" + collectionsCSV +
+            ", solrDetail=" + solrDetail + ", response=" + createResponse);
         return 0;
       }
     } 
-    if( collectionToAdd.size() == collectionListIn.size()) {
-      logger.info("Created alias for all collections. alias=" + aliasNameIn + ", collectionsCSV="
-          + collectionsCSV + ", solrDetail=" + solrDetail);        
+    if ( collectionToAdd.size() == collectionListIn.size()) {
+      logger.info("Created alias for all collections. alias=" + aliasNameIn + ", collectionsCSV=" + collectionsCSV +
+          ", solrDetail=" + solrDetail);
     } else {
-      logger.info("Created alias for " + collectionToAdd.size() + " out of " + 
-          + collectionListIn.size() + " collections. alias=" + aliasNameIn 
-          + ", collectionsCSV=" + collectionsCSV + ", solrDetail=" + solrDetail);
+      logger.info("Created alias for " + collectionToAdd.size() + " out of " + collectionListIn.size() + " collections. " +
+          "alias=" + aliasNameIn + ", collectionsCSV=" + collectionsCSV + ", solrDetail=" + solrDetail);
     }
     return collectionToAdd.size();
   }
 
-  public void setupCollections(final String splitMode, final String configName,
-      final int numberOfShards, final int replicationFactor,boolean needToPopulateSchemaField) throws Exception {
+  protected void setupCollections(final String splitInterval, final String configName, final int numberOfShards,
+      final int replicationFactor, boolean needToPopulateSchemaField) throws Exception {
     if (isZkConnectString) {
-      setup_status = createCollectionsIfNeeded(splitMode, configName,
-          numberOfShards, replicationFactor);
-      logger.info("Setup status for " + collectionName + " is " + setup_status);
-      if (!setup_status) {
+      boolean setupStatus = createCollectionsIfNeeded(splitInterval, configName, numberOfShards, replicationFactor);
+      logger.info("Setup status for " + collectionName + " is " + setupStatus);
+      if (!setupStatus) {
         // Start a background thread to do setup
         Thread setupThread = new Thread("setup_collection_" + collectionName) {
           @Override
           public void run() {
-            logger
-                .info("Started monitoring thread to check availability of Solr server. collection="
-                    + collectionName);
+            logger.info("Started monitoring thread to check availability of Solr server. collection=" + collectionName);
             int retryCount = 0;
             while (true) {
               try {
                 Thread.sleep(SETUP_RETRY_SECOND * 1000);
                 retryCount++;
-                setup_status = createCollectionsIfNeeded(splitMode, configName,
-                    numberOfShards, replicationFactor);
-                if (setup_status) {
-                  logger.info("Setup for collection " + collectionName
-                      + " is successful. Exiting setup retry thread");
+                boolean setupStatus = createCollectionsIfNeeded(splitInterval, configName, numberOfShards, replicationFactor);
+                if (setupStatus) {
+                  logger.info("Setup for collection " + collectionName + " is successful. Exiting setup retry thread");
                   break;
                 }
               } catch (InterruptedException sleepInterrupted) {
-                logger.info("Sleep interrupted while setting up collection "
-                    + collectionName);
+                logger.info("Sleep interrupted while setting up collection " + collectionName);
                 break;
               } catch (Exception e) {
-                logger
-                    .error("Error setting up collection=" + collectionName, e);
+                logger.error("Error setting up collection=" + collectionName, e);
               }
-              logger.error("Error setting collection. collection="
-                  + collectionName + ", retryCount=" + retryCount);
+              logger.error("Error setting collection. collection=" + collectionName + ", retryCount=" + retryCount);
             }
           }
         };
@@ -339,46 +281,39 @@ public abstract class SolrDaoBase {
         setupThread.start();
       }
     }
-    if(needToPopulateSchemaField){
+    
+    if (needToPopulateSchemaField){
       populateSchemaFields();
     }
   }
 
-  public boolean createCollectionsIfNeeded(final String splitMode,
-                                           final String configName, final int numberOfShards,
-                                           final int replicationFactor) {
+  private boolean createCollectionsIfNeeded(String splitInterval, String configName, int numberOfShards, int replicationFactor) {
     boolean result = false;
     try {
       List<String> allCollectionList = getCollections();
-      if (splitMode.equalsIgnoreCase("none")) {
-        // Just create regular collection
-        result = createCollection(collectionName, configName,
-          numberOfShards, replicationFactor, allCollectionList);
+      if (splitInterval.equalsIgnoreCase("none")) {
+        result = createCollection(configName, numberOfShards, replicationFactor, allCollectionList);
       } else {
-        result = setupCollectionsWithImplicitRouting(splitMode,
-          configName, numberOfShards, replicationFactor, allCollectionList);
+        result = setupCollectionsWithImplicitRouting(configName, numberOfShards, replicationFactor, allCollectionList);
       }
     } catch (Exception ex) {
-      logger.error("Error creating collection. collectionName="
-        + collectionName, ex);
+      logger.error("Error creating collection. collectionName=" + collectionName, ex);
     }
     return result;
   }
 
-  public List<String> getCollections() throws SolrServerException,
+  private List<String> getCollections() throws SolrServerException,
     IOException {
     try {
       CollectionAdminRequest.List colListReq = new CollectionAdminRequest.List();
       CollectionAdminResponse response = colListReq.process(solrClient);
       if (response.getStatus() != 0) {
-        logger.error("Error getting collection list from solr.  response="
-          + response);
+        logger.error("Error getting collection list from solr.  response=" + response);
         return null;
       }
 
       @SuppressWarnings("unchecked")
-      List<String> allCollectionList = (List<String>) response
-        .getResponse().get("collections");
+      List<String> allCollectionList = (List<String>) response.getResponse().get("collections");
       return allCollectionList;
     } catch (SolrException e) {
       logger.error(e);
@@ -386,91 +321,61 @@ public abstract class SolrDaoBase {
     }
   }
 
-  public boolean setupCollectionsWithImplicitRouting(String splitMode,
-                                                     String configName, int numberOfShards, int replicationFactor,
+  private boolean setupCollectionsWithImplicitRouting(String configName, int numberOfShards, int replicationFactor,
                                                      List<String> allCollectionList) throws Exception {
-    logger.info("setupCollectionsWithImplicitRouting(). collectionName="
-      + collectionName + ", numberOfShards=" + numberOfShards);
-    return createCollectionWithImplicitRoute(collectionName, configName,
-      numberOfShards, replicationFactor, allCollectionList);
-  }
-
-  public boolean createCollectionWithImplicitRoute(String colName,
-                                                   String configName, int numberOfShards, int replicationFactor,
-                                                   List<String> allCollectionList) throws SolrServerException,
-    IOException {
+    logger.info("setupCollectionsWithImplicitRouting(). collectionName=" + collectionName + ", numberOfShards=" + numberOfShards);
 
-    // Default is true, because if the collection and shard is already
-    // there, then it will return true
+    // Default is true, because if the collection and shard is already there, then it will return true
     boolean returnValue = true;
-    String shardsListStr = "";
+    
     List<String> shardsList = new ArrayList<String>();
     for (int i = 0; i < numberOfShards; i++) {
-      if (i != 0) {
-        shardsListStr += ",";
-      }
-      String shard = "shard" + i;
-      shardsListStr += shard;
-      shardsList.add(shard);
+      shardsList.add("shard" + i);
     }
+    String shardsListStr = StringUtils.join(shardsList, ',');
 
     // Check if collection is already in zookeeper
-    if (!allCollectionList.contains(colName)) {
-      logger.info("Creating collection " + colName + ", shardsList="
-        + shardsList + ", solrDetail=" + solrDetail);
+    if (!allCollectionList.contains(collectionName)) {
+      logger.info("Creating collection " + collectionName + ", shardsList=" + shardsList + ", solrDetail=" + solrDetail);
       CollectionAdminRequest.Create collectionCreateRequest = new CollectionAdminRequest.Create();
-      collectionCreateRequest.setCollectionName(colName);
+      collectionCreateRequest.setCollectionName(collectionName);
       collectionCreateRequest.setRouterName("implicit");
       collectionCreateRequest.setShards(shardsListStr);
-      collectionCreateRequest.setMaxShardsPerNode(numberOfShards);
+      collectionCreateRequest.setNumShards(numberOfShards);
       collectionCreateRequest.setReplicationFactor(replicationFactor);
       collectionCreateRequest.setConfigName(configName);
       collectionCreateRequest.setRouterField(ROUTER_FIELD);
-      collectionCreateRequest.setMaxShardsPerNode(replicationFactor
-        * numberOfShards);
+      collectionCreateRequest.setMaxShardsPerNode(replicationFactor * numberOfShards);
 
-      CollectionAdminResponse createResponse = collectionCreateRequest
-        .process(solrClient);
+      CollectionAdminResponse createResponse = collectionCreateRequest.process(solrClient);
       if (createResponse.getStatus() != 0) {
         returnValue = false;
-        logger.error("Error creating collection. collectionName="
-          + colName + ", shardsList=" + shardsList
-          + ", solrDetail=" + solrDetail + ", response="
-          + createResponse);
+        logger.error("Error creating collection. collectionName=" + collectionName + ", shardsList=" + shardsList +
+            ", solrDetail=" + solrDetail + ", response=" + createResponse);
       } else {
-        logger.info("Created collection " + colName + ", shardsList="
-          + shardsList + ", solrDetail=" + solrDetail);
+        logger.info("Created collection " + collectionName + ", shardsList=" + shardsList + ", solrDetail=" + solrDetail);
       }
     } else {
-      logger.info("Collection "
-        + colName
-        + " is already there. Will check whether it has the required shards");
+      logger.info("Collection " + collectionName + " is already there. Will check whether it has the required shards");
       Collection<String> existingShards = getShards();
       for (String shard : shardsList) {
         if (!existingShards.contains(shard)) {
           try {
-            logger.info("Going to add Shard " + shard
-              + " to collection " + collectionName);
+            logger.info("Going to add Shard " + shard + " to collection " + collectionName);
             CollectionAdminRequest.CreateShard createShardRequest = new CollectionAdminRequest.CreateShard();
             createShardRequest.setCollectionName(collectionName);
             createShardRequest.setShardName(shard);
-            CollectionAdminResponse response = createShardRequest
-              .process(solrClient);
+            CollectionAdminResponse response = createShardRequest.process(solrClient);
             if (response.getStatus() != 0) {
-              logger.error("Error creating shard " + shard
-                + " in collection " + collectionName
-                + ", response=" + response
-                + ", solrDetail=" + solrDetail);
+              logger.error("Error creating shard " + shard + " in collection " + collectionName + ", response=" + response +
+                  ", solrDetail=" + solrDetail);
               returnValue = false;
               break;
             } else {
-              logger.info("Successfully created shard " + shard
-                + " in collection " + collectionName);
+              logger.info("Successfully created shard " + shard + " in collection " + collectionName);
             }
           } catch (Throwable t) {
-            logger.error("Error creating shard " + shard
-              + " in collection " + collectionName
-              + ", solrDetail=" + solrDetail, t);
+            logger.error("Error creating shard " + shard + " in collection " + collectionName + ", solrDetail=" + solrDetail, t);
             returnValue = false;
             break;
           }
@@ -480,7 +385,7 @@ public abstract class SolrDaoBase {
     return returnValue;
   }
 
-  public Collection<String> getShards() {
+  private Collection<String> getShards() {
     Collection<String> list = new HashSet<String>();
 
     if (solrClouldClient == null) {
@@ -489,106 +394,76 @@ public abstract class SolrDaoBase {
     }
 
     ZkStateReader reader = solrClouldClient.getZkStateReader();
-    Collection<Slice> slices = reader.getClusterState().getSlices(
-      collectionName);
-    Iterator<Slice> iter = slices.iterator();
-
-    while (iter.hasNext()) {
-      Slice slice = iter.next();
+    Collection<Slice> slices = reader.getClusterState().getSlices(collectionName);
+    for (Slice slice : slices) {
       for (Replica replica : slice.getReplicas()) {
-        logger.info("colName=" + collectionName + ", slice.name="
-          + slice.getName() + ", slice.state=" + slice.getState()
-          + ", replica.core=" + replica.getStr("core")
-          + ", replica.state=" + replica.getStr("state"));
+        logger.info("colName=" + collectionName + ", slice.name=" + slice.getName() + ", slice.state=" + slice.getState() +
+            ", replica.core=" + replica.getStr("core") + ", replica.state=" + replica.getStr("state"));
         list.add(slice.getName());
       }
     }
     return list;
   }
 
-  public boolean createCollection(String colName, String configName,
-                                  int numberOfShards, int replicationFactor,
-                                  List<String> allCollectionList) throws SolrServerException,
-    IOException {
-    // Check if collection is already in zookeeper
-    if (allCollectionList.contains(colName)) {
-      logger.info("Collection " + colName
-        + " is already there. Won't create it");
+  private boolean createCollection(String configName, int numberOfShards, int replicationFactor,
+                                  List<String> allCollectionList) throws SolrServerException, IOException {
+    if (allCollectionList.contains(collectionName)) {
+      logger.info("Collection " + collectionName + " is already there. Won't create it");
       return true;
     }
 
-    logger.info("Creating collection " + colName + ", numberOfShards="
-      + numberOfShards + ", replicationFactor=" + replicationFactor
-      + ", solrDetail=" + solrDetail);
+    logger.info("Creating collection " + collectionName + ", numberOfShards=" + numberOfShards +
+        ", replicationFactor=" + replicationFactor + ", solrDetail=" + solrDetail);
 
     CollectionAdminRequest.Create collectionCreateRequest = new CollectionAdminRequest.Create();
-    collectionCreateRequest.setCollectionName(colName);
+    collectionCreateRequest.setCollectionName(collectionName);
     collectionCreateRequest.setNumShards(numberOfShards);
     collectionCreateRequest.setReplicationFactor(replicationFactor);
     collectionCreateRequest.setConfigName(configName);
-    collectionCreateRequest.setMaxShardsPerNode(replicationFactor
-      * numberOfShards);
-    CollectionAdminResponse createResponse = collectionCreateRequest
-      .process(solrClient);
+    collectionCreateRequest.setMaxShardsPerNode(replicationFactor * numberOfShards);
+    CollectionAdminResponse createResponse = collectionCreateRequest.process(solrClient);
     if (createResponse.getStatus() != 0) {
-      logger.error("Error creating collection. collectionName=" + colName
-        + ", solrDetail=" + solrDetail + ", response="
-        + createResponse);
+      logger.error("Error creating collection. collectionName=" + collectionName + ", solrDetail=" + solrDetail + ", response=" +
+          createResponse);
       return false;
     } else {
-      logger.info("Created collection " + colName + ", numberOfShards="
-        + numberOfShards + ", replicationFactor="
-        + replicationFactor + ", solrDetail=" + solrDetail);
+      logger.info("Created collection " + collectionName + ", numberOfShards=" + numberOfShards +
+          ", replicationFactor=" + replicationFactor + ", solrDetail=" + solrDetail);
       return true;
     }
   }
 
-  public QueryResponse process(SolrQuery solrQuery)
-    throws SolrServerException, IOException {
+  public QueryResponse process(SolrQuery solrQuery) throws SolrServerException, IOException {
     if (solrClient != null) {
       String event = solrQuery.get("event");
       solrQuery.remove("event");
-      QueryResponse queryResponse = solrClient.query(solrQuery,
-        METHOD.POST);
+      QueryResponse queryResponse = solrClient.query(solrQuery, METHOD.POST);
 
       if (event != null && !"/audit/logs/live/count".equalsIgnoreCase(event)) {
-        logPerformance.info("\n Username :- "
-          + LogsearchContextUtil.getCurrentUsername()
-          + " Event :- " + event + " SolrQuery :- " + solrQuery
-          + "\nQuery Time Execution :- "
-          + queryResponse.getQTime()
-          + " Total Time Elapsed is :- "
-          + queryResponse.getElapsedTime());
+        logPerformance.info("\n Username :- " + LogSearchContext.getCurrentUsername() + " Event :- " + event + " SolrQuery :- " +
+            solrQuery + "\nQuery Time Execution :- " + queryResponse.getQTime() + " Total Time Elapsed is :- " +
+            queryResponse.getElapsedTime());
       }
       return queryResponse;
     } else {
-      throw restErrorUtil.createRESTException(
-          "Solr configuration improper for " + logType.getLabel() +" logs",
+      throw restErrorUtil.createRESTException("Solr configuration improper for " + logType.getLabel() +" logs",
           MessageEnums.ERROR_SYSTEM);
     }
   }
 
-  public UpdateResponse addDocs(SolrInputDocument doc)
-    throws SolrServerException, IOException, SolrException {
+  public UpdateResponse addDocs(SolrInputDocument doc) throws SolrServerException, IOException, SolrException {
     UpdateResponse updateResoponse = solrClient.add(doc);
-    logPerformance.info("\n Username :- "
-      + LogsearchContextUtil.getCurrentUsername()
-      + " Update Time Execution :- " + updateResoponse.getQTime()
-      + " Total Time Elapsed is :- "
-      + updateResoponse.getElapsedTime());
+    logPerformance.info("\n Username :- " + LogSearchContext.getCurrentUsername() +
+        " Update Time Execution :- " + updateResoponse.getQTime() + " Total Time Elapsed is :- " + updateResoponse.getElapsedTime());
     solrClient.commit();
     return updateResoponse;
   }
 
-  public UpdateResponse removeDoc(String query) throws SolrServerException,
-    IOException, SolrException {
+  public UpdateResponse removeDoc(String query) throws SolrServerException, IOException, SolrException {
     UpdateResponse updateResoponse = solrClient.deleteByQuery(query);
     solrClient.commit();
-    logPerformance.info("\n Username :- "
-      + LogsearchContextUtil.getCurrentUsername()
-      + " Remove Time Execution :- " + updateResoponse.getQTime()
-      + " Total Time Elapsed is :- "
-      + updateResoponse.getElapsedTime());
+    logPerformance.info("\n Username :- " + LogSearchContext.getCurrentUsername() +
+        " Remove Time Execution :- " + updateResoponse.getQTime() + " Total Time Elapsed is :- " + updateResoponse.getElapsedTime());
     return updateResoponse;
   }
 
@@ -605,14 +480,11 @@ public abstract class SolrDaoBase {
   private void populateSchemaFields() {
     if (!populateFieldsThreadActive) {
       populateFieldsThreadActive = true;
-      logger.info("Creating thread to populated fields for collection="
-          + collectionName);
-      Thread fieldPopulationThread = new Thread("populated_fields_"
-          + collectionName) {
+      logger.info("Creating thread to populated fields for collection=" + collectionName);
+      Thread fieldPopulationThread = new Thread("populated_fields_" + collectionName) {
         @Override
         public void run() {
-          logger.info("Started thread to get fields for collection="
-              + collectionName);
+          logger.info("Started thread to get fields for collection=" + collectionName);
           int retryCount = 0;
           while (true) {
             try {
@@ -620,24 +492,19 @@ public abstract class SolrDaoBase {
               retryCount++;
               boolean _result = _populateSchemaFields();
               if (_result) {
-                logger.info("Populate fields for collection " + collectionName
-                    + " is success, Update it after " + SETUP_UPDATE_SECOND
-                    + " sec");
+                logger.info("Populate fields for collection " + collectionName + " is success, Update it after " +
+                    SETUP_UPDATE_SECOND + " sec");
                 Thread.sleep(SETUP_UPDATE_SECOND * 1000);
               }
             } catch (InterruptedException sleepInterrupted) {
-              logger
-                  .info("Sleep interrupted while populating fields for collection "
-                      + collectionName);
+              logger.info("Sleep interrupted while populating fields for collection " + collectionName);
               break;
             } catch (Exception ex) {
-              logger.error("Error while populating fields for collection "
-                  + collectionName + ", retryCount=" + retryCount);
+              logger.error("Error while populating fields for collection " + collectionName + ", retryCount=" + retryCount);
             }
           }
           populateFieldsThreadActive = false;
-          logger.info("Exiting thread for populating fields. collection="
-              + collectionName);
+          logger.info("Exiting thread for populating fields. collection=" + collectionName);
         }
       };
       fieldPopulationThread.setDaemon(true);
@@ -657,16 +524,13 @@ public abstract class SolrDaoBase {
       NamedList<Object> namedList = null;
       try {
         namedList = solrClient.request(request);
-        logger.info("populateSchemaFields() collection="
-          + collectionName + ", fields=" + namedList);
+        logger.info("populateSchemaFields() collection=" + collectionName + ", fields=" + namedList);
       } catch (SolrException | SolrServerException | IOException e) {
-        logger.error(
-          "Error occured while popuplating field. collection="
-            + collectionName, e);
+        logger.error("Error occured while popuplating field. collection=" + collectionName, e);
       }
+      
       if (namedList != null) {
-        ConfigUtil.extractSchemaFieldsName(namedList.toString(),
-          schemaFieldsNameMap,schemaFieldTypeMap);
+        ConfigUtil.extractSchemaFieldsName(namedList.toString(), schemaFieldsNameMap, schemaFieldTypeMap);
         return true;
       }
     }
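
For reference, a minimal sketch of the implicit-routing collection setup that setupCollectionsWithImplicitRouting() performs, written against the same style of SolrJ CollectionAdminRequest setters used in the hunk above; the collection name, config set and router field below are placeholders, not values taken from Log Search:

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.StringUtils;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;

public class ImplicitRoutingSketch {
  // Creates an implicitly routed collection with shards shard0..shard(n-1).
  public static boolean createImplicitCollection(SolrClient solrClient, String collectionName, String configName,
      int numberOfShards, int replicationFactor) throws Exception {
    List<String> shardsList = new ArrayList<String>();
    for (int i = 0; i < numberOfShards; i++) {
      shardsList.add("shard" + i);
    }
    String shardsListStr = StringUtils.join(shardsList, ',');

    CollectionAdminRequest.Create request = new CollectionAdminRequest.Create();
    request.setCollectionName(collectionName);          // e.g. "hadoop_logs" (placeholder)
    request.setRouterName("implicit");                  // route documents by an explicit field, not by hash
    request.setShards(shardsListStr);
    request.setReplicationFactor(replicationFactor);
    request.setConfigName(configName);                  // e.g. "hadoop_logs_config" (placeholder)
    request.setRouterField("logtime_shard");            // placeholder for the ROUTER_FIELD constant
    request.setMaxShardsPerNode(replicationFactor * numberOfShards);

    CollectionAdminResponse response = request.process(solrClient);
    return response.getStatus() == 0;                   // status 0 means the Collections API call succeeded
  }
}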


[46/50] [abbrv] ambari git commit: AMBARI-18246. Clean up Log Feeder (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/FileUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/FileUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/FileUtil.java
index ec26a88..ffd6cec 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/FileUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/FileUtil.java
@@ -20,25 +20,73 @@
 package org.apache.ambari.logfeeder.util;
 
 import java.io.File;
+import java.io.IOException;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.BasicFileAttributes;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.log4j.Logger;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.type.TypeReference;
 
 public class FileUtil {
-  private static final Logger logger = Logger.getLogger(FileUtil.class);
-
-  public static List<File> getAllFileFromDir(File directory,
-      String[] searchFileWithExtensions, boolean checkInSubDir) {
+  private static final Logger LOG = Logger.getLogger(FileUtil.class);
+  
+  private FileUtil() {
+    throw new UnsupportedOperationException();
+  }
+  
+  public static List<File> getAllFileFromDir(File directory, String extension, boolean checkInSubDir) {
     if (!directory.exists()) {
-      logger.error(directory.getAbsolutePath() + " is not exists ");
-    } else if (directory.isDirectory()) {
-      return (List<File>) FileUtils.listFiles(directory,
-          searchFileWithExtensions, checkInSubDir);
+      LOG.error(directory.getAbsolutePath() + " is not exists ");
+    } else if (!directory.isDirectory()) {
+      LOG.error(directory.getAbsolutePath() + " is not Directory ");
     } else {
-      logger.error(directory.getAbsolutePath() + " is not Directory ");
+      return (List<File>) FileUtils.listFiles(directory, new String[]{extension}, checkInSubDir);
     }
     return new ArrayList<File>();
   }
+
+
+  public static Object getFileKey(File file) {
+    try {
+      Path fileFullPath = Paths.get(file.getAbsolutePath());
+      if (fileFullPath != null) {
+        BasicFileAttributes basicAttr = Files.readAttributes(fileFullPath, BasicFileAttributes.class);
+        return basicAttr.fileKey();
+      }
+    } catch (Throwable ex) {
+      LOG.error("Error getting file attributes for file=" + file, ex);
+    }
+    return file.toString();
+  }
+
+  public static File getFileFromClasspath(String filename) {
+    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader().getResource(filename);
+    LOG.debug("File Complete URI :" + fileCompleteUrl);
+    File file = null;
+    try {
+      file = new File(fileCompleteUrl.toURI());
+    } catch (Exception exception) {
+      LOG.debug(exception.getMessage(), exception.getCause());
+    }
+    return file;
+  }
+
+  public static HashMap<String, Object> readJsonFromFile(File jsonFile) {
+    ObjectMapper mapper = new ObjectMapper();
+    try {
+      HashMap<String, Object> jsonmap = mapper.readValue(jsonFile, new TypeReference<HashMap<String, Object>>() {});
+      return jsonmap;
+    } catch (IOException e) {
+      LOG.error(e, e.getCause());
+    }
+    return new HashMap<String, Object>();
+  }
 }
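
The new FileUtil.getFileKey() identifies a log file by its platform file key (on POSIX file systems the device id plus inode), which stays stable across renames such as log rotation. A self-contained, JDK-only sketch of the same idea; the path is a placeholder:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;

public class FileKeySketch {
  public static void main(String[] args) throws IOException {
    Path logFile = Paths.get("/var/log/example/example.log"); // placeholder path

    // The file key is unchanged when the file is merely renamed (rotated),
    // and differs when a new file is created under the old name.
    BasicFileAttributes attrs = Files.readAttributes(logFile, BasicFileAttributes.class);
    Object fileKey = attrs.fileKey(); // may be null on file systems that do not expose a key

    System.out.println("fileKey=" + fileKey);
  }
}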

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
index 32029ff..5bf600e 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
@@ -22,36 +22,23 @@ package org.apache.ambari.logfeeder.util;
 import java.io.BufferedInputStream;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.IOException;
 import java.lang.reflect.Type;
 import java.net.InetAddress;
-import java.net.URL;
 import java.net.UnknownHostException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
 import java.util.HashMap;
 import java.util.Hashtable;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
-import java.util.TimeZone;
 
 import org.apache.ambari.logfeeder.LogFeeder;
-import org.apache.ambari.logfeeder.filter.Filter;
-import org.apache.ambari.logfeeder.input.Input;
-import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
-import org.apache.ambari.logfeeder.mapper.Mapper;
-import org.apache.ambari.logfeeder.metrics.MetricCount;
-import org.apache.ambari.logfeeder.output.Output;
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
-import org.codehaus.jackson.JsonParseException;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.type.TypeReference;
 
-import com.google.common.collect.ObjectArrays;
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
 import com.google.gson.reflect.TypeToken;
@@ -60,109 +47,80 @@ import com.google.gson.reflect.TypeToken;
  * This class contains utility methods used by LogFeeder
  */
 public class LogFeederUtil {
-  private static final Logger logger = Logger.getLogger(LogFeederUtil.class);
+  private static final Logger LOG = Logger.getLogger(LogFeederUtil.class);
 
-  private static final int HASH_SEED = 31174077;
-  public final static String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";
-  public final static String SOLR_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
-  private static Gson gson = new GsonBuilder().setDateFormat(DATE_FORMAT).create();
-
-  private static Properties props;
-
-  private static Map<String, LogHistory> logHistoryList = new Hashtable<String, LogHistory>();
-  private static int logInterval = 30000; // 30 seconds
+  private final static String GSON_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss.SSS";
+  private static Gson gson = new GsonBuilder().setDateFormat(GSON_DATE_FORMAT).create();
+  
+  public static Gson getGson() {
+    return gson;
+  }
 
   public static String hostName = null;
   public static String ipAddress = null;
   
-  private static String logfeederTempDir = null;
-  
-  private static final Object _LOCK = new Object();
-  
   static{
-    setHostNameAndIP();
+    try {
+      InetAddress ip = InetAddress.getLocalHost();
+      ipAddress = ip.getHostAddress();
+      String getHostName = ip.getHostName();
+      String getCanonicalHostName = ip.getCanonicalHostName();
+      if (!getCanonicalHostName.equalsIgnoreCase(ipAddress)) {
+        LOG.info("Using getCanonicalHostName()=" + getCanonicalHostName);
+        hostName = getCanonicalHostName;
+      } else {
+        LOG.info("Using getHostName()=" + getHostName);
+        hostName = getHostName;
+      }
+      LOG.info("ipAddress=" + ipAddress + ", getHostName=" + getHostName + ", getCanonicalHostName=" + getCanonicalHostName +
+          ", hostName=" + hostName);
+    } catch (UnknownHostException e) {
+      LOG.error("Error getting hostname.", e);
+    }
   }
   
-  public static Gson getGson() {
-    return gson;
-  }
-
-  private static ThreadLocal<SimpleDateFormat> dateFormatter = new ThreadLocal<SimpleDateFormat>() {
-    @Override
-    protected SimpleDateFormat initialValue() {
-      SimpleDateFormat sdf = new SimpleDateFormat(SOLR_DATE_FORMAT);
-      sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
-      return sdf;
-    }
-  };
+  private static Properties props;
 
   /**
-   * This method will read the properties from System, followed by propFile
-   * and finally from the map
+   * This method will read the properties from System, followed by propFile and finally from the map
    */
-  public static void loadProperties(String propFile, String[] propNVList)
-    throws Exception {
-    logger.info("Loading properties. propFile=" + propFile);
+  public static void loadProperties(String propFile, String[] propNVList) throws Exception {
+    LOG.info("Loading properties. propFile=" + propFile);
     props = new Properties(System.getProperties());
     boolean propLoaded = false;
 
     // First get properties file path from environment value
     String propertiesFilePath = System.getProperty("properties");
-    if (propertiesFilePath != null && !propertiesFilePath.isEmpty()) {
+    if (StringUtils.isNotEmpty(propertiesFilePath)) {
       File propertiesFile = new File(propertiesFilePath);
       if (propertiesFile.exists() && propertiesFile.isFile()) {
-        logger.info("Properties file path set in environment. Loading properties file="
-          + propertiesFilePath);
-        FileInputStream fileInputStream = null;
-        try {
-          fileInputStream = new FileInputStream(propertiesFile);
-          props.load(fileInputStream);
+        LOG.info("Properties file path set in environment. Loading properties file=" + propertiesFilePath);
+        try (FileInputStream fis = new FileInputStream(propertiesFile)) {
+          props.load(fis);
           propLoaded = true;
         } catch (Throwable t) {
-          logger.error("Error loading properties file. properties file="
-            + propertiesFile.getAbsolutePath());
-        } finally {
-          if (fileInputStream != null) {
-            try {
-              fileInputStream.close();
-            } catch (Throwable t) {
-              // Ignore error
-            }
-          }
+          LOG.error("Error loading properties file. properties file=" + propertiesFile.getAbsolutePath());
         }
       } else {
-        logger.error("Properties file path set in environment, but file not found. properties file="
-          + propertiesFilePath);
+        LOG.error("Properties file path set in environment, but file not found. properties file=" + propertiesFilePath);
       }
     }
 
     if (!propLoaded) {
-      BufferedInputStream fileInputStream = null;
-      try {
+      try (BufferedInputStream bis = (BufferedInputStream) LogFeeder.class.getClassLoader().getResourceAsStream(propFile)) {
         // Properties not yet loaded, let's try from class loader
-        fileInputStream = (BufferedInputStream) LogFeeder.class
-          .getClassLoader().getResourceAsStream(propFile);
-        if (fileInputStream != null) {
-          logger.info("Loading properties file " + propFile
-            + " from classpath");
-          props.load(fileInputStream);
+        if (bis != null) {
+          LOG.info("Loading properties file " + propFile + " from classpath");
+          props.load(bis);
           propLoaded = true;
         } else {
-          logger.fatal("Properties file not found in classpath. properties file name= "
-            + propFile);
-        }
-      } finally {
-        if (fileInputStream != null) {
-          try {
-            fileInputStream.close();
-          } catch (IOException e) {
-          }
+          LOG.fatal("Properties file not found in classpath. properties file name= " + propFile);
         }
       }
     }
 
     if (!propLoaded) {
-      logger.fatal("Properties file is not loaded.");
+      LOG.fatal("Properties file is not loaded.");
       throw new Exception("Properties not loaded");
     } else {
       updatePropertiesFromMap(propNVList);
@@ -173,162 +131,124 @@ public class LogFeederUtil {
     if (nvList == null) {
       return;
     }
-    logger.info("Trying to load additional proeprties from argument paramters. nvList.length="
-      + nvList.length);
-    if (nvList != null && nvList.length > 0) {
-      for (String nv : nvList) {
-        logger.info("Passed nv=" + nv);
-        if (nv.startsWith("-") && nv.length() > 1) {
-          nv = nv.substring(1);
-          logger.info("Stripped nv=" + nv);
-          int i = nv.indexOf("=");
-          if (nv.length() > i) {
-            logger.info("Candidate nv=" + nv);
-            String name = nv.substring(0, i);
-            String value = nv.substring(i + 1);
-            logger.info("Adding property from argument to properties. name="
-              + name + ", value=" + value);
-            props.put(name, value);
-          }
+    LOG.info("Trying to load additional proeprties from argument paramters. nvList.length=" + nvList.length);
+    for (String nv : nvList) {
+      LOG.info("Passed nv=" + nv);
+      if (nv.startsWith("-") && nv.length() > 1) {
+        nv = nv.substring(1);
+        LOG.info("Stripped nv=" + nv);
+        int i = nv.indexOf("=");
+        if (nv.length() > i) {
+          LOG.info("Candidate nv=" + nv);
+          String name = nv.substring(0, i);
+          String value = nv.substring(i + 1);
+          LOG.info("Adding property from argument to properties. name=" + name + ", value=" + value);
+          props.put(name, value);
         }
       }
     }
   }
 
-  static public String getStringProperty(String key) {
-    if (props != null) {
-      return props.getProperty(key);
-    }
-    return null;
+  public static String getStringProperty(String key) {
+    return props == null ? null : props.getProperty(key);
   }
 
-  static public String getStringProperty(String key, String defaultValue) {
-    if (props != null) {
-      return props.getProperty(key, defaultValue);
-    }
-    return defaultValue;
+  public static String getStringProperty(String key, String defaultValue) {
+    return props == null ? defaultValue : props.getProperty(key, defaultValue);
   }
 
-  static public boolean getBooleanProperty(String key, boolean defaultValue) {
-    String strValue = getStringProperty(key);
-    return toBoolean(strValue, defaultValue);
+  public static boolean getBooleanProperty(String key, boolean defaultValue) {
+    String value = getStringProperty(key);
+    return toBoolean(value, defaultValue);
   }
 
-  private static boolean toBoolean(String strValue, boolean defaultValue) {
-    boolean retValue = defaultValue;
-    if (!StringUtils.isEmpty(strValue)) {
-      if (strValue.equalsIgnoreCase("true")
-        || strValue.equalsIgnoreCase("yes")) {
-        retValue = true;
-      } else {
-        retValue = false;
-      }
+  private static boolean toBoolean(String value, boolean defaultValue) {
+    if (StringUtils.isEmpty(value)) {
+      return defaultValue;
     }
-    return retValue;
+    
+    return "true".equalsIgnoreCase(value) || "yes".equalsIgnoreCase(value);
   }
 
-  static public int getIntProperty(String key, int defaultValue) {
-    String strValue = getStringProperty(key);
-    int retValue = defaultValue;
-    retValue = objectToInt(strValue, retValue, ", key=" + key);
+  public static int getIntProperty(String key, int defaultValue) {
+    String value = getStringProperty(key);
+    int retValue = objectToInt(value, defaultValue, ", key=" + key);
     return retValue;
   }
 
-  public static int objectToInt(Object objValue, int retValue,
-                                String errMessage) {
+  public static int objectToInt(Object objValue, int retValue, String errMessage) {
     if (objValue == null) {
       return retValue;
     }
     String strValue = objValue.toString();
-    if (!StringUtils.isEmpty(strValue)) {
+    if (StringUtils.isNotEmpty(strValue)) {
       try {
         retValue = Integer.parseInt(strValue);
       } catch (Throwable t) {
-        logger.error("Error parsing integer value. str=" + strValue
-          + ", " + errMessage);
+        LOG.error("Error parsing integer value. str=" + strValue + ", " + errMessage);
       }
     }
     return retValue;
   }
 
-  public static boolean isEnabled(Map<String, Object> conditionConfigs,
-                                  Map<String, Object> valueConfigs) {
-    boolean allow = toBoolean((String) valueConfigs.get("is_enabled"), true);
-    @SuppressWarnings("unchecked")
-    Map<String, Object> conditions = (Map<String, Object>) conditionConfigs
-      .get("conditions");
-    if (conditions != null && conditions.size() > 0) {
-      allow = false;
-      for (String conditionType : conditions.keySet()) {
-        if (conditionType.equalsIgnoreCase("fields")) {
-          @SuppressWarnings("unchecked")
-          Map<String, Object> fields = (Map<String, Object>) conditions
-            .get("fields");
-          for (String fieldName : fields.keySet()) {
-            Object values = fields.get(fieldName);
-            if (values instanceof String) {
-              allow = isFieldConditionMatch(valueConfigs,
-                fieldName, (String) values);
-            } else {
-              @SuppressWarnings("unchecked")
-              List<String> listValues = (List<String>) values;
-              for (String stringValue : listValues) {
-                allow = isFieldConditionMatch(valueConfigs,
-                  fieldName, stringValue);
-                if (allow) {
-                  break;
-                }
-              }
-            }
-            if (allow) {
-              break;
+  @SuppressWarnings("unchecked")
+  public static boolean isEnabled(Map<String, Object> conditionConfigs, Map<String, Object> valueConfigs) {
+    Map<String, Object> conditions = (Map<String, Object>) conditionConfigs.get("conditions");
+    if (MapUtils.isEmpty(conditions)) {
+      return toBoolean((String) valueConfigs.get("is_enabled"), true);
+    }
+    
+    for (String conditionType : conditions.keySet()) {
+      if (!conditionType.equalsIgnoreCase("fields")) {
+        continue;
+      }
+      
+      Map<String, Object> fields = (Map<String, Object>) conditions.get("fields");
+      for (Map.Entry<String, Object> field : fields.entrySet()) {
+        if (field.getValue() instanceof String) {
+          if (isFieldConditionMatch(valueConfigs, field.getKey(), (String) field.getValue())) {
+            return true;
+          }
+        } else {
+          for (String stringValue : (List<String>) field.getValue()) {
+            if (isFieldConditionMatch(valueConfigs, field.getKey(), stringValue)) {
+              return true;
             }
           }
         }
-        if (allow) {
-          break;
-        }
       }
     }
-    return allow;
+    
+    return false;
   }
 
-  public static boolean isFieldConditionMatch(Map<String, Object> configs,
-                                              String fieldName, String stringValue) {
+  private static boolean isFieldConditionMatch(Map<String, Object> configs, String fieldName, String stringValue) {
     boolean allow = false;
     String fieldValue = (String) configs.get(fieldName);
     if (fieldValue != null && fieldValue.equalsIgnoreCase(stringValue)) {
       allow = true;
     } else {
       @SuppressWarnings("unchecked")
-      Map<String, Object> addFields = (Map<String, Object>) configs
-        .get("add_fields");
+      Map<String, Object> addFields = (Map<String, Object>) configs.get("add_fields");
       if (addFields != null && addFields.get(fieldName) != null) {
         String addFieldValue = (String) addFields.get(fieldName);
         if (stringValue.equalsIgnoreCase(addFieldValue)) {
           allow = true;
         }
       }
-
     }
     return allow;
   }
 
-  public static void logStatForMetric(MetricCount metric, String prefixStr,
-                                      String postFix) {
-    long currStat = metric.count;
+  public static void logStatForMetric(MetricData metric, String prefixStr, String postFix) {
+    long currStat = metric.value;
     long currMS = System.currentTimeMillis();
-    if (currStat > metric.prevLogCount) {
-      if (postFix == null) {
-        postFix = "";
-      }
-      logger.info(prefixStr + ": total_count=" + metric.count
-        + ", duration=" + (currMS - metric.prevLogMS) / 1000
-        + " secs, count=" + (currStat - metric.prevLogCount)
-        + postFix);
+    if (currStat > metric.prevLogValue) {
+      LOG.info(prefixStr + ": total_count=" + metric.value + ", duration=" + (currMS - metric.prevLogTime) / 1000 +
+          " secs, count=" + (currStat - metric.prevLogValue) + postFix);
     }
-    metric.prevLogCount = currStat;
-    metric.prevLogMS = currMS;
+    metric.prevLogValue = currStat;
+    metric.prevLogTime = currMS;
   }
 
   public static Map<String, Object> cloneObject(Map<String, Object> map) {
@@ -336,221 +256,74 @@ public class LogFeederUtil {
       return null;
     }
     String jsonStr = gson.toJson(map);
-    Type type = new TypeToken<Map<String, Object>>() {
-    }.getType();
+    Type type = new TypeToken<Map<String, Object>>() {}.getType();
     return gson.fromJson(jsonStr, type);
   }
 
   public static Map<String, Object> toJSONObject(String jsonStr) {
-    if(jsonStr==null || jsonStr.trim().isEmpty()){
+    if (StringUtils.isBlank(jsonStr)) {
       return new HashMap<String, Object>();
     }
-    Type type = new TypeToken<Map<String, Object>>() {
-    }.getType();
+    Type type = new TypeToken<Map<String, Object>>() {}.getType();
     return gson.fromJson(jsonStr, type);
   }
 
-  static public boolean logErrorMessageByInterval(String key, String message,
-                                                  Throwable e, Logger callerLogger, Level level) {
+  private static class LogHistory {
+    private long lastLogTime = 0;
+    private int counter = 0;
+  }
+
+  private static Map<String, LogHistory> logHistoryList = new Hashtable<String, LogHistory>();
 
+  public static boolean logErrorMessageByInterval(String key, String message, Throwable e, Logger callerLogger, Level level) {
     LogHistory log = logHistoryList.get(key);
     if (log == null) {
       log = new LogHistory();
       logHistoryList.put(key, log);
     }
-    if ((System.currentTimeMillis() - log.lastLogTime) > logInterval) {
+    
+    if ((System.currentTimeMillis() - log.lastLogTime) > 30 * 1000) {
       log.lastLogTime = System.currentTimeMillis();
-      int counter = log.counter;
-      log.counter = 0;
-      if (counter > 0) {
-        message += ". Messages suppressed before: " + counter;
-      }
-      if (e == null) {
-        callerLogger.log(level, message);
-      } else {
-        callerLogger.log(level, message, e);
+      if (log.counter > 0) {
+        message += ". Messages suppressed before: " + log.counter;
       }
+      log.counter = 0;
+      callerLogger.log(level, message, e);
 
       return true;
     } else {
       log.counter++;
-    }
-    return false;
-
-  }
-
-  static public String subString(String str, int maxLength) {
-    if (str == null || str.length() == 0) {
-      return "";
-    }
-    maxLength = str.length() < maxLength ? str.length() : maxLength;
-    return str.substring(0, maxLength);
-  }
-
-  public static long genHash(String value) {
-    if (value == null) {
-      value = "null";
-    }
-    return MurmurHash.hash64A(value.getBytes(), HASH_SEED);
-  }
-
-  private static class LogHistory {
-    private long lastLogTime = 0;
-    private int counter = 0;
-  }
-
-  public static String getDate(String timeStampStr) {
-    try {
-      return dateFormatter.get().format(new Date(Long.parseLong(timeStampStr)));
-    } catch (Exception ex) {
-      logger.error(ex);
-      return null;
+      return false;
     }
   }
 
-  public static String getActualDateStr() {
-    try {
-      return dateFormatter.get().format(new Date());
-    } catch (Exception ex) {
-      logger.error(ex);
-      return null;
-    }
-  }
-
-  public static File getFileFromClasspath(String filename) {
-    URL fileCompleteUrl = Thread.currentThread().getContextClassLoader()
-      .getResource(filename);
-    logger.debug("File Complete URI :" + fileCompleteUrl);
-    File file = null;
-    try {
-      file = new File(fileCompleteUrl.toURI());
-    } catch (Exception exception) {
-      logger.debug(exception.getMessage(), exception.getCause());
-    }
-    return file;
-  }
-
-  public static Object getClassInstance(String classFullName, AliasUtil.ALIAS_TYPE aliasType) {
-    Object instance = null;
-    try {
-      instance = (Object) Class.forName(classFullName).getConstructor().newInstance();
-    } catch (Exception exception) {
-      logger.error("Unsupported class =" + classFullName, exception.getCause());
+  public static boolean isListContains(List<String> list, String str, boolean caseSensitive) {
+    if (list == null) {
+      return false;
     }
-    // check instance class as par aliasType
-    if (instance != null) {
-      boolean isValid = false;
-      switch (aliasType) {
-        case FILTER:
-          isValid = Filter.class.isAssignableFrom(instance.getClass());
-          break;
-        case INPUT:
-          isValid = Input.class.isAssignableFrom(instance.getClass());
-          break;
-        case OUTPUT:
-          isValid = Output.class.isAssignableFrom(instance.getClass());
-          break;
-        case MAPPER:
-          isValid = Mapper.class.isAssignableFrom(instance.getClass());
-          break;
-        default:
-          // by default consider all are valid class
-          isValid = true;
+    
+    for (String value : list) {
+      if (value == null) {
+        continue;
       }
-      if (!isValid) {
-        logger.error("Not a valid class :" + classFullName + " AliasType :" + aliasType.name());
-      }
-    }
-    return instance;
-  }
-
-  public static HashMap<String, Object> readJsonFromFile(File jsonFile) {
-    ObjectMapper mapper = new ObjectMapper();
-    try {
-      HashMap<String, Object> jsonmap = mapper.readValue(jsonFile, new TypeReference<HashMap<String, Object>>() {
-      });
-      return jsonmap;
-    } catch (JsonParseException e) {
-      logger.error(e, e.getCause());
-    } catch (JsonMappingException e) {
-      logger.error(e, e.getCause());
-    } catch (IOException e) {
-      logger.error(e, e.getCause());
-    }
-    return new HashMap<String, Object>();
-  }
-
-  public static boolean isListContains(List<String> list, String str, boolean caseSensitive) {
-    if (list != null) {
-      for (String value : list) {
-        if (value != null) {
-          if (caseSensitive) {
-            if (value.equals(str)) {
-              return true;
-            }
-          } else {
-            if (value.equalsIgnoreCase(str)) {
-              return true;
-            }
-          }
-          if (value.equalsIgnoreCase(LogFeederConstants.ALL)) {
-            return true;
-          }
-        }
+      
+      if ((caseSensitive ? value.equals(str) : value.equalsIgnoreCase(str)) ||
+          value.equalsIgnoreCase(LogFeederConstants.ALL)) {
+        return true;
       }
     }
     return false;
   }
   
+  private static String logfeederTempDir = null;
   
-  private static synchronized String setHostNameAndIP() {
-    if (hostName == null || ipAddress == null) {
-      try {
-        InetAddress ip = InetAddress.getLocalHost();
-        ipAddress = ip.getHostAddress();
-        String getHostName = ip.getHostName();
-        String getCanonicalHostName = ip.getCanonicalHostName();
-        if (!getCanonicalHostName.equalsIgnoreCase(ipAddress)) {
-          logger.info("Using getCanonicalHostName()=" + getCanonicalHostName);
-          hostName = getCanonicalHostName;
-        } else {
-          logger.info("Using getHostName()=" + getHostName);
-          hostName = getHostName;
-        }
-        logger.info("ipAddress=" + ipAddress + ", getHostName=" + getHostName
-            + ", getCanonicalHostName=" + getCanonicalHostName + ", hostName="
-            + hostName);
-      } catch (UnknownHostException e) {
-        logger.error("Error getting hostname.", e);
-      }
-    }
-    return hostName;
-  }
-
-  public static String[] mergeArray(String[] first, String[] second) {
-    if (first == null) {
-      first = new String[0];
-    }
-    if (second == null) {
-      second = new String[0];
-    }
-    String[] mergedArray = ObjectArrays.concat(first, second, String.class);
-    return mergedArray;
-  }
-  
-  public static String getLogfeederTempDir() {
+  public synchronized static String getLogfeederTempDir() {
     if (logfeederTempDir == null) {
-      synchronized (_LOCK) {
-        if (logfeederTempDir == null) {
-          String tempDirValue = getStringProperty("logfeeder.tmp.dir",
-              "/tmp/$username/logfeeder/");
-          HashMap<String, String> contextParam = new HashMap<String, String>();
-          String username = System.getProperty("user.name");
-          contextParam.put("username", username);
-          logfeederTempDir = PlaceholderUtil.replaceVariables(tempDirValue,
-              contextParam);
-        }
-      }
+      String tempDirValue = getStringProperty("logfeeder.tmp.dir", "/tmp/$username/logfeeder/");
+      HashMap<String, String> contextParam = new HashMap<String, String>();
+      String username = System.getProperty("user.name");
+      contextParam.put("username", username);
+      logfeederTempDir = PlaceholderUtil.replaceVariables(tempDirValue, contextParam);
     }
     return logfeederTempDir;
   }
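
logErrorMessageByInterval() above keeps a per-key history so that a repeated error is emitted at most once every 30 seconds, with the number of suppressed repeats reported on the next emission. A stripped-down sketch of the same suppression pattern, independent of log4j (the real code delegates to the caller's Logger):

import java.util.HashMap;
import java.util.Map;

public class IntervalLoggerSketch {
  private static class History {
    long lastLogTime;
    int suppressed;
  }

  private static final long INTERVAL_MS = 30 * 1000;
  private final Map<String, History> histories = new HashMap<String, History>();

  // Returns true if the message was emitted, false if it was suppressed.
  public synchronized boolean logByInterval(String key, String message) {
    History history = histories.get(key);
    if (history == null) {
      history = new History();
      histories.put(key, history);
    }

    long now = System.currentTimeMillis();
    if (now - history.lastLogTime > INTERVAL_MS) {
      history.lastLogTime = now;
      if (history.suppressed > 0) {
        message += ". Messages suppressed before: " + history.suppressed;
      }
      history.suppressed = 0;
      System.err.println(message);
      return true;
    } else {
      history.suppressed++;
      return false;
    }
  }
}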

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogfeederHDFSUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogfeederHDFSUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogfeederHDFSUtil.java
index fd96f8a..c975b99 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogfeederHDFSUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogfeederHDFSUtil.java
@@ -25,71 +25,53 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.log4j.Logger;
 
-public enum LogfeederHDFSUtil {
-  INSTANCE;
-  private static Logger logger = Logger.getLogger(LogfeederHDFSUtil.class);
+public class LogfeederHDFSUtil {
+  private static final Logger LOG = Logger.getLogger(LogfeederHDFSUtil.class);
 
-  public void createHDFSDir(String dirPath, FileSystem dfs) {
-    Path src = new Path(dirPath);
-    try {
-      if (dfs.isDirectory(src)) {
-        logger.info("hdfs dir dirPath=" + dirPath + "  is already exist.");
-        return;
-      }
-      boolean isDirCreated = dfs.mkdirs(src);
-      if (isDirCreated) {
-        logger.debug("HDFS dirPath=" + dirPath + " created successfully.");
-      } else {
-        logger.warn("HDFS dir creation failed dirPath=" + dirPath);
-      }
-    } catch (IOException e) {
-      logger.error("HDFS dir creation failed dirPath=" + dirPath, e.getCause());
-    }
+  private LogfeederHDFSUtil() {
+    throw new UnsupportedOperationException();
   }
-
-  public boolean copyFromLocal(String sourceFilepath, String destFilePath,
-      FileSystem fileSystem, boolean overwrite, boolean delSrc) {
+  
+  public static boolean copyFromLocal(String sourceFilepath, String destFilePath, FileSystem fileSystem, boolean overwrite,
+      boolean delSrc) {
     Path src = new Path(sourceFilepath);
     Path dst = new Path(destFilePath);
     boolean isCopied = false;
     try {
-      logger.info("copying localfile := " + sourceFilepath + " to hdfsPath := "
-          + destFilePath);
+      LOG.info("copying localfile := " + sourceFilepath + " to hdfsPath := " + destFilePath);
       fileSystem.copyFromLocalFile(delSrc, overwrite, src, dst);
       isCopied = true;
     } catch (Exception e) {
-      logger.error("Error copying local file :" + sourceFilepath
-          + " to hdfs location : " + destFilePath, e);
+      LOG.error("Error copying local file :" + sourceFilepath + " to hdfs location : " + destFilePath, e);
     }
     return isCopied;
   }
 
-  public FileSystem buildFileSystem(String hdfsHost, String hdfsPort) {
+  public static FileSystem buildFileSystem(String hdfsHost, String hdfsPort) {
     try {
       Configuration configuration = buildHdfsConfiguration(hdfsHost, hdfsPort);
       FileSystem fs = FileSystem.get(configuration);
       return fs;
     } catch (Exception e) {
-      logger.error("Exception is buildFileSystem :", e);
+      LOG.error("Exception is buildFileSystem :", e);
     }
     return null;
   }
 
-  public void closeFileSystem(FileSystem fileSystem) {
+  private static Configuration buildHdfsConfiguration(String hdfsHost, String hdfsPort) {
+    String url = "hdfs://" + hdfsHost + ":" + hdfsPort + "/";
+    Configuration configuration = new Configuration();
+    configuration.set("fs.default.name", url);
+    return configuration;
+  }
+
+  public static void closeFileSystem(FileSystem fileSystem) {
     if (fileSystem != null) {
       try {
         fileSystem.close();
       } catch (IOException e) {
-        logger.error(e.getLocalizedMessage(), e.getCause());
+        LOG.error(e.getLocalizedMessage(), e.getCause());
       }
     }
   }
-
-  public Configuration buildHdfsConfiguration(String hdfsHost, String hdfsPort) {
-    String url = "hdfs://" + hdfsHost + ":" + hdfsPort + "/";
-    Configuration configuration = new Configuration();
-    configuration.set("fs.default.name", url);
-    return configuration;
-  }
-
 }
\ No newline at end of file
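
A minimal usage sketch of the static helpers above, assuming hadoop-common is on the classpath; the NameNode host, port and file paths are placeholders:

import org.apache.ambari.logfeeder.util.LogfeederHDFSUtil;
import org.apache.hadoop.fs.FileSystem;

public class HdfsCopySketch {
  public static void main(String[] args) {
    FileSystem fs = LogfeederHDFSUtil.buildFileSystem("namenode.example.com", "8020"); // placeholder host/port
    if (fs != null) {
      boolean copied = LogfeederHDFSUtil.copyFromLocal("/tmp/app.log.gz", "/logfeeder/archive/app.log.gz",
          fs, true /* overwrite */, false /* keep the local source */);
      System.out.println("copied=" + copied);
      LogfeederHDFSUtil.closeFileSystem(fs);
    }
  }
}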

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/PlaceholderUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/PlaceholderUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/PlaceholderUtil.java
index d6c3117..13f2865 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/PlaceholderUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/PlaceholderUtil.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -21,38 +21,34 @@ import java.util.HashMap;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-public class PlaceholderUtil {
+import org.apache.commons.lang3.StringUtils;
 
-  private static Pattern placeHolderPattern;
-  static {
-    placeHolderPattern = Pattern.compile("\\$\\s*(\\w+)");
+public class PlaceholderUtil {
+  private PlaceholderUtil() {
+    throw new UnsupportedOperationException();
   }
+  
+  private static final Pattern placeHolderPattern = Pattern.compile("\\$\\s*(\\w+)");
 
-  public static String replaceVariables(String inputStr,
-      HashMap<String, String> contextParam) {
+  public static String replaceVariables(String inputStr, HashMap<String, String> contextParam) {
     Matcher m = placeHolderPattern.matcher(inputStr);
-    String placeholder;
-    String replacement;
     String output = new String(inputStr);
     while (m.find()) {
-      placeholder = m.group();
+      String placeholder = m.group();
       if (placeholder != null && !placeholder.isEmpty()) {
-        String key = placeholder.replace("$","").toLowerCase();// remove
-                                                                   // brace
-        replacement = getFromContext(contextParam, placeholder, key);
+        String key = placeholder.replace("$","").toLowerCase();// remove brace
+        String replacement = getFromContext(contextParam, placeholder, key);
         output = output.replace(placeholder, replacement);
       }
     }
     return output;
   }
 
-  private static String getFromContext(HashMap<String, String> contextParam,
-      String defaultValue, String key) {
-    String returnValue = defaultValue;// by default set default value as a
-                                      // return
+  private static String getFromContext(HashMap<String, String> contextParam, String defaultValue, String key) {
+    String returnValue = defaultValue; // fall back to the default value if the key is missing or blank
     if (contextParam != null) {
       String value = contextParam.get(key);
-      if (value != null && !value.trim().isEmpty()) {
+      if (StringUtils.isNotBlank(value)) {
         returnValue = value;
       }
     }
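
A short usage sketch of the refactored PlaceholderUtil: the \$\s*(\w+) pattern picks out $-prefixed placeholders, each key is lower-cased and looked up in the context map, and unknown placeholders are left untouched because the placeholder itself is passed as the default value. The hostname value below is a placeholder:

import java.util.HashMap;

import org.apache.ambari.logfeeder.util.PlaceholderUtil;

public class PlaceholderSketch {
  public static void main(String[] args) {
    HashMap<String, String> context = new HashMap<String, String>();
    context.put("username", System.getProperty("user.name"));
    context.put("hostname", "node-1.example.com"); // placeholder value

    // Resolves $username and $hostname from the context map.
    String resolved = PlaceholderUtil.replaceVariables("/tmp/$username/logfeeder/$hostname/", context);
    System.out.println(resolved); // e.g. /tmp/ambari/logfeeder/node-1.example.com/
  }
}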

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/S3Util.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/S3Util.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/S3Util.java
index 10ea2c2..31a38d0 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/S3Util.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/S3Util.java
@@ -19,7 +19,6 @@
 package org.apache.ambari.logfeeder.util;
 
 import java.io.BufferedReader;
-import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
@@ -27,6 +26,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.zip.GZIPInputStream;
 
+import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.commons.io.IOUtils;
 import org.apache.log4j.Logger;
 
@@ -39,22 +39,19 @@ import com.amazonaws.services.s3.model.ObjectMetadata;
 import com.amazonaws.services.s3.model.PutObjectRequest;
 import com.amazonaws.services.s3.model.S3Object;
 import com.amazonaws.services.s3.transfer.TransferManager;
-import com.amazonaws.services.s3.transfer.Upload;
 
 /**
  * Utility to connect to s3
  */
 public class S3Util {
-  public static final S3Util INSTANCE = new S3Util();
-
   private static final Logger LOG = Logger.getLogger(S3Util.class);
 
-  public static final String S3_PATH_START_WITH = "s3://";
-  public static final String S3_PATH_SEPARATOR = "/";
-
-  public AmazonS3 getS3Client(String accessKey, String secretKey) {
-    AWSCredentials awsCredentials = AWSUtil.INSTANCE.createAWSCredentials(
-        accessKey, secretKey);
+  private S3Util() {
+    throw new UnsupportedOperationException();
+  }
+  
+  public static AmazonS3 getS3Client(String accessKey, String secretKey) {
+    AWSCredentials awsCredentials = AWSUtil.createAWSCredentials(accessKey, secretKey);
     AmazonS3 s3client;
     if (awsCredentials != null) {
       s3client = new AmazonS3Client(awsCredentials);
@@ -64,9 +61,8 @@ public class S3Util {
     return s3client;
   }
 
-  public TransferManager getTransferManager(String accessKey, String secretKey) {
-    AWSCredentials awsCredentials = AWSUtil.INSTANCE.createAWSCredentials(
-        accessKey, secretKey);
+  public static TransferManager getTransferManager(String accessKey, String secretKey) {
+    AWSCredentials awsCredentials = AWSUtil.createAWSCredentials(accessKey, secretKey);
     TransferManager transferManager;
     if (awsCredentials != null) {
       transferManager = new TransferManager(awsCredentials);
@@ -76,35 +72,31 @@ public class S3Util {
     return transferManager;
   }
 
-  public void shutdownTransferManager(TransferManager transferManager) {
+  public static void shutdownTransferManager(TransferManager transferManager) {
     if (transferManager != null) {
       transferManager.shutdownNow();
     }
   }
 
-  public String getBucketName(String s3Path) {
+  public static String getBucketName(String s3Path) {
     String bucketName = null;
     // s3path
     if (s3Path != null) {
-      String[] s3PathParts = s3Path.replace(S3_PATH_START_WITH, "").split(
-          S3_PATH_SEPARATOR);
+      String[] s3PathParts = s3Path.replace(LogFeederConstants.S3_PATH_START_WITH, "").split(LogFeederConstants.S3_PATH_SEPARATOR);
       bucketName = s3PathParts[0];
     }
     return bucketName;
   }
 
-  public String getS3Key(String s3Path) {
+  public static String getS3Key(String s3Path) {
     StringBuilder s3Key = new StringBuilder();
-    // s3path
     if (s3Path != null) {
-      String[] s3PathParts = s3Path.replace(S3_PATH_START_WITH, "").split(
-          S3_PATH_SEPARATOR);
-      ArrayList<String> s3PathList = new ArrayList<String>(
-          Arrays.asList(s3PathParts));
+      String[] s3PathParts = s3Path.replace(LogFeederConstants.S3_PATH_START_WITH, "").split(LogFeederConstants.S3_PATH_SEPARATOR);
+      ArrayList<String> s3PathList = new ArrayList<String>(Arrays.asList(s3PathParts));
       s3PathList.remove(0);// remove bucketName
       for (int index = 0; index < s3PathList.size(); index++) {
         if (index > 0) {
-          s3Key.append(S3_PATH_SEPARATOR);
+          s3Key.append(LogFeederConstants.S3_PATH_SEPARATOR);
         }
         s3Key.append(s3PathList.get(index));
       }
@@ -112,63 +104,41 @@ public class S3Util {
     return s3Key.toString();
   }
 
-  public void uploadFileTos3(String bucketName, String s3Key, File localFile,
-      String accessKey, String secretKey) {
-    TransferManager transferManager = getTransferManager(accessKey, secretKey);
-    try {
-      Upload upload = transferManager.upload(bucketName, s3Key, localFile);
-      upload.waitForUploadResult();
-    } catch (AmazonClientException | InterruptedException e) {
-      LOG.error("s3 uploading failed for file :" + localFile.getAbsolutePath(),
-          e);
-    } finally {
-      shutdownTransferManager(transferManager);
-    }
-  }
-
   /**
    * Get the buffer reader to read s3 file as a stream
    */
-  public BufferedReader getReader(String s3Path, String accessKey,
-      String secretKey) throws IOException {
+  public static BufferedReader getReader(String s3Path, String accessKey, String secretKey) throws IOException {
     // TODO error handling
     // Compression support
     // read header and decide the compression(auto detection)
     // For now hard-code GZIP compression
     String s3Bucket = getBucketName(s3Path);
     String s3Key = getS3Key(s3Path);
-    S3Object fileObj = getS3Client(accessKey, secretKey).getObject(
-        new GetObjectRequest(s3Bucket, s3Key));
-    GZIPInputStream objectInputStream;
+    S3Object fileObj = getS3Client(accessKey, secretKey).getObject(new GetObjectRequest(s3Bucket, s3Key));
     try {
-      objectInputStream = new GZIPInputStream(fileObj.getObjectContent());
-      BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(
-          objectInputStream));
+      GZIPInputStream objectInputStream = new GZIPInputStream(fileObj.getObjectContent());
+      BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(objectInputStream));
       return bufferedReader;
     } catch (IOException e) {
-      LOG.error("Error in creating stream reader for s3 file :" + s3Path,
-          e.getCause());
+      LOG.error("Error in creating stream reader for s3 file :" + s3Path, e.getCause());
       throw e;
     }
   }
 
-  public void writeIntoS3File(String data, String bucketName, String s3Key,
-      String accessKey, String secretKey) {
+  public static void writeIntoS3File(String data, String bucketName, String s3Key, String accessKey, String secretKey) {
     InputStream in = null;
     try {
       in = IOUtils.toInputStream(data, "UTF-8");
     } catch (IOException e) {
       LOG.error(e);
     }
+    
     if (in != null) {
       TransferManager transferManager = getTransferManager(accessKey, secretKey);
       try {
         if (transferManager != null) {
-          transferManager.upload(
-                  new PutObjectRequest(bucketName, s3Key, in,
-                  new ObjectMetadata())).waitForUploadResult();
-          LOG.debug("Data Uploaded to s3 file :" + s3Key + " in bucket :"
-              + bucketName);
+          transferManager.upload(new PutObjectRequest(bucketName, s3Key, in, new ObjectMetadata())).waitForUploadResult();
+          LOG.debug("Data Uploaded to s3 file :" + s3Key + " in bucket :" + bucketName);
         }
       } catch (AmazonClientException | InterruptedException e) {
         LOG.error(e);
@@ -182,5 +152,4 @@ public class S3Util {
       }
     }
   }
-
 }
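
To make the refactor above easier to follow: the singleton instance and the local path constants are removed, and the path helpers become static methods that take their "s3://" prefix and "/" separator from LogFeederConstants. Below is a minimal sketch of the expected behavior, assuming LogFeederConstants carries the same two values as the removed constants; it is illustrative only and not part of this commit.

import org.apache.ambari.logfeeder.util.S3Util;

public class S3PathSketch {
  public static void main(String[] args) {
    String s3Path = "s3://my-bucket/logs/2016/09/07/hdfs.log.gz";
    // The bucket name is the first segment after the "s3://" prefix.
    String bucket = S3Util.getBucketName(s3Path); // "my-bucket"
    // The key is everything after the bucket, re-joined with "/".
    String key = S3Util.getS3Key(s3Path);         // "logs/2016/09/07/hdfs.log.gz"
    System.out.println(bucket + " " + key);
  }
}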

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
deleted file mode 100644
index 44113e1..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder.util;
-
-import java.io.IOException;
-import java.util.HashMap;
-
-import org.apache.ambari.logfeeder.logconfig.LogFeederConstants;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.apache.solr.client.solrj.SolrClient;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.SolrRequest.METHOD;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
-import org.apache.solr.client.solrj.request.CollectionAdminRequest;
-import org.apache.solr.client.solrj.response.CollectionAdminResponse;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.common.SolrDocument;
-import org.apache.solr.common.SolrDocumentList;
-import org.apache.solr.common.SolrException;
-
-public class SolrUtil {
-
-  private static final Logger logger = Logger.getLogger(SolrUtil.class);
-
-  private static SolrUtil instance = null;
-  
-  private SolrClient solrClient = null;
-  private CloudSolrClient solrClouldClient = null;
-
-  private String solrDetail = "";
-
-  private SolrUtil() throws Exception {
-    String url = LogFeederUtil.getStringProperty("logfeeder.solr.url");
-    String zkConnectString = LogFeederUtil.getStringProperty("logfeeder.solr.zk_connect_string");
-    String collection = LogFeederUtil.getStringProperty("logfeeder.solr.core.config.name", "history");
-    connectToSolr(url, zkConnectString, collection);
-  }
-
-  public static SolrUtil getInstance() {
-    if (instance == null) {
-      synchronized (SolrUtil.class) {
-        if (instance == null) {
-          try {
-            instance = new SolrUtil();
-          } catch (Exception e) {
-            final String LOG_MESSAGE_KEY = SolrUtil.class
-                .getSimpleName() + "_SOLR_UTIL";
-              LogFeederUtil.logErrorMessageByInterval(
-                LOG_MESSAGE_KEY,
-                "Error constructing solrUtil", e, logger,
-                Level.WARN);
-          }
-        }
-      }
-    }
-    return instance;
-  }
-
-  private SolrClient connectToSolr(String url, String zkConnectString,
-                                  String collection) throws Exception {
-    solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + collection
-      + ", url=" + url;
-
-    logger.info("connectToSolr() " + solrDetail);
-    if (collection == null || collection.isEmpty()) {
-      throw new Exception("For solr, collection name is mandatory. "
-        + solrDetail);
-    }
-    if (zkConnectString != null && !zkConnectString.isEmpty()) {
-      solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + collection;
-      logger.info("Using zookeepr. " + solrDetail);
-      solrClouldClient = new CloudSolrClient(zkConnectString);
-      solrClouldClient.setDefaultCollection(collection);
-      solrClient = solrClouldClient;
-      int waitDurationMS = 3 * 60 * 1000;
-      checkSolrStatus(waitDurationMS);
-    } else {
-      if (url == null || url.trim().isEmpty()) {
-        throw new Exception("Both zkConnectString and URL are empty. zkConnectString="
-          + zkConnectString + ", collection=" + collection + ", url="
-          + url);
-      }
-      solrDetail = "collection=" + collection + ", url=" + url;
-      String collectionURL = url + "/" + collection;
-      logger.info("Connecting to  solr : " + collectionURL);
-      solrClient = new HttpSolrClient(collectionURL);
-
-    }
-    return solrClient;
-  }
-
-  private boolean checkSolrStatus(int waitDurationMS) {
-    boolean status = false;
-    try {
-      long beginTimeMS = System.currentTimeMillis();
-      long waitIntervalMS = 2000;
-      int pingCount = 0;
-      while (true) {
-        pingCount++;
-        CollectionAdminResponse response = null;
-        try {
-          CollectionAdminRequest.List colListReq = new CollectionAdminRequest.List();
-          response = colListReq.process(solrClient);
-        } catch (Exception ex) {
-          logger.error("Con't connect to Solr. solrDetail=" + solrDetail, ex);
-        }
-        if (response != null && response.getStatus() == 0) {
-          logger.info("Solr getCollections() is success. solr=" + solrDetail);
-          status = true;
-          break;
-        }
-        if (System.currentTimeMillis() - beginTimeMS > waitDurationMS) {
-          logger.error("Solr is not reachable even after "
-            + (System.currentTimeMillis() - beginTimeMS)
-            + " ms. If you are using alias, then you might have to restart LogSearch after Solr is up and running. solr="
-            + solrDetail + ", response=" + response);
-          break;
-        } else {
-          logger.warn("Solr is not reachable yet. getCollections() attempt count=" + pingCount
-            + ". Will sleep for " + waitIntervalMS + " ms and try again." + " solr=" + solrDetail
-            + ", response=" + response);
-
-        }
-        Thread.sleep(waitIntervalMS);
-      }
-    } catch (Throwable t) {
-      logger.error("Seems Solr is not up. solrDetail=" + solrDetail);
-    }
-    return status;
-  }
-
-  private QueryResponse process(SolrQuery solrQuery) throws SolrServerException, IOException, SolrException {
-    if (solrClient != null) {
-      QueryResponse queryResponse = solrClient.query(solrQuery, METHOD.POST);
-      return queryResponse;
-    } else {
-      logger.error("solrClient can't be null");
-      return null;
-    }
-  }
-
-  public HashMap<String, Object> getConfigDoc() {
-    HashMap<String, Object> configMap = new HashMap<String, Object>();
-    SolrQuery solrQuery = new SolrQuery();
-    solrQuery.setQuery("*:*");
-    String fq = LogFeederConstants.ROW_TYPE + ":" + LogFeederConstants.LOGFEEDER_FILTER_NAME;
-    solrQuery.setFilterQueries(fq);
-    try {
-      QueryResponse response = process(solrQuery);
-      if (response != null) {
-        SolrDocumentList documentList = response.getResults();
-        if (documentList != null && documentList.size() > 0) {
-          SolrDocument configDoc = documentList.get(0);
-          String configJson = LogFeederUtil.getGson().toJson(configDoc);
-          configMap = (HashMap<String, Object>) LogFeederUtil
-              .toJSONObject(configJson);
-        }
-      }
-    } catch (Exception e) {
-      final String logMessageKey = this.getClass().getSimpleName()
-          + "_FETCH_FILTER_CONFIG_ERROR";
-      LogFeederUtil.logErrorMessageByInterval(logMessageKey,
-          "Error getting filter config from solr", e, logger, Level.ERROR);
-    }
-    return configMap;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/view/VLogfeederFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/view/VLogfeederFilter.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/view/VLogfeederFilter.java
deleted file mode 100644
index f030040..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/view/VLogfeederFilter.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder.view;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.codehaus.jackson.annotate.JsonAutoDetect;
-import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY)
-@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class VLogfeederFilter {
-
-  private String label;
-  private List<String> hosts;
-  private List<String> defaultLevels;
-  private List<String> overrideLevels;
-  private String expiryTime;
-
-  public VLogfeederFilter() {
-    hosts = new ArrayList<String>();
-    defaultLevels = new ArrayList<String>();
-    overrideLevels = new ArrayList<String>();
-  }
-
-  public String getLabel() {
-    return label;
-  }
-
-  public void setLabel(String label) {
-    this.label = label;
-  }
-
-  public List<String> getHosts() {
-    return hosts;
-  }
-
-  public void setHosts(List<String> hosts) {
-    this.hosts = hosts;
-  }
-
-  public List<String> getDefaultLevels() {
-    return defaultLevels;
-  }
-
-  public void setDefaultLevels(List<String> defaultLevels) {
-    this.defaultLevels = defaultLevels;
-  }
-
-  public List<String> getOverrideLevels() {
-    return overrideLevels;
-  }
-
-  public void setOverrideLevels(List<String> overrideLevels) {
-    this.overrideLevels = overrideLevels;
-  }
-
-  public String getExpiryTime() {
-    return expiryTime;
-  }
-
-  public void setExpiryTime(String expiryTime) {
-    this.expiryTime = expiryTime;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/view/VLogfeederFilterWrapper.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/view/VLogfeederFilterWrapper.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/view/VLogfeederFilterWrapper.java
deleted file mode 100644
index 4ddef3f..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/view/VLogfeederFilterWrapper.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder.view;
-
-import java.util.HashMap;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlRootElement;
-
-import org.codehaus.jackson.annotate.JsonAutoDetect;
-import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-@JsonAutoDetect(getterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE, fieldVisibility = Visibility.ANY)
-@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class VLogfeederFilterWrapper {
-
-  private HashMap<String, VLogfeederFilter> filter;
-  private String id;
-
-  public HashMap<String, VLogfeederFilter> getFilter() {
-    return filter;
-  }
-
-  public void setFilter(HashMap<String, VLogfeederFilter> filter) {
-    this.filter = filter;
-  }
-
-  public String getId() {
-    return id;
-  }
-
-  public void setId(String id) {
-    this.id = id;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/AppTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/AppTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/AppTest.java
deleted file mode 100644
index 193cb48..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/AppTest.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logfeeder;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.ambari.logfeeder.filter.FilterGrok;
-import org.apache.log4j.Logger;
-
-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-
-/**
- * Unit test for simple App.
- */
-public class AppTest extends TestCase {
-  static Logger logger = Logger.getLogger(AppTest.class);
-
-  /**
-   * Create the test case
-   *
-   * @param testName name of the test case
-   */
-  public AppTest(String testName) {
-    super(testName);
-  }
-
-  /**
-   * @return the suite of tests being tested
-   */
-  public static Test suite() {
-    return new TestSuite(AppTest.class);
-  }
-
-  /**
-   * Rigourous Test :-)
-   */
-  public void testApp() {
-    assertTrue(true);
-  }
-
-  public void testGrok() {
-    logger.info("testGrok()");
-    FilterGrok grokFilter = new FilterGrok();
-    try {
-      Map<String, Object> map = new HashMap<String, Object>();
-      map.put("message_pattern",
-        "^%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
-      grokFilter.loadConfig(map);
-      grokFilter.init();
-      String out = grokFilter.grokParse("INFO This is a test");
-      logger.info("out=" + out);
-
-    } catch (Exception e) {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
-      assertFalse(true);
-    }
-
-    assertTrue(true);
-  }
-
-  public void testGrokUGI() {
-    logger.info("testGrok()");
-    String[] ugis = new String[]{"user1@xyz.com (auth:TOKEN)",
-      "ambari-qa@example.com (auth:kerberos)",
-      "my_user@example.com (auth:kerberos)",
-      "hive/bdurai-dojran-2.novalocal@example.com (auth:kerberos)",
-      "just_me",
-      "ambari-qa (auth:PROXY) via hive/myhost.novalocal@EXAMPLE.COM (auth:KERBEROS)"};
-
-    FilterGrok grokFilter = new FilterGrok();
-    try {
-      Map<String, Object> map = new HashMap<String, Object>();
-      // map.put("message_pattern",
-      // "(?<user>([\\w\\d\\-]+))\\/|(?<user>([\\w\\d\\-]+))@|(?<user>([\\w\\d\\-]+))/[\\w\\d\\-.]+@|(?<user>([\\w\\d.\\-_]+))[\\s(]+");
-      // map.put("message_pattern",
-      // "(?<user>([\\w\\d\\-]+))/[\\w\\d\\-.]+@");
-      // *(auth:(?<auth>[\\w\\d\\-]+))
-      // GOOD: map.put("message_pattern", "(?<user>([\\w\\d\\-]+)).+auth:(?<auth>([\\w\\d\\-]+))");
-      // OK: map.put("message_pattern", "(?<user>([\\w\\d\\-]+)).+auth:(?<auth>([\\w\\d\\-]+))|%{USERNAME:xuser}");
-      //map.put("message_pattern", "%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}");
-      map.put("message_pattern", "%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}");
-      grokFilter.loadConfig(map);
-      grokFilter.init();
-      for (String ugi : ugis) {
-        String out = grokFilter.grokParse(ugi);
-        logger.info(ugi + "=" + out);
-      }
-
-    } catch (Exception e) {
-      // TODO Auto-generated catch block
-      e.printStackTrace();
-      assertFalse(true);
-    }
-    assertTrue(true);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java
index 3aa8d7b..99565c5 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -23,7 +23,7 @@ import java.util.Map;
 
 import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.output.OutputMgr;
+import org.apache.ambari.logfeeder.output.OutputManager;
 import org.apache.log4j.Logger;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
@@ -40,16 +40,16 @@ public class FilterGrokTest {
   private static final Logger LOG = Logger.getLogger(FilterGrokTest.class);
 
   private FilterGrok filterGrok;
-  private OutputMgr mockOutputMgr;
+  private OutputManager mockOutputManager;
   private Capture<Map<String, Object>> capture;
 
   public void init(Map<String, Object> config) throws Exception {
-    mockOutputMgr = EasyMock.strictMock(OutputMgr.class);
+    mockOutputManager = EasyMock.strictMock(OutputManager.class);
     capture = EasyMock.newCapture(CaptureType.LAST);
 
     filterGrok = new FilterGrok();
     filterGrok.loadConfig(config);
-    filterGrok.setOutputMgr(mockOutputMgr);
+    filterGrok.setOutputManager(mockOutputManager);
     filterGrok.setInput(EasyMock.mock(Input.class));
     filterGrok.init();
   }
@@ -59,19 +59,18 @@ public class FilterGrokTest {
     LOG.info("testFilterGrok_parseMessage()");
 
     Map<String, Object> config = new HashMap<String, Object>();
-    config.put("message_pattern",
-        "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
+    config.put("message_pattern", "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
     config.put("multiline_pattern", "^(%{TIMESTAMP_ISO8601:logtime})");
     init(config);
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
-    filterGrok.apply("2016-04-08 15:55:23,548 INFO This is a test", new InputMarker());
-    filterGrok.apply("2016-04-08 15:55:24,548 WARN Next message", new InputMarker());
+    filterGrok.apply("2016-04-08 15:55:23,548 INFO This is a test", new InputMarker(null, null, 0));
+    filterGrok.apply("2016-04-08 15:55:24,548 WARN Next message", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     Map<String, Object> jsonParams = capture.getValue();
 
     assertNotNull(jsonParams);
@@ -86,23 +85,22 @@ public class FilterGrokTest {
     LOG.info("testFilterGrok_parseMultiLineMessage()");
 
     Map<String, Object> config = new HashMap<String, Object>();
-    config.put("message_pattern",
-        "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
+    config.put("message_pattern", "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
     config.put("multiline_pattern", "^(%{TIMESTAMP_ISO8601:logtime})");
     init(config);
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
     String multiLineMessage = "This is a multiline test message\r\n" + "having multiple lines\r\n"
         + "as one may expect";
     String[] messageLines = multiLineMessage.split("\r\n");
     for (int i = 0; i < messageLines.length; i++)
-      filterGrok.apply((i == 0 ? "2016-04-08 15:55:23,548 INFO " : "") + messageLines[i], new InputMarker());
+      filterGrok.apply((i == 0 ? "2016-04-08 15:55:23,548 INFO " : "") + messageLines[i], new InputMarker(null, null, 0));
     filterGrok.flush();
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     Map<String, Object> jsonParams = capture.getValue();
 
     assertNotNull(jsonParams);
@@ -117,19 +115,18 @@ public class FilterGrokTest {
     LOG.info("testFilterGrok_notMatchingMesagePattern()");
 
     Map<String, Object> config = new HashMap<String, Object>();
-    config.put("message_pattern",
-        "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
+    config.put("message_pattern", "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
     config.put("multiline_pattern", "^(%{TIMESTAMP_ISO8601:logtime})");
     init(config);
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall().anyTimes();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
-    filterGrok.apply("04/08/2016 15:55:23,548 INFO This is a test", new InputMarker());
-    filterGrok.apply("04/08/2016 15:55:24,548 WARN Next message", new InputMarker());
+    filterGrok.apply("04/08/2016 15:55:23,548 INFO This is a test", new InputMarker(null, null, 0));
+    filterGrok.apply("04/08/2016 15:55:24,548 WARN Next message", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     assertFalse("Something was captured!", capture.hasCaptured());
   }
 
@@ -141,12 +138,12 @@ public class FilterGrokTest {
     config.put("multiline_pattern", "^(%{TIMESTAMP_ISO8601:logtime})");
     init(config);
 
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
-    filterGrok.apply("2016-04-08 15:55:23,548 INFO This is a test", new InputMarker());
-    filterGrok.apply("2016-04-08 15:55:24,548 WARN Next message", new InputMarker());
+    filterGrok.apply("2016-04-08 15:55:23,548 INFO This is a test", new InputMarker(null, null, 0));
+    filterGrok.apply("2016-04-08 15:55:24,548 WARN Next message", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     assertFalse("Something was captured", capture.hasCaptured());
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java
index 64e9b69..06d8db2 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -27,8 +27,7 @@ import java.util.TimeZone;
 
 import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.output.OutputMgr;
-import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.ambari.logfeeder.output.OutputManager;
 import org.apache.log4j.Logger;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
@@ -44,16 +43,16 @@ public class FilterJSONTest {
   private static final Logger LOG = Logger.getLogger(FilterJSONTest.class);
 
   private FilterJSON filterJson;
-  private OutputMgr mockOutputMgr;
+  private OutputManager mockOutputManager;
   private Capture<Map<String, Object>> capture;
 
   public void init(Map<String, Object> params) throws Exception {
-    mockOutputMgr = EasyMock.strictMock(OutputMgr.class);
+    mockOutputManager = EasyMock.strictMock(OutputManager.class);
     capture = EasyMock.newCapture(CaptureType.LAST);
 
     filterJson = new FilterJSON();
     filterJson.loadConfig(params);
-    filterJson.setOutputMgr(mockOutputMgr);
+    filterJson.setOutputManager(mockOutputManager);
     filterJson.init();
   }
 
@@ -63,17 +62,17 @@ public class FilterJSONTest {
 
     init(new HashMap<String, Object>());
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
     Date d = new Date();
-    DateFormat sdf = new SimpleDateFormat(LogFeederUtil.SOLR_DATE_FORMAT);
+    DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
     sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
     String dateString = sdf.format(d);
-    filterJson.apply("{ logtime: '" + d.getTime() + "', line_number: 100 }", new InputMarker());
+    filterJson.apply("{ logtime: '" + d.getTime() + "', line_number: 100 }", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     Map<String, Object> jsonParams = capture.getValue();
 
     assertEquals("Incorrect decoding: log time", dateString, jsonParams.remove("logtime"));
@@ -87,17 +86,17 @@ public class FilterJSONTest {
 
     init(new HashMap<String, Object>());
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
     Date d = new Date();
-    DateFormat sdf = new SimpleDateFormat(LogFeederUtil.SOLR_DATE_FORMAT);
+    DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
     sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
     String dateString = sdf.format(d);
-    filterJson.apply("{ logtime: '" + d.getTime() + "', some_field: 'abc' }", new InputMarker());
+    filterJson.apply("{ logtime: '" + d.getTime() + "', some_field: 'abc' }", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     Map<String, Object> jsonParams = capture.getValue();
 
     assertEquals("Incorrect decoding: log time", dateString, jsonParams.remove("logtime"));
@@ -111,13 +110,13 @@ public class FilterJSONTest {
 
     init(new HashMap<String, Object>());
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
-    filterJson.apply("{ line_number: 100, some_field: 'abc' }", new InputMarker());
+    filterJson.apply("{ line_number: 100, some_field: 'abc' }", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     Map<String, Object> jsonParams = capture.getValue();
 
     assertEquals("Incorrect decoding: line number", 100l, jsonParams.remove("line_number"));
@@ -132,7 +131,7 @@ public class FilterJSONTest {
     init(new HashMap<String, Object>());
     String inputStr="invalid json";
     try{
-    filterJson.apply(inputStr,new InputMarker());
+    filterJson.apply(inputStr,new InputMarker(null, null, 0));
     fail("Expected LogfeederException was not occured");
     }catch(LogfeederException logfeederException){
       assertEquals("Json parsing failed for inputstr = "+inputStr, logfeederException.getLocalizedMessage());
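
The timestamp changes above inline the Solr date format that the test previously read from LogFeederUtil.SOLR_DATE_FORMAT. A small stand-alone sketch of that format, assuming the removed constant held the same pattern string (illustrative only):

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class SolrDateFormatSketch {
  public static void main(String[] args) {
    // Same pattern the updated tests now construct inline.
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
    sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
    System.out.println(sdf.format(new Date())); // e.g. 2016-09-07T23:37:51.000Z
  }
}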

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java
index 849e4c3..30cee42 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -22,7 +22,7 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.output.OutputMgr;
+import org.apache.ambari.logfeeder.output.OutputManager;
 import org.apache.log4j.Logger;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
@@ -38,16 +38,16 @@ public class FilterKeyValueTest {
   private static final Logger LOG = Logger.getLogger(FilterKeyValueTest.class);
 
   private FilterKeyValue filterKeyValue;
-  private OutputMgr mockOutputMgr;
+  private OutputManager mockOutputManager;
   private Capture<Map<String, Object>> capture;
 
   public void init(Map<String, Object> config) throws Exception {
-    mockOutputMgr = EasyMock.strictMock(OutputMgr.class);
+    mockOutputManager = EasyMock.strictMock(OutputManager.class);
     capture = EasyMock.newCapture(CaptureType.LAST);
 
     filterKeyValue = new FilterKeyValue();
     filterKeyValue.loadConfig(config);
-    filterKeyValue.setOutputMgr(mockOutputMgr);
+    filterKeyValue.setOutputManager(mockOutputManager);
     filterKeyValue.init();
   }
 
@@ -61,13 +61,13 @@ public class FilterKeyValueTest {
     // using default value split:
     init(config);
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
-    filterKeyValue.apply("{ keyValueField: 'name1=value1&name2=value2' }", new InputMarker());
+    filterKeyValue.apply("{ keyValueField: 'name1=value1&name2=value2' }", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     Map<String, Object> jsonParams = capture.getValue();
 
     assertEquals("Original missing!", "name1=value1&name2=value2", jsonParams.remove("keyValueField"));
@@ -85,13 +85,13 @@ public class FilterKeyValueTest {
     // using default value split: =
     init(config);
 
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall().anyTimes();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
-    filterKeyValue.apply("{ keyValueField: 'name1=value1&name2=value2' }", new InputMarker());
+    filterKeyValue.apply("{ keyValueField: 'name1=value1&name2=value2' }", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     assertFalse("Something was captured!", capture.hasCaptured());
   }
 
@@ -105,13 +105,13 @@ public class FilterKeyValueTest {
     init(config);
 
     // using default value split: =
-    mockOutputMgr.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
+    mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall().anyTimes();
-    EasyMock.replay(mockOutputMgr);
+    EasyMock.replay(mockOutputManager);
 
-    filterKeyValue.apply("{ otherField: 'name1=value1&name2=value2' }", new InputMarker());
+    filterKeyValue.apply("{ otherField: 'name1=value1&name2=value2' }", new InputMarker(null, null, 0));
 
-    EasyMock.verify(mockOutputMgr);
+    EasyMock.verify(mockOutputManager);
     Map<String, Object> jsonParams = capture.getValue();
 
     assertEquals("Original missing!", "name1=value1&name2=value2", jsonParams.remove("otherField"));

http://git-wip-us.apache.org/repos/asf/ambari/blob/bc7e0aa7/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
index 42e81da..08aa564 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -104,7 +104,7 @@ public class InputFileTest {
 
     inputFile = new InputFile();
     inputFile.loadConfig(config);
-    inputFile.setFirstFilter(capture);
+    inputFile.addFilter(capture);
     inputFile.init();
   }
 
@@ -117,10 +117,10 @@ public class InputFileTest {
 
     init(testFile.getAbsolutePath());
 
-    InputMgr inputMgr = EasyMock.createStrictMock(InputMgr.class);
-    EasyMock.expect(inputMgr.getCheckPointFolderFile()).andReturn(checkPointDir);
-    EasyMock.replay(inputMgr);
-    inputFile.setInputMgr(inputMgr);
+    InputManager inputManager = EasyMock.createStrictMock(InputManager.class);
+    EasyMock.expect(inputManager.getCheckPointFolderFile()).andReturn(checkPointDir);
+    EasyMock.replay(inputManager);
+    inputFile.setInputManager(inputManager);
 
     inputFile.isReady();
     inputFile.start();
@@ -129,7 +129,7 @@ public class InputFileTest {
     for (int row = 0; row < 3; row++)
       assertEquals("Row #" + (row + 1) + " not correct", TEST_LOG_FILE_ROWS[row], rows.get(row));
 
-    EasyMock.verify(inputMgr);
+    EasyMock.verify(inputManager);
   }
 
   @Test
@@ -140,10 +140,10 @@ public class InputFileTest {
     File testFile = createFile("process6.log");
     init(testFile.getAbsolutePath());
 
-    InputMgr inputMgr = EasyMock.createStrictMock(InputMgr.class);
-    EasyMock.expect(inputMgr.getCheckPointFolderFile()).andReturn(checkPointDir).times(2);
-    EasyMock.replay(inputMgr);
-    inputFile.setInputMgr(inputMgr);
+    InputManager inputManager = EasyMock.createStrictMock(InputManager.class);
+    EasyMock.expect(inputManager.getCheckPointFolderFile()).andReturn(checkPointDir).times(2);
+    EasyMock.replay(inputManager);
+    inputFile.setInputManager(inputManager);
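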
 
     inputFile.isReady();
     inputFile.start();
@@ -155,7 +155,7 @@ public class InputFileTest {
     for (int row = 0; row < 6; row++)
       assertEquals("Row #" + (row + 1) + " not correct", TEST_LOG_FILE_ROWS[row], rows.get(row));
 
-    EasyMock.verify(inputMgr);
+    EasyMock.verify(inputManager);
   }
 
   @Test


[26/50] [abbrv] ambari git commit: AMBARI-18301. Log search, url params description. (Dharmesh Makwana via oleewere)

Posted by ol...@apache.org.
AMBARI-18301. Log search, url params description. (Dharmesh Makwana via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/82419b17
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/82419b17
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/82419b17

Branch: refs/heads/branch-dev-logsearch
Commit: 82419b17a288e4cd2c30620307b16ceeb8f4cbc7
Parents: 42ad402
Author: oleewere <ol...@gmail.com>
Authored: Fri Sep 2 12:02:39 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Thu Sep 8 01:34:00 2016 +0200

----------------------------------------------------------------------
 .../ambari/logsearch/doc/DocConstants.java      | 152 +++++++++----------
 1 file changed, 76 insertions(+), 76 deletions(-)
----------------------------------------------------------------------
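
The descriptions added below are plain strings; the sketch here shows one hypothetical way a REST resource could surface them in generated API documentation, assuming the portal wires DocConstants into Swagger (io.swagger) annotations, which is not shown in this diff.

import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.QueryParam;

import static org.apache.ambari.logsearch.doc.DocConstants.ServiceDescriptions.LEVEL_D;
import static org.apache.ambari.logsearch.doc.DocConstants.ServiceOperationDescriptions.SEARCH_LOGS_OD;

@Path("/service/logs")
public class ServiceLogsResourceSketch {

  // The description constants become the operation and parameter help text.
  @GET
  @ApiOperation(SEARCH_LOGS_OD)
  public String searchLogs(@ApiParam(LEVEL_D) @QueryParam("level") String level) {
    return "{}"; // placeholder; the real resource returns search results
  }
}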


http://git-wip-us.apache.org/repos/asf/ambari/blob/82419b17/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
index c14fc08..0ceb76b 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
@@ -21,96 +21,96 @@ package org.apache.ambari.logsearch.doc;
 public class DocConstants {
 
   public class CommonDescriptions {
-    public static final String X_AXIS_D = "";
-    public static final String Y_AXIS_D = "";
-    public static final String STACK_BY_D = "";
-    public static final String EXCLUDE_QUERY_D = "";
-    public static final String INCLUDE_QUERY_D = "";
-    public static final String MUST_BE_D = "";
-    public static final String MUST_NOT_D = "";
-    public static final String FROM_D = "";
-    public static final String TO_D = "";
-    public static final String UNIT_D = "";
-    public static final String QUERY_D = "";
-    public static final String COLUMN_QUERY_D = "";
-    public static final String I_MESSAGE_D = "";
-    public static final String G_E_MESSAGE_D = "";
-    public static final String E_MESSAGE_D = "";
+    public static final String X_AXIS_D = "The column which can be value for x-axis in graph formation";
+    public static final String Y_AXIS_D = "The column which can be value for y-axis in graph formation";
+    public static final String STACK_BY_D = "The graph property for stacking the plot";
+    public static final String EXCLUDE_QUERY_D = "Exclude the values in query result e.g.: [{message:*timeout*}]";
+    public static final String INCLUDE_QUERY_D = "Include the values in query result e.g.: [{message:*exception*}]";
+    public static final String MUST_BE_D = "Include the components, comma separated values";
+    public static final String MUST_NOT_D = "Exclude the components, comma separated values";
+    public static final String FROM_D = "Date range param, start date";
+    public static final String TO_D = "Date range param, end date";
+    public static final String UNIT_D = "Aggregate the data with time gap as unit, e.g. 1MINUTE";
+    public static final String QUERY_D = "not required";
+    public static final String COLUMN_QUERY_D = "not required";
+    public static final String I_MESSAGE_D = "Include query which will query against the message column";
+    public static final String G_E_MESSAGE_D = "not required";
+    public static final String E_MESSAGE_D = "Exclude query which will query against the message column";
     public static final String IS_LAST_PAGE_D = "";
-    public static final String FIELD_D = "";
-    public static final String FORMAT_D = "";
+    public static final String FIELD_D = "Get top ten values for particular field";
+    public static final String FORMAT_D = "File Export format, can be 'txt' or 'json'";
   }
 
   public class AuditOperationDescriptions {
-    public static final String GET_AUDIT_SCHEMA_FIELD_LIST_OD = "";
-    public static final String GET_AUDIT_LOGS_OD = "";
-    public static final String GET_AUDIT_COMPONENTS_OD = "";
-    public static final String GET_AUDIT_LINE_GRAPH_DATA_OD = "";
-    public static final String GET_TOP_AUDIT_USERS_OD = "";
-    public static final String GET_TOP_AUDIT_RESOURCES_OD = "";
-    public static final String GET_TOP_AUDIT_COMPONENTS_OD = "";
-    public static final String GET_LIVE_LOGS_COUNT_OD = "";
-    public static final String GET_REQUEST_USER_LINE_GRAPH_OD = "";
-    public static final String GET_ANY_GRAPH_DATA_OD = "";
-    public static final String EXPORT_USER_TALBE_TO_TEXT_FILE_OD = "";
-    public static final String GET_SERVICE_LOAD_OD = "";
+    public static final String GET_AUDIT_SCHEMA_FIELD_LIST_OD = "Get list of schema fields in audit collection";
+    public static final String GET_AUDIT_LOGS_OD = "Get the list of logs details";
+    public static final String GET_AUDIT_COMPONENTS_OD = "Get the list of audit components currently active or having data in Solr";
+    public static final String GET_AUDIT_LINE_GRAPH_DATA_OD = "Get the data required for line graph";
+    public static final String GET_TOP_AUDIT_USERS_OD = "Get the top audit users having maximum access";
+    public static final String GET_TOP_AUDIT_RESOURCES_OD = "Get the top audit resources having maximum access";
+    public static final String GET_TOP_AUDIT_COMPONENTS_OD = "not required";
+    public static final String GET_LIVE_LOGS_COUNT_OD = "not required";
+    public static final String GET_REQUEST_USER_LINE_GRAPH_OD = "not required";
+    public static final String GET_ANY_GRAPH_DATA_OD = "Get the data generic enough to use for graph plots";
+    public static final String EXPORT_USER_TALBE_TO_TEXT_FILE_OD = "Export the tables shown on Audit tab";
+    public static final String GET_SERVICE_LOAD_OD = "The graph for showing the top users accessing the services";
   }
 
   public class ServiceDescriptions {
-    public static final String LEVEL_D = "";
-    public static final String ADVANCED_SEARCH_D = "";
-    public static final String TREE_PARAMS_D = "";
-    public static final String START_TIME_D = "";
-    public static final String END_TIME_D = "";
-    public static final String FILE_NAME_D = "";
-    public static final String HOST_NAME_D = "";
-    public static final String COMPONENT_NAME_D = "";
-    public static final String FIND_D = "";
-    public static final String ID_D = "";
-    public static final String HOST_D = "";
-    public static final String COMPONENT_D = "";
-    public static final String KEYWORD_TYPE_D = "";
-    public static final String TOKEN_D = "";
-    public static final String SOURCE_LOG_ID_D = "";
-    public static final String G_MUST_NOT_D = "";
-    public static final String NUMBER_ROWS_D = "";
-    public static final String SCROLL_TYPE_D = "";
-    public static final String UTC_OFFSET_D = "";
+    public static final String LEVEL_D = "filter for log level";
+    public static final String ADVANCED_SEARCH_D = "not required";
+    public static final String TREE_PARAMS_D = "Host hierarchy shown on UI; filtering there is supported by this param";
+    public static final String START_TIME_D = "Date range param which is supported from browser url";
+    public static final String END_TIME_D = "Date range param which is supported from browser url";
+    public static final String FILE_NAME_D = "File name filter which is supported from browser url";
+    public static final String HOST_NAME_D = "Host name filter which is supported from browser url";
+    public static final String COMPONENT_NAME_D = "Component name filter which is supported from browser url";
+    public static final String FIND_D = "Finding particular text on subsequent pages in case of table view with pagination";
+    public static final String ID_D = "Log id value for traversing to that particular record with that log id";
+    public static final String HOST_D = "filter for host";
+    public static final String COMPONENT_D = "filter for component";
+    public static final String KEYWORD_TYPE_D = "Searching the find param value in previous or next direction in the paginated table";
+    public static final String TOKEN_D = "unique number used along with FIND_D. The request can be canceled using this token";
+    public static final String SOURCE_LOG_ID_D = "fetch the record set having that log Id";
+    public static final String G_MUST_NOT_D = "not required";
+    public static final String NUMBER_ROWS_D = "Getting rows after particular log entry - used in 'Preview' option";
+    public static final String SCROLL_TYPE_D = "Used in 'Preview' feature for getting records 'after' or 'before'";
+    public static final String UTC_OFFSET_D = "timezone offset";
   }
 
   public class ServiceOperationDescriptions {
-    public static final String SEARCH_LOGS_OD = "";
-    public static final String GET_HOSTS_OD = "";
-    public static final String GET_COMPONENTS_OD = "";
-    public static final String GET_AGGREGATED_INFO_OD = "";
-    public static final String GET_LOG_LEVELS_COUNT_OD = "";
-    public static final String GET_COMPONENTS_COUNT_OD = "";
-    public static final String GET_HOSTS_COUNT_OD = "";
-    public static final String GET_TREE_EXTENSION_OD = "";
-    public static final String GET_HISTOGRAM_DATA_OD = "";
-    public static final String CANCEL_FIND_REQUEST_OD = "";
-    public static final String EXPORT_TO_TEXT_FILE_OD = "";
-    public static final String GET_COMPONENT_LIST_WITH_LEVEL_COUNT_OD = "";
-    public static final String GET_EXTREME_DATES_FOR_BUNDLE_ID_OD = "";
-    public static final String GET_SERVICE_LOGS_FIELD_NAME_OD = "";
-    public static final String GET_ANY_GRAPH_DATA_OD = "";
-    public static final String GET_AFTER_BEFORE_LOGS_OD = "";
-    public static final String GET_HOST_LIST_BY_COMPONENT_OD = "";
-    public static final String GET_SERVICE_LOGS_SCHEMA_FIELD_NAME_OD = "";
-    public static final String GET_HADOOP_SERVICE_CONFIG_JSON_OD = "";
+    public static final String SEARCH_LOGS_OD = "Searching logs entry";
+    public static final String GET_HOSTS_OD = "Get the list of service hosts currently active or having data in Solr";
+    public static final String GET_COMPONENTS_OD = "Get the list of service components currently active or having data in Solr";
+    public static final String GET_AGGREGATED_INFO_OD = "not required";
+    public static final String GET_LOG_LEVELS_COUNT_OD = "Get Log levels with their counts";
+    public static final String GET_COMPONENTS_COUNT_OD = "Get components with their counts";
+    public static final String GET_HOSTS_COUNT_OD = "Get hosts with their counts";
+    public static final String GET_TREE_EXTENSION_OD = "Get host and components hierarchy";
+    public static final String GET_HISTOGRAM_DATA_OD = "Get data for histogram";
+    public static final String CANCEL_FIND_REQUEST_OD = "Cancel the FIND_D param request using TOKEN_D";
+    public static final String EXPORT_TO_TEXT_FILE_OD = "Export the table data in file";
+    public static final String GET_COMPONENT_LIST_WITH_LEVEL_COUNT_OD = "Get components with log level distribution count";
+    public static final String GET_EXTREME_DATES_FOR_BUNDLE_ID_OD = "Get the start and end time of particular bundle_id";
+    public static final String GET_SERVICE_LOGS_FIELD_NAME_OD = "Get service logs schema field names (human readable)";
+    public static final String GET_ANY_GRAPH_DATA_OD = "Get the data generic enough to use for graph plots";
+    public static final String GET_AFTER_BEFORE_LOGS_OD = "Preview feature data";
+    public static final String GET_HOST_LIST_BY_COMPONENT_OD = "Get host list of components";
+    public static final String GET_SERVICE_LOGS_SCHEMA_FIELD_NAME_OD = "Get service logs schema fields";
+    public static final String GET_HADOOP_SERVICE_CONFIG_JSON_OD = "Get the json having metadata of services supported by logsearch";
   }
 
 
   public class LogFileDescriptions {
-    public static final String HOST_D = "";
-    public static final String COMPONENT_D = "";
-    public static final String LOG_TYPE_D = "";
-    public static final String TAIL_SIZE_D = "";
+    public static final String HOST_D = "not required";
+    public static final String COMPONENT_D = "not required";
+    public static final String LOG_TYPE_D = "not required";
+    public static final String TAIL_SIZE_D = "not required";
   }
 
   public class LogFileOperationDescriptions {
-    public static final String SEARCH_LOG_FILES_OD = "";
-    public static final String GET_LOG_FILE_TAIL_OD = "";
+    public static final String SEARCH_LOG_FILES_OD = "not required";
+    public static final String GET_LOG_FILE_TAIL_OD = "not required";
   }
 
   public class PublicOperationDescriptions {
@@ -118,9 +118,9 @@ public class DocConstants {
   }
 
   public class UserConfigDescriptions {
-    public static final String USER_ID_D = "";
-    public static final String FILTER_NAME_D = "";
-    public static final String ROW_TYPE_D = "";
+    public static final String USER_ID_D = "Get config for a particular user id";
+    public static final String FILTER_NAME_D = "The saved query as filter in Solr; search is supported by this param";
+    public static final String ROW_TYPE_D = "Row type in Solr to identify it as a filter query";
   }
 
   public class UserConfigOperationDescriptions {