You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by ol...@apache.org on 2016/10/19 15:56:59 UTC

[1/6] ambari git commit: AMBARI-18606. Improve Audit Log processing by Logfeeder (Miklos Gergely via oleewere)

Repository: ambari
Updated Branches:
  refs/heads/logsearch-ga ccc892565 -> d5e152afc


http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/docker/test-logs/ambari-server/ambari-audit.log
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-logs/ambari-server/ambari-audit.log b/ambari-logsearch/docker/test-logs/ambari-server/ambari-audit.log
new file mode 100644
index 0000000..90e24e3
--- /dev/null
+++ b/ambari-logsearch/docker/test-logs/ambari-server/ambari-audit.log
@@ -0,0 +1,390 @@
+2016-10-03T16:26:13.333Z, User(admin), RemoteIp(192.168.64.1), Operation(User login), Roles(
+    Ambari: Ambari Administrator
+), Status(Success)
+2016-10-03T16:26:54.834Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/redhat6/repositories/HDP-UTILS-1.1.0.21), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(redhat6), Repo id(HDP-UTILS-1.1.0.21), Base URL(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6)
+2016-10-03T16:26:54.845Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/redhat7/repositories/HDP-2.5), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(redhat7), Repo id(HDP-2.5), Base URL(http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.5.0.0/)
+2016-10-03T16:26:54.847Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/redhat6/repositories/HDP-2.5), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(redhat6), Repo id(HDP-2.5), Base URL(http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.5.0.0/)
+2016-10-03T16:26:54.857Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/debian7/repositories/HDP-UTILS-1.1.0.21), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(debian7), Repo id(HDP-UTILS-1.1.0.21), Base URL(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/debian7)
+2016-10-03T16:26:54.857Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/redhat7/repositories/HDP-UTILS-1.1.0.21), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(redhat7), Repo id(HDP-UTILS-1.1.0.21), Base URL(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos7)
+2016-10-03T16:26:54.860Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/debian7/repositories/HDP-2.5), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(debian7), Repo id(HDP-2.5), Base URL(http://public-repo-1.hortonworks.com/HDP/debian7/2.x/updates/2.5.0.0)
+2016-10-03T16:26:54.935Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/suse12/repositories/HDP-UTILS-1.1.0.21), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(suse12), Repo id(HDP-UTILS-1.1.0.21), Base URL(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/sles12)
+2016-10-03T16:26:54.943Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/suse11/repositories/HDP-2.5), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(suse11), Repo id(HDP-2.5), Base URL(http://public-repo-1.hortonworks.com/HDP/suse11sp3/2.x/updates/2.5.0.0)
+2016-10-03T16:26:54.945Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/ubuntu12/repositories/HDP-2.5), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(ubuntu12), Repo id(HDP-2.5), Base URL(http://public-repo-1.hortonworks.com/HDP/ubuntu12/2.x/updates/2.5.0.0)
+2016-10-03T16:26:54.951Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/suse12/repositories/HDP-2.5), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(suse12), Repo id(HDP-2.5), Base URL(http://public-repo-1.hortonworks.com/HDP/sles12/2.x/updates/2.5.0.0)
+2016-10-03T16:26:54.954Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/suse11/repositories/HDP-UTILS-1.1.0.21), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(suse11), Repo id(HDP-UTILS-1.1.0.21), Base URL(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/suse11sp3)
+2016-10-03T16:26:54.959Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/ubuntu12/repositories/HDP-UTILS-1.1.0.21), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(ubuntu12), Repo id(HDP-UTILS-1.1.0.21), Base URL(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/ubuntu12)
+2016-10-03T16:26:54.997Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/ubuntu14/repositories/HDP-2.5), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(ubuntu14), Repo id(HDP-2.5), Base URL(http://public-repo-1.hortonworks.com/HDP/ubuntu14/2.x/updates/2.5.0.0)
+2016-10-03T16:26:55.003Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository update), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/operating_systems/ubuntu14/repositories/HDP-UTILS-1.1.0.21), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), OS(ubuntu14), Repo id(HDP-UTILS-1.1.0.21), Base URL(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/ubuntu14)
+2016-10-03T16:26:58.669Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created)
+2016-10-03T16:27:20.063Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created)
+2016-10-03T16:27:24.185Z, User(admin), RemoteIp(192.168.64.1), Operation(Request from server), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/requests), ResultStatus(202 Accepted), Command(null), Cluster name(null)
+2016-10-03T16:27:24.206Z, User(admin), Operation(Check host), Status(IN_PROGRESS), RequestId(1)
+2016-10-03T16:27:24.207Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(1), TaskId(2), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:27:24.211Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(1), TaskId(3), Hostname(c6402.ambari.apache.org)
+2016-10-03T16:27:24.213Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(1), TaskId(4), Hostname(c6403.ambari.apache.org)
+2016-10-03T16:27:24.214Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(1), TaskId(5), Hostname(c6404.ambari.apache.org)
+2016-10-03T16:27:25.836Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(1), TaskId(2), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:27:26.824Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(1), TaskId(3), Hostname(c6402.ambari.apache.org)
+2016-10-03T16:27:27.830Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(1), TaskId(5), Hostname(c6404.ambari.apache.org)
+2016-10-03T16:27:28.831Z, User(admin), Operation(Check host), Status(COMPLETED), RequestId(1)
+2016-10-03T16:27:28.833Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(1), TaskId(4), Hostname(c6403.ambari.apache.org)
+2016-10-03T16:27:29.428Z, User(admin), RemoteIp(192.168.64.1), Operation(Request from server), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/requests), ResultStatus(202 Accepted), Command(null), Cluster name(null)
+2016-10-03T16:27:29.440Z, User(admin), Operation(Check host), Status(IN_PROGRESS), RequestId(2)
+2016-10-03T16:27:29.443Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(2), TaskId(6), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:27:29.443Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(2), TaskId(7), Hostname(c6402.ambari.apache.org)
+2016-10-03T16:27:29.444Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(2), TaskId(8), Hostname(c6403.ambari.apache.org)
+2016-10-03T16:27:29.444Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(QUEUED), RequestId(2), TaskId(9), Hostname(c6404.ambari.apache.org)
+2016-10-03T16:27:38.816Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(2), TaskId(7), Hostname(c6402.ambari.apache.org)
+2016-10-03T16:27:39.818Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(2), TaskId(9), Hostname(c6404.ambari.apache.org)
+2016-10-03T16:27:40.822Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(2), TaskId(8), Hostname(c6403.ambari.apache.org)
+2016-10-03T16:27:42.817Z, User(admin), Operation(Check host), Status(COMPLETED), RequestId(2)
+2016-10-03T16:27:42.818Z, User(admin), Operation(ACTIONEXECUTE check_host), Status(COMPLETED), RequestId(2), TaskId(6), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:28:19.410Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created)
+2016-10-03T16:28:49.829Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created)
+2016-10-03T16:28:56.413Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created)
+2016-10-03T16:29:02.762Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created)
+2016-10-03T16:29:20.084Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created)
+2016-10-03T16:29:30.390Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions), ResultStatus(201 Created)
+2016-10-03T16:29:30.430Z, User(admin), RemoteIp(192.168.64.1), Operation(Repository version change), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.5/repository_versions/1), ResultStatus(200 OK), Stack(HDP), Stack version(2.5), Display name(null), Repo version(null), Repositories(
+Operating system: debian7
+    Repository ID(HDP-2.5), Repository name(HDP), Base url(http://public-repo-1.hortonworks.com/HDP/debian7/2.x/updates/2.5.0.0)
+    Repository ID(HDP-UTILS-1.1.0.21), Repository name(HDP-UTILS), Base url(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/debian7)
+Operating system: redhat6
+    Repository ID(HDP-2.5), Repository name(HDP), Base url(http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.5.0.0/)
+    Repository ID(HDP-UTILS-1.1.0.21), Repository name(HDP-UTILS), Base url(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6)
+Operating system: redhat7
+    Repository ID(HDP-2.5), Repository name(HDP), Base url(http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.5.0.0/)
+    Repository ID(HDP-UTILS-1.1.0.21), Repository name(HDP-UTILS), Base url(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos7)
+Operating system: suse11
+    Repository ID(HDP-2.5), Repository name(HDP), Base url(http://public-repo-1.hortonworks.com/HDP/suse11sp3/2.x/updates/2.5.0.0)
+    Repository ID(HDP-UTILS-1.1.0.21), Repository name(HDP-UTILS), Base url(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/suse11sp3)
+Operating system: suse12
+    Repository ID(HDP-2.5), Repository name(HDP), Base url(http://public-repo-1.hortonworks.com/HDP/sles12/2.x/updates/2.5.0.0)
+    Repository ID(HDP-UTILS-1.1.0.21), Repository name(HDP-UTILS), Base url(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/sles12)
+Operating system: ubuntu12
+    Repository ID(HDP-2.5), Repository name(HDP), Base url(http://public-repo-1.hortonworks.com/HDP/ubuntu12/2.x/updates/2.5.0.0)
+    Repository ID(HDP-UTILS-1.1.0.21), Repository name(HDP-UTILS), Base url(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/ubuntu12)
+Operating system: ubuntu14
+    Repository ID(HDP-2.5), Repository name(HDP), Base url(http://public-repo-1.hortonworks.com/HDP/ubuntu14/2.x/updates/2.5.0.0)
+    Repository ID(HDP-UTILS-1.1.0.21), Repository name(HDP-UTILS), Base url(http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/ubuntu14)
+)
+2016-10-03T16:29:30.578Z, User(admin), RemoteIp(192.168.64.1), Operation(Configuration change), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster), ResultStatus(201 Created), VersionNumber(Vnull), VersionNote(null)
+2016-10-03T16:29:30.828Z, User(admin), RemoteIp(192.168.64.1), Operation(null), RequestId(null), Status(Successfully queued)
+2016-10-03T16:29:32.696Z, User(admin), RemoteIp(192.168.64.1), Operation(Configuration change), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster), ResultStatus(200 OK), VersionNumber(V1), VersionNote(Initial configurations for HDFS)
+2016-10-03T16:29:32.890Z, User(admin), RemoteIp(192.168.64.1), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/services?ServiceInfo/service_name=HDFS), ResultStatus(201 Created)
+2016-10-03T16:29:32.996Z, User(admin), RemoteIp(192.168.64.1), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/services?ServiceInfo/service_name=ZOOKEEPER), ResultStatus(201 Created)
+2016-10-03T16:29:33.044Z, User(admin), RemoteIp(192.168.64.1), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/services?ServiceInfo/service_name=AMBARI_INFRA), ResultStatus(201 Created)
+2016-10-03T16:29:33.099Z, User(admin), RemoteIp(192.168.64.1), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/services?ServiceInfo/service_name=AMBARI_METRICS), ResultStatus(201 Created)
+2016-10-03T16:29:33.239Z, User(admin), RemoteIp(192.168.64.1), Operation(Host addition), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(c6402.ambari.apache.org)
+2016-10-03T16:29:33.359Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(NAMENODE)
+2016-10-03T16:29:33.428Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(SECONDARY_NAMENODE)
+2016-10-03T16:29:33.516Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(ZOOKEEPER_SERVER)
+2016-10-03T16:29:33.562Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(INFRA_SOLR)
+2016-10-03T16:29:33.629Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(METRICS_GRAFANA)
+2016-10-03T16:29:33.672Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(METRICS_COLLECTOR)
+2016-10-03T16:29:33.780Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(DATANODE)
+2016-10-03T16:29:33.851Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(HDFS_CLIENT)
+2016-10-03T16:29:33.926Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(ZOOKEEPER_CLIENT)
+2016-10-03T16:29:33.988Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(INFRA_SOLR_CLIENT)
+2016-10-03T16:29:34.107Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(METRICS_MONITOR)
+2016-10-03T16:29:34.814Z, User(admin), RemoteIp(192.168.64.1), Operation(INSTALLED: all services (myCluster)), RequestId(3), Status(Successfully queued)
+2016-10-03T16:29:35.035Z, User(admin), Operation(Install Services), Status(IN_PROGRESS), RequestId(3)
+2016-10-03T16:29:35.035Z, User(admin), Operation(INSTALL DATANODE), Status(QUEUED), RequestId(3), TaskId(10), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:29:35.036Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(QUEUED), RequestId(3), TaskId(11), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:29:35.036Z, User(admin), Operation(INSTALL INFRA_SOLR), Status(QUEUED), RequestId(3), TaskId(12), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:29:35.036Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(QUEUED), RequestId(3), TaskId(13), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:29:35.037Z, User(admin), Operation(INSTALL METRICS_GRAFANA), Status(QUEUED), RequestId(3), TaskId(14), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:29:35.043Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(QUEUED), RequestId(3), TaskId(15), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:29:35.043Z, User(admin), Operation(INSTALL NAMENODE), Status(QUEUED), RequestId(3), TaskId(16), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:29:35.043Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(QUEUED), RequestId(3), TaskId(17), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:29:35.045Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(QUEUED), RequestId(3), TaskId(18), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:29:35.046Z, User(admin), Operation(INSTALL DATANODE), Status(QUEUED), RequestId(3), TaskId(19), Hostname(c6402.ambari.apache.org)
+2016-10-03T16:29:35.047Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(QUEUED), RequestId(3), TaskId(20), Hostname(c6402.ambari.apache.org)
+2016-10-03T16:29:35.047Z, User(admin), Operation(INSTALL SECONDARY_NAMENODE), Status(QUEUED), RequestId(3), TaskId(21), Hostname(c6402.ambari.apache.org)
+2016-10-03T16:29:35.047Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(QUEUED), RequestId(3), TaskId(22), Hostname(c6402.ambari.apache.org)
+2016-10-03T16:29:35.048Z, User(admin), Operation(INSTALL DATANODE), Status(QUEUED), RequestId(3), TaskId(23), Hostname(c6403.ambari.apache.org)
+2016-10-03T16:29:35.051Z, User(admin), Operation(INSTALL METRICS_COLLECTOR), Status(QUEUED), RequestId(3), TaskId(24), Hostname(c6403.ambari.apache.org)
+2016-10-03T16:29:35.052Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(QUEUED), RequestId(3), TaskId(25), Hostname(c6403.ambari.apache.org)
+2016-10-03T16:29:35.052Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(QUEUED), RequestId(3), TaskId(26), Hostname(c6403.ambari.apache.org)
+2016-10-03T16:29:35.052Z, User(admin), Operation(INSTALL DATANODE), Status(QUEUED), RequestId(3), TaskId(27), Hostname(c6404.ambari.apache.org)
+2016-10-03T16:29:35.053Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(QUEUED), RequestId(3), TaskId(28), Hostname(c6404.ambari.apache.org)
+2016-10-03T16:29:35.053Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(QUEUED), RequestId(3), TaskId(29), Hostname(c6404.ambari.apache.org)
+2016-10-03T16:29:35.057Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(QUEUED), RequestId(3), TaskId(30), Hostname(c6404.ambari.apache.org)
+2016-10-03T16:29:35.059Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(QUEUED), RequestId(3), TaskId(31), Hostname(c6404.ambari.apache.org)
+2016-10-03T16:29:35.629Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created)
+2016-10-03T16:53:55.860Z, User(admin), Operation(INSTALL DATANODE), Status(COMPLETED), RequestId(3), TaskId(10), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:53:56.837Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(COMPLETED), RequestId(3), TaskId(11), Hostname(c6401.ambari.apache.org)
+2016-10-03T16:58:35.834Z, User(admin), Operation(INSTALL DATANODE), Status(COMPLETED), RequestId(3), TaskId(19), Hostname(c6402.ambari.apache.org)
+2016-10-03T17:00:08.822Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(COMPLETED), RequestId(3), TaskId(20), Hostname(c6402.ambari.apache.org)
+2016-10-03T17:00:09.827Z, User(admin), Operation(INSTALL SECONDARY_NAMENODE), Status(COMPLETED), RequestId(3), TaskId(21), Hostname(c6402.ambari.apache.org)
+2016-10-03T17:00:10.831Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(COMPLETED), RequestId(3), TaskId(22), Hostname(c6402.ambari.apache.org)
+2016-10-03T17:00:15.818Z, User(admin), Operation(Install Services), Status(FAILED), RequestId(3)
+2016-10-03T17:00:15.818Z, User(admin), Operation(INSTALL DATANODE), Status(FAILED), RequestId(3), TaskId(23), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:00:16.820Z, User(admin), Operation(INSTALL DATANODE), Status(FAILED), RequestId(3), TaskId(27), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:00:23.828Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(COMPLETED), RequestId(3), TaskId(28), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:00:54.818Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(COMPLETED), RequestId(3), TaskId(29), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:01:39.826Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(COMPLETED), RequestId(3), TaskId(30), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:01:41.825Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(COMPLETED), RequestId(3), TaskId(31), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:02:31.822Z, User(admin), Operation(INSTALL INFRA_SOLR), Status(FAILED), RequestId(3), TaskId(12), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:31.839Z, User(admin), Operation(INSTALL METRICS_COLLECTOR), Status(FAILED), RequestId(3), TaskId(24), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:02:32.252Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(ABORTED), RequestId(3), TaskId(13), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:32.253Z, User(admin), Operation(INSTALL METRICS_GRAFANA), Status(ABORTED), RequestId(3), TaskId(14), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:32.253Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(ABORTED), RequestId(3), TaskId(15), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:32.253Z, User(admin), Operation(INSTALL NAMENODE), Status(ABORTED), RequestId(3), TaskId(16), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:32.253Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(ABORTED), RequestId(3), TaskId(17), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:32.257Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(ABORTED), RequestId(3), TaskId(18), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:32.257Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(ABORTED), RequestId(3), TaskId(25), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:02:32.257Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(ABORTED), RequestId(3), TaskId(26), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:02:48.818Z, User(admin), RemoteIp(192.168.64.1), Operation(INSTALLED: all services on all hosts (myCluster)), RequestId(4), Status(Successfully queued)
+2016-10-03T17:02:48.875Z, User(admin), Operation(Install Components), Status(IN_PROGRESS), RequestId(4)
+2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(QUEUED), RequestId(4), TaskId(32), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL INFRA_SOLR), Status(QUEUED), RequestId(4), TaskId(33), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(QUEUED), RequestId(4), TaskId(34), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL METRICS_GRAFANA), Status(QUEUED), RequestId(4), TaskId(35), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(QUEUED), RequestId(4), TaskId(36), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL NAMENODE), Status(QUEUED), RequestId(4), TaskId(37), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(QUEUED), RequestId(4), TaskId(38), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:48.875Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(QUEUED), RequestId(4), TaskId(39), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:48.876Z, User(admin), Operation(INSTALL DATANODE), Status(QUEUED), RequestId(4), TaskId(40), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:02:48.876Z, User(admin), Operation(INSTALL METRICS_COLLECTOR), Status(QUEUED), RequestId(4), TaskId(41), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:02:48.876Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(QUEUED), RequestId(4), TaskId(42), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:02:48.876Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(QUEUED), RequestId(4), TaskId(43), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:02:48.876Z, User(admin), Operation(INSTALL DATANODE), Status(QUEUED), RequestId(4), TaskId(44), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:02:48.878Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(QUEUED), RequestId(4), TaskId(45), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:02:48.878Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(QUEUED), RequestId(4), TaskId(46), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:02:48.878Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(QUEUED), RequestId(4), TaskId(47), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:02:50.836Z, User(admin), Operation(INSTALL DATANODE), Status(COMPLETED), RequestId(4), TaskId(44), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:02:51.825Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(COMPLETED), RequestId(4), TaskId(32), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:02:51.836Z, User(admin), Operation(INSTALL HDFS_CLIENT), Status(COMPLETED), RequestId(4), TaskId(45), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:02:52.821Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(COMPLETED), RequestId(4), TaskId(46), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:02:52.855Z, User(admin), Operation(INSTALL DATANODE), Status(COMPLETED), RequestId(4), TaskId(40), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:02:53.817Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(COMPLETED), RequestId(4), TaskId(47), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:03:38.840Z, User(admin), Operation(INSTALL INFRA_SOLR), Status(COMPLETED), RequestId(4), TaskId(33), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:03:39.821Z, User(admin), Operation(INSTALL INFRA_SOLR_CLIENT), Status(COMPLETED), RequestId(4), TaskId(34), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:04:32.819Z, User(admin), Operation(INSTALL METRICS_GRAFANA), Status(COMPLETED), RequestId(4), TaskId(35), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:04:34.827Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(COMPLETED), RequestId(4), TaskId(36), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:04:35.841Z, User(admin), Operation(INSTALL NAMENODE), Status(COMPLETED), RequestId(4), TaskId(37), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:04:37.835Z, User(admin), Operation(INSTALL ZOOKEEPER_CLIENT), Status(COMPLETED), RequestId(4), TaskId(38), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:04:37.836Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(COMPLETED), RequestId(4), TaskId(39), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:07:09.821Z, User(admin), Operation(INSTALL METRICS_COLLECTOR), Status(COMPLETED), RequestId(4), TaskId(41), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:07:09.844Z, User(admin), Operation(INSTALL METRICS_MONITOR), Status(COMPLETED), RequestId(4), TaskId(42), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:07:11.839Z, User(admin), Operation(Install Components), Status(COMPLETED), RequestId(4)
+2016-10-03T17:07:11.839Z, User(admin), Operation(INSTALL ZOOKEEPER_SERVER), Status(COMPLETED), RequestId(4), TaskId(43), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:07:14.709Z, User(admin), RemoteIp(192.168.64.1), Operation(STARTED: all services (myCluster)), RequestId(5), Status(Successfully queued)
+2016-10-03T17:07:14.751Z, User(admin), Operation(Start Services), Status(IN_PROGRESS), RequestId(5)
+2016-10-03T17:07:14.751Z, User(admin), Operation(START METRICS_MONITOR), Status(QUEUED), RequestId(5), TaskId(48), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:07:14.751Z, User(admin), Operation(START ZOOKEEPER_SERVER), Status(QUEUED), RequestId(5), TaskId(49), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:07:14.751Z, User(admin), Operation(START METRICS_MONITOR), Status(QUEUED), RequestId(5), TaskId(50), Hostname(c6402.ambari.apache.org)
+2016-10-03T17:07:14.751Z, User(admin), Operation(START ZOOKEEPER_SERVER), Status(QUEUED), RequestId(5), TaskId(51), Hostname(c6402.ambari.apache.org)
+2016-10-03T17:07:14.751Z, User(admin), Operation(START METRICS_MONITOR), Status(QUEUED), RequestId(5), TaskId(52), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:07:14.751Z, User(admin), Operation(START ZOOKEEPER_SERVER), Status(QUEUED), RequestId(5), TaskId(53), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:07:14.751Z, User(admin), Operation(START METRICS_MONITOR), Status(QUEUED), RequestId(5), TaskId(54), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:07:18.817Z, User(admin), Operation(START METRICS_MONITOR), Status(COMPLETED), RequestId(5), TaskId(52), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:07:18.823Z, User(admin), Operation(START METRICS_MONITOR), Status(COMPLETED), RequestId(5), TaskId(50), Hostname(c6402.ambari.apache.org)
+2016-10-03T17:07:18.830Z, User(admin), Operation(START METRICS_MONITOR), Status(COMPLETED), RequestId(5), TaskId(54), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:07:19.900Z, User(admin), Operation(START METRICS_MONITOR), Status(COMPLETED), RequestId(5), TaskId(48), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:07:19.918Z, User(admin), Operation(START ZOOKEEPER_SERVER), Status(COMPLETED), RequestId(5), TaskId(53), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:07:19.935Z, User(admin), Operation(START ZOOKEEPER_SERVER), Status(COMPLETED), RequestId(5), TaskId(51), Hostname(c6402.ambari.apache.org)
+2016-10-03T17:07:21.820Z, User(admin), Operation(START ZOOKEEPER_SERVER), Status(COMPLETED), RequestId(5), TaskId(49), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:07:22.836Z, User(admin), Operation(START INFRA_SOLR), Status(QUEUED), RequestId(5), TaskId(55), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:07:22.836Z, User(admin), Operation(SERVICE_CHECK ZOOKEEPER_QUORUM_SERVICE_CHECK), Details(SERVICE_CHECK ZOOKEEPER), Status(QUEUED), RequestId(5), TaskId(56), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:07:37.819Z, User(admin), Operation(START INFRA_SOLR), Status(COMPLETED), RequestId(5), TaskId(55), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:07:46.825Z, User(admin), Operation(SERVICE_CHECK ZOOKEEPER_QUORUM_SERVICE_CHECK), Details(SERVICE_CHECK ZOOKEEPER), Status(COMPLETED), RequestId(5), TaskId(56), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:07:47.041Z, User(admin), Operation(SERVICE_CHECK AMBARI_INFRA_SERVICE_CHECK), Details(SERVICE_CHECK AMBARI_INFRA), Status(QUEUED), RequestId(5), TaskId(57), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:07:47.041Z, User(admin), Operation(START DATANODE), Status(QUEUED), RequestId(5), TaskId(58), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:07:47.041Z, User(admin), Operation(START NAMENODE), Status(QUEUED), RequestId(5), TaskId(59), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:07:47.041Z, User(admin), Operation(START DATANODE), Status(QUEUED), RequestId(5), TaskId(60), Hostname(c6402.ambari.apache.org)
+2016-10-03T17:07:47.041Z, User(admin), Operation(START DATANODE), Status(QUEUED), RequestId(5), TaskId(61), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:07:47.042Z, User(admin), Operation(START DATANODE), Status(QUEUED), RequestId(5), TaskId(62), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:07:48.823Z, User(admin), Operation(SERVICE_CHECK AMBARI_INFRA_SERVICE_CHECK), Details(SERVICE_CHECK AMBARI_INFRA), Status(COMPLETED), RequestId(5), TaskId(57), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:07:52.828Z, User(admin), Operation(START DATANODE), Status(COMPLETED), RequestId(5), TaskId(61), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:07:52.844Z, User(admin), Operation(START DATANODE), Status(COMPLETED), RequestId(5), TaskId(60), Hostname(c6402.ambari.apache.org)
+2016-10-03T17:07:53.820Z, User(admin), Operation(START DATANODE), Status(COMPLETED), RequestId(5), TaskId(62), Hostname(c6404.ambari.apache.org)
+2016-10-03T17:07:53.833Z, User(admin), Operation(START DATANODE), Status(COMPLETED), RequestId(5), TaskId(58), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:08:13.818Z, User(admin), Operation(START NAMENODE), Status(COMPLETED), RequestId(5), TaskId(59), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:08:14.280Z, User(admin), Operation(START SECONDARY_NAMENODE), Status(QUEUED), RequestId(5), TaskId(63), Hostname(c6402.ambari.apache.org)
+2016-10-03T17:08:20.821Z, User(admin), Operation(START SECONDARY_NAMENODE), Status(COMPLETED), RequestId(5), TaskId(63), Hostname(c6402.ambari.apache.org)
+2016-10-03T17:08:21.333Z, User(admin), Operation(SERVICE_CHECK HDFS_SERVICE_CHECK), Details(SERVICE_CHECK HDFS), Status(QUEUED), RequestId(5), TaskId(64), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:08:21.334Z, User(admin), Operation(START METRICS_COLLECTOR), Status(QUEUED), RequestId(5), TaskId(65), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:08:29.815Z, User(admin), Operation(SERVICE_CHECK HDFS_SERVICE_CHECK), Details(SERVICE_CHECK HDFS), Status(COMPLETED), RequestId(5), TaskId(64), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:09:17.819Z, User(admin), Operation(START METRICS_COLLECTOR), Status(COMPLETED), RequestId(5), TaskId(65), Hostname(c6403.ambari.apache.org)
+2016-10-03T17:09:18.621Z, User(admin), Operation(SERVICE_CHECK AMBARI_METRICS_SERVICE_CHECK), Details(SERVICE_CHECK AMBARI_METRICS), Status(QUEUED), RequestId(5), TaskId(66), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:09:18.621Z, User(admin), Operation(START METRICS_GRAFANA), Status(QUEUED), RequestId(5), TaskId(67), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:09:39.816Z, User(admin), Operation(SERVICE_CHECK AMBARI_METRICS_SERVICE_CHECK), Details(SERVICE_CHECK AMBARI_METRICS), Status(COMPLETED), RequestId(5), TaskId(66), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:09:43.823Z, User(admin), Operation(Start Services), Status(COMPLETED), RequestId(5)
+2016-10-03T17:09:43.824Z, User(admin), Operation(START METRICS_GRAFANA), Status(COMPLETED), RequestId(5), TaskId(67), Hostname(c6401.ambari.apache.org)
+2016-10-03T17:14:46.087Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/version_definitions?dry_run=true), ResultStatus(201 Created)
+2016-10-03T17:14:56.802Z, User(admin), RemoteIp(192.168.64.1), Operation(Configuration change), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster), ResultStatus(200 OK), VersionNumber(Vnull), VersionNote(null)
+2016-10-04T05:55:42.000Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.020Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.024Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.027Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.029Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.033Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.045Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.047Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.049Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.051Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.054Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.055Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.057Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.061Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.064Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.068Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.070Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.072Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.074Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.078Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.080Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.081Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.083Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.085Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.087Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.088Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.090Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.091Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.092Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.100Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.100Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.102Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.103Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.104Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.105Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.107Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.108Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.109Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.110Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:42.130Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T05:55:44.919Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T08:02:57.511Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T08:02:59.309Z, User(admin), RemoteIp(192.168.64.1), Operation(User login), Roles(
+    Ambari: Ambari Administrator
+), Status(Success)
+2016-10-04T08:05:40.063Z, User(admin), RemoteIp(192.168.64.1), RequestType(POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/widget_layouts), ResultStatus(201 Created)
+2016-10-04T08:05:40.085Z, User(admin), RemoteIp(192.168.64.1), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/users/admin/activeWidgetLayouts/), ResultStatus(200 OK)
+2016-10-04T08:11:45.150Z, User(admin), RemoteIp(192.168.64.1), Operation(STARTED: METRICS_COLLECTOR/AMBARI_METRICS on c6403.ambari.apache.org (myCluster)), Host name(c6403.ambari.apache.org), RequestId(6), Status(Successfully queued)
+2016-10-04T08:11:45.221Z, User(admin), Operation(Start Metrics Collector), Status(IN_PROGRESS), RequestId(6)
+2016-10-04T08:11:45.222Z, User(admin), Operation(START METRICS_COLLECTOR), Status(QUEUED), RequestId(6), TaskId(102), Hostname(c6403.ambari.apache.org)
+2016-10-04T08:15:30.413Z, User(admin), Operation(Start Metrics Collector), Status(COMPLETED), RequestId(6)
+2016-10-04T08:15:30.414Z, User(admin), Operation(START METRICS_COLLECTOR), Status(COMPLETED), RequestId(6), TaskId(102), Hostname(c6403.ambari.apache.org)
+2016-10-04T08:17:44.550Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T08:17:44.552Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T08:17:44.554Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T08:17:44.556Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T08:17:44.557Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T08:17:44.559Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T08:17:44.561Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T08:17:44.564Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T08:17:44.595Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T08:17:45.370Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-04T08:17:45.495Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-05T12:06:48.400Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-05T12:13:51.541Z, User(admin), RemoteIp(192.168.64.1), Operation(User login), Roles(
+    Ambari: Ambari Administrator
+), Status(Success)
+2016-10-05T12:14:27.945Z, User(admin), RemoteIp(192.168.64.1), Operation(null), RequestId(null), Status(Successfully queued)
+2016-10-05T12:14:28.414Z, User(admin), RemoteIp(192.168.64.1), Operation(Configuration change), RequestType(PUT), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster), ResultStatus(200 OK), VersionNumber(V1), VersionNote(Initial configurations for Log Search)
+2016-10-05T12:14:28.562Z, User(admin), RemoteIp(192.168.64.1), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/services?ServiceInfo/service_name=LOGSEARCH), ResultStatus(201 Created)
+2016-10-05T12:14:28.630Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(LOGSEARCH_SERVER)
+2016-10-05T12:14:28.744Z, User(admin), RemoteIp(192.168.64.1), Operation(Component addition to host), RequestType(QUERY_POST), url(http://c6401.ambari.apache.org:8080/api/v1/clusters/myCluster/hosts), ResultStatus(201 Created), Hostname(null), Component(LOGSEARCH_LOGFEEDER)
+2016-10-05T12:14:29.066Z, User(admin), RemoteIp(192.168.64.1), Operation(INSTALLED: all services (myCluster)), RequestId(7), Status(Successfully queued)
+2016-10-05T12:14:29.138Z, User(admin), Operation(Install Services), Status(IN_PROGRESS), RequestId(7)
+2016-10-05T12:14:29.146Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(7), TaskId(152), Hostname(c6401.ambari.apache.org)
+2016-10-05T12:14:29.147Z, User(admin), Operation(INSTALL LOGSEARCH_SERVER), Status(QUEUED), RequestId(7), TaskId(153), Hostname(c6401.ambari.apache.org)
+2016-10-05T12:14:29.147Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(7), TaskId(154), Hostname(c6402.ambari.apache.org)
+2016-10-05T12:14:29.148Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(7), TaskId(155), Hostname(c6403.ambari.apache.org)
+2016-10-05T12:14:29.151Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(7), TaskId(156), Hostname(c6404.ambari.apache.org)
+2016-10-05T12:14:32.964Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(7), TaskId(152), Hostname(c6401.ambari.apache.org)
+2016-10-05T12:14:34.925Z, User(admin), Operation(INSTALL LOGSEARCH_SERVER), Status(COMPLETED), RequestId(7), TaskId(153), Hostname(c6401.ambari.apache.org)
+2016-10-05T12:14:52.942Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(7), TaskId(155), Hostname(c6403.ambari.apache.org)
+2016-10-05T12:14:52.965Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(7), TaskId(156), Hostname(c6404.ambari.apache.org)
+2016-10-05T12:15:06.922Z, User(admin), Operation(Install Services), Status(COMPLETED), RequestId(7)
+2016-10-05T12:15:06.928Z, User(admin), Operation(INSTALL LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(7), TaskId(154), Hostname(c6402.ambari.apache.org)
+2016-10-05T12:15:11.172Z, User(admin), RemoteIp(192.168.64.1), Operation(STARTED: all services (myCluster)), RequestId(8), Status(Successfully queued)
+2016-10-05T12:15:11.180Z, User(admin), Operation(Start Added Services), Status(IN_PROGRESS), RequestId(8)
+2016-10-05T12:15:11.180Z, User(admin), Operation(START LOGSEARCH_SERVER), Status(QUEUED), RequestId(8), TaskId(157), Hostname(c6401.ambari.apache.org)
+2016-10-05T12:15:20.953Z, User(admin), Operation(START LOGSEARCH_SERVER), Status(COMPLETED), RequestId(8), TaskId(157), Hostname(c6401.ambari.apache.org)
+2016-10-05T12:15:21.328Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(8), TaskId(158), Hostname(c6401.ambari.apache.org)
+2016-10-05T12:15:21.328Z, User(admin), Operation(SERVICE_CHECK LOGSEARCH_SERVICE_CHECK), Details(SERVICE_CHECK LOGSEARCH), Status(QUEUED), RequestId(8), TaskId(159), Hostname(c6401.ambari.apache.org)
+2016-10-05T12:15:21.329Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(8), TaskId(160), Hostname(c6402.ambari.apache.org)
+2016-10-05T12:15:21.329Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(8), TaskId(161), Hostname(c6403.ambari.apache.org)
+2016-10-05T12:15:21.329Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(QUEUED), RequestId(8), TaskId(162), Hostname(c6404.ambari.apache.org)
+2016-10-05T12:15:22.968Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(8), TaskId(160), Hostname(c6402.ambari.apache.org)
+2016-10-05T12:15:23.025Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(8), TaskId(162), Hostname(c6404.ambari.apache.org)
+2016-10-05T12:15:26.941Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(8), TaskId(158), Hostname(c6401.ambari.apache.org)
+2016-10-05T12:15:48.950Z, User(admin), Operation(START LOGSEARCH_LOGFEEDER), Status(COMPLETED), RequestId(8), TaskId(161), Hostname(c6403.ambari.apache.org)
+2016-10-05T12:15:55.920Z, User(admin), Operation(Start Added Services), Status(COMPLETED), RequestId(8)
+2016-10-05T12:15:55.920Z, User(admin), Operation(SERVICE_CHECK LOGSEARCH_SERVICE_CHECK), Details(SERVICE_CHECK LOGSEARCH), Status(COMPLETED), RequestId(8), TaskId(159), Hostname(c6401.ambari.apache.org)
+2016-10-05T13:09:33.055Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-05T13:09:33.055Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-05T13:09:33.124Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-05T13:09:34.863Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-05T13:09:55.468Z, User(admin), RemoteIp(192.168.64.1), Operation(User login), Roles(
+    Ambari: Ambari Administrator
+), Status(Success)
+2016-10-06T07:44:56.131Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-06T07:44:56.219Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-06T07:44:56.326Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)
+2016-10-06T07:45:01.686Z, User(null), RemoteIp(192.168.64.1), Operation(User login), Roles(
+), Status(Failed), Reason(Authentication required)

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
index e2646ff..36507c9 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
@@ -271,6 +271,10 @@
       "value_borders":"()",
       "post_map_values":{
         "User":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
           "map_fieldname":{
             "new_fieldname":"reqUser"
           }
@@ -280,6 +284,11 @@
             "new_fieldname":"host"
           }
         },
+        "Host name":{
+          "map_fieldname":{
+            "new_fieldname":"host"
+          }
+        },
         "RemoteIp":{
           "map_fieldname":{
             "new_fieldname":"cliIP"
@@ -315,8 +324,113 @@
             "new_fieldname":"resource"
           }
         },
+        "Cluster name":{
+          "map_fieldname":{
+            "new_fieldname":"cluster"
+          }
+        },
+        "Reason":{
+          "map_fieldname":{
+            "new_fieldname":"reason"
+          }
+        },
+        "Base URL":{
+          "map_fieldname":{
+            "new_fieldname":"ws_base_url"
+          }
+        },
+        "Command":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_command"
+          }
+        },
+        "Component":{
+          "map_fieldname":{
+            "new_fieldname":"ws_component"
+          }
+        },
+        "Details":{
+          "map_fieldname":{
+            "new_fieldname":"ws_details"
+          }
+        },
+        "Display name":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_display_name"
+          }
+        },
+        "OS":{
+          "map_fieldname":{
+            "new_fieldname":"ws_os"
+          }
+        },
+        "Repo id":{
+          "map_fieldname":{
+            "new_fieldname":"ws_repo_id"
+          }
+        },
+        "Repo version":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_repo_version"
+          }
+        },
+        "Repositories":{
+          "map_fieldname":{
+            "new_fieldname":"ws_repositories"
+          }
+        },
+        "Roles":{
+          "map_fieldname":{
+            "new_fieldname":"ws_roles"
+          }
+        },
+        "Stack":{
+          "map_fieldname":{
+            "new_fieldname":"ws_stack"
+          }
+        },
+        "Stack version":{
+          "map_fieldname":{
+            "new_fieldname":"ws_stack_version"
+          }
+        },
+        "VersionNote":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_version_note"
+          }
+        },
+        "VersionNumber":{
+          "map_fieldvalue":{
+            "pre_value":"Vnull",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_version_number"
+          }
+        },
         "Status":[
           {
+            "map_fieldcopy":{
+              "copy_name": "ws_status"
+            }
+          },
+          {
             "map_fieldvalue":{
               "pre_value":"Success",
               "post_value":"1"
@@ -414,6 +528,11 @@
         ],
         "ResultStatus":[
           {
+            "map_fieldcopy":{
+              "copy_name": "ws_result_status"
+            }
+          },
+          {
             "map_fieldvalue":{
               "pre_value":"200 OK",
               "post_value":"1"

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-falcon.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-falcon.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-falcon.json.j2
index 3fea3d3..a40e52d 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-falcon.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-falcon.json.j2
@@ -41,7 +41,7 @@
       "post_map_values":{
         "logtime":{
           "map_date":{
-            "target_target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
           }
 
         }


[2/6] ambari git commit: AMBARI-18606. Improve Audit Log processing by Logfeeder (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
AMBARI-18606. Improve Audit Log processing by Logfeeder (Miklos Gergely via oleewere)

Change-Id: I9e357536115a691801013932cf13051908170d93


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8073a1bc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8073a1bc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8073a1bc

Branch: refs/heads/logsearch-ga
Commit: 8073a1bcb9c9b36a7a03fa85d70841da8886f711
Parents: ccc8925
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Wed Oct 19 17:24:58 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Wed Oct 19 17:24:58 2016 +0200

----------------------------------------------------------------------
 .../ambari/logfeeder/filter/FilterKeyValue.java |  65 ++-
 .../logfeeder/mapper/MapperFieldCopy.java       |  58 ++
 .../src/main/resources/alias_config.json        |   3 +
 .../logfeeder/mapper/MapperFieldCopyTest.java   |  71 +++
 .../configsets/audit_logs/conf/managed-schema   |   5 +
 .../logsearch/common/LogSearchConstants.java    |   2 +-
 .../ambari/logsearch/dao/AuditSolrDao.java      |   2 +-
 .../ambari/logsearch/dao/SolrCollectionDao.java |   2 +-
 .../logsearch/dao/SolrSchemaFieldDao.java       | 105 ++--
 .../apache/ambari/logsearch/dao/UserDao.java    |   2 +-
 .../logsearch/graph/GraphDataGenerator.java     |  10 +-
 .../ambari/logsearch/manager/ManagerBase.java   |   3 +-
 .../logsearch/manager/ServiceLogsManager.java   |   6 +-
 .../logsearch/manager/UserConfigManager.java    |   8 +-
 .../logsearch/model/response/CommonLogData.java |  13 +-
 .../logsearch/solr/model/SolrAuditLogData.java  |   1 -
 .../logsearch/solr/model/SolrCommonLogData.java |  42 ++
 .../solr/model/SolrServiceLogData.java          |  34 --
 .../apache/ambari/logsearch/util/SolrUtil.java  |  62 +-
 .../LogsearchKRBAuthenticationFilter.java       |  12 +-
 .../logsearch/web/security/LdapProperties.java  |   2 +-
 .../scripts/views/audit/AuditTabLayoutView.js   |   2 +-
 .../src/main/webapp/static/schema_fields.json   |  19 +-
 .../test-config/logfeeder/logfeeder.properties  |   3 +-
 .../shipper-conf/input.config-ambari.json       | 585 +++++++++++++++++++
 .../test-logs/ambari-server/ambari-audit.log    | 390 +++++++++++++
 .../templates/input.config-ambari.json.j2       | 119 ++++
 .../templates/input.config-falcon.json.j2       |   2 +-
 28 files changed, 1458 insertions(+), 170 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
index 5bb15ff..b04a439 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/filter/FilterKeyValue.java
@@ -19,6 +19,7 @@
 
 package org.apache.ambari.logfeeder.filter;
 
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.regex.Pattern;
@@ -68,18 +69,26 @@ public class FilterKeyValue extends Filter {
     if (sourceField == null) {
       return;
     }
-    Object valueObj = jsonObj.get(sourceField);
-    if (valueObj != null) {
+    if (jsonObj.containsKey(sourceField)) {
+      String keyValueString = (String) jsonObj.get(sourceField);
+      Map<String, String> valueMap = new HashMap<>();
+      if (valueBorders != null) {
+        keyValueString = preProcessBorders(keyValueString, valueMap);
+      }
+      
       String splitPattern = Pattern.quote(fieldSplit);
-      String[] tokens = valueObj.toString().split(splitPattern);
+      String[] tokens = keyValueString.split(splitPattern);
       for (String nv : tokens) {
         String[] nameValue = getNameValue(nv);
         String name = nameValue != null && nameValue.length == 2 ? nameValue[0] : null;
         String value = nameValue != null && nameValue.length == 2 ? nameValue[1] : null;
         if (name != null && value != null) {
-            jsonObj.put(name, value);
-         } else {
-           logParseError("name=" + name + ", pair=" + nv + ", field=" + sourceField + ", field_value=" + valueObj);
+          if (valueMap.containsKey(value)) {
+            value = valueMap.get(value);
+          }
+          jsonObj.put(name, value);
+        } else {
+         logParseError("name=" + name + ", pair=" + nv + ", field=" + sourceField + ", field_value=" + keyValueString);
         }
       }
     }
@@ -87,19 +96,41 @@ public class FilterKeyValue extends Filter {
     statMetric.value++;
   }
 
-  private String[] getNameValue(String nv) {
-    if (valueBorders != null) {
-      if (nv.charAt(nv.length() - 1) == valueBorders.charAt(1)) {
-        String splitPattern = Pattern.quote("" + valueBorders.charAt(0));
-        return nv.substring(0, nv.length() - 1).split(splitPattern);
-      } else {
-        return null;
+  private String preProcessBorders(String keyValueString, Map<String, String> valueMap) {
+    char openBorder = valueBorders.charAt(0);
+    char closeBorder = valueBorders.charAt(1);
+    
+    StringBuilder processed = new StringBuilder();
+    int lastPos = 0;
+    int openBorderNum = 0;
+    int valueNum = 0;
+    for (int pos = 0; pos < keyValueString.length(); pos++) {
+      char c = keyValueString.charAt(pos);
+      if (c == openBorder) {
+        if (openBorderNum == 0 ) {
+          processed.append(keyValueString.substring(lastPos, pos));
+          lastPos = pos + 1;
+        }
+        openBorderNum++;
+      }
+      if (c == closeBorder) {
+        openBorderNum--;
+        if (openBorderNum == 0) {
+          String value = keyValueString.substring(lastPos, pos).trim();
+          String valueId = "$VALUE" + (++valueNum);
+          valueMap.put(valueId, value);
+          processed.append(valueSplit + valueId);
+          lastPos = pos + 1;
+        }
       }
     }
-    else {
-      String splitPattern = Pattern.quote(valueSplit);
-      return nv.split(splitPattern);
-    }
+    
+    return processed.toString();
+  }
+
+  private String[] getNameValue(String nv) {
+    String splitPattern = Pattern.quote(valueSplit);
+    return nv.split(splitPattern);
   }
 
   private void logParseError(String inputStr) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopy.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopy.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopy.java
new file mode 100644
index 0000000..39e1ff4
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopy.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.mapper;
+
+import java.util.Map;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
+
+/**
+ * Copies the value of a field into an additional field, keeping the original field intact.
+ */
+public class MapperFieldCopy extends Mapper {
+  private static final Logger LOG = Logger.getLogger(MapperFieldCopy.class);
+  
+  private String copyName = null;
+
+  @Override
+  public boolean init(String inputDesc, String fieldName, String mapClassCode, Object mapConfigs) {
+    init(inputDesc, fieldName, mapClassCode);
+    if (!(mapConfigs instanceof Map)) {
+      LOG.fatal("Can't initialize object. mapConfigs class is not of type Map. " + mapConfigs.getClass().getName());
+      return false;
+    }
+    
+    @SuppressWarnings("unchecked")
+    Map<String, Object> mapObjects = (Map<String, Object>) mapConfigs;
+    copyName = (String) mapObjects.get("copy_name");
+    if (StringUtils.isEmpty(copyName)) {
+      LOG.fatal("Map copy name is empty.");
+      return false;
+    }
+    return true;
+  }
+
+  @Override
+  public Object apply(Map<String, Object> jsonObj, Object value) {
+    jsonObj.put(copyName, value);
+    return value;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/alias_config.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/alias_config.json b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/alias_config.json
index b15db63..e2ed625 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/alias_config.json
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/alias_config.json
@@ -25,6 +25,9 @@
     "map_date": {
       "klass": "org.apache.ambari.logfeeder.mapper.MapperDate"
     },
+    "map_fieldcopy": {
+      "klass": "org.apache.ambari.logfeeder.mapper.MapperFieldCopy"
+    },
     "map_fieldname": {
       "klass": "org.apache.ambari.logfeeder.mapper.MapperFieldName"
     },

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopyTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopyTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopyTest.java
new file mode 100644
index 0000000..108c96e
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopyTest.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.logfeeder.mapper;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class MapperFieldCopyTest {
+  private static final Logger LOG = Logger.getLogger(MapperFieldCopyTest.class);
+
+  @Test
+  public void testMapperFieldCopy_copyField() {
+    LOG.info("testMapperFieldCopy_copyField()");
+
+    Map<String, Object> mapConfigs = new HashMap<>();
+    mapConfigs.put("copy_name", "someOtherField");
+
+    MapperFieldCopy mapperFieldCopy = new MapperFieldCopy();
+    assertTrue("Could not initialize!", mapperFieldCopy.init(null, "someField", null, mapConfigs));
+
+    Map<String, Object> jsonObj = new HashMap<>();
+    jsonObj.put("someField", "someValue");
+
+    mapperFieldCopy.apply(jsonObj, "someValue");
+
+    assertEquals("Old field name wasn't removed", "someValue", jsonObj.remove("someField"));
+    assertEquals("New field wasn't put", "someValue", jsonObj.remove("someOtherField"));
+    assertTrue("jsonObj is not empty", jsonObj.isEmpty());
+  }
+
+  @Test
+  public void testMapperFielCopy_configNotMap() {
+    LOG.info("testMapperFieldCopy_configNotMap()");
+
+    MapperFieldCopy mapperFieldCopy = new MapperFieldCopy();
+    assertFalse("Was able to initialize!", mapperFieldCopy.init(null, "someField", null, ""));
+  }
+
+  @Test
+  public void testMapperFieldCopy_noNewFieldName() {
+    LOG.info("testMapperFieldCopy_noNewFieldName()");
+
+    Map<String, Object> mapConfigs = new HashMap<>();
+
+    MapperFieldCopy mapperFieldCopy = new MapperFieldCopy();
+    assertFalse("Was able to initialize!", mapperFieldCopy.init(null, "someField", null, mapConfigs));
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema
index f77bec0..4cd412b 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/configsets/audit_logs/conf/managed-schema
@@ -109,4 +109,9 @@
   <field name="message_md5" type="string" multiValued="false"/>
   <field name="type" type="key_lower_case" multiValued="false"/>
   
+  <dynamicField name='ngram_*' type="n_gram" multiValued="false" stored="true"/>
+  <dynamicField name='std_*' type="text_std_token_lower_case" multiValued="false" stored="true"/>
+  <dynamicField name='key_*' type="key_lower_case" multiValued="false" stored="true"/>
+  <dynamicField name="ws_*" type="text_ws" multiValued="false" omitNorms="false" stored="true"/>
+  
 </schema>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
index b13768f..fe31e6d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/LogSearchConstants.java
@@ -41,7 +41,7 @@ public class LogSearchConstants {
   public static final String I_E_SEPRATOR = "\\|i\\:\\:e\\|";
 
   //SUFFIX
-  public static final String NGRAM_SUFFIX = "ngram_";
+  public static final String NGRAM_PREFIX = "ngram_";
 
   //Date Format for SOLR
   public static final String SOLR_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss,SSS";

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
index 959d6f4..2fbdb46 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
@@ -67,7 +67,7 @@ public class AuditSolrDao extends SolrDaoBase {
 
     try {
       solrCollectionDao.checkSolrStatus(getSolrClient());
-      boolean createAlias = (aliasNameIn != null && !StringUtils.isBlank(rangerAuditCollection));
+      boolean createAlias = (aliasNameIn != null && StringUtils.isNotBlank(rangerAuditCollection));
       solrCollectionDao.setupCollections(getSolrClient(), solrAuditLogPropsConfig);
       if (createAlias) {
         solrAliasDao.setupAlias(solrSchemaFieldDao, getSolrClient(), solrAuditLogPropsConfig);

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrCollectionDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrCollectionDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrCollectionDao.java
index 1cbac31..c83cd99 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrCollectionDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrCollectionDao.java
@@ -30,6 +30,7 @@ import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.ROUTER_FIELD;
 
 import javax.inject.Named;
 import java.io.IOException;
@@ -43,7 +44,6 @@ public class SolrCollectionDao {
 
   private static final Logger LOG = LoggerFactory.getLogger(SolrCollectionDao.class);
 
-  private static final String ROUTER_FIELD = "_router_field_";
   private static final int SETUP_RETRY_SECOND = 30;
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrSchemaFieldDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrSchemaFieldDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrSchemaFieldDao.java
index f16dc41..35bc2dc 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrSchemaFieldDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrSchemaFieldDao.java
@@ -25,11 +25,13 @@ import org.apache.ambari.logsearch.conf.SolrUserPropsConfig;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.request.LukeRequest;
+import org.apache.solr.client.solrj.request.schema.FieldTypeDefinition;
 import org.apache.solr.client.solrj.request.schema.SchemaRequest;
+import org.apache.solr.client.solrj.response.LukeResponse;
+import org.apache.solr.client.solrj.response.LukeResponse.FieldInfo;
 import org.apache.solr.client.solrj.response.schema.SchemaResponse;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.NamedList;
-import org.codehaus.jettison.json.JSONArray;
 import org.codehaus.jettison.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -37,15 +39,17 @@ import org.slf4j.LoggerFactory;
 import javax.inject.Inject;
 import java.io.IOException;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 
 public class SolrSchemaFieldDao {
 
   private static final Logger LOG = LoggerFactory.getLogger(SolrSchemaFieldDao.class);
 
   private static final int SETUP_RETRY_SECOND = 30;
-  private static final int SETUP_UPDATE_SECOND = 10 * 60; // 10 min
-
+  private static final int SETUP_UPDATE_SECOND = 1 * 60; // 1 min
+  
   private boolean populateFieldsThreadActive = false;
 
   private Map<String, String> schemaFieldNameMap = new HashMap<>();
@@ -93,73 +97,84 @@ public class SolrSchemaFieldDao {
    * Called from the thread. Don't call this directly
    */
   private boolean _populateSchemaFields(CloudSolrClient solrClient, SolrPropsConfig solrPropsConfig) {
-    SolrRequest<SchemaResponse> request = new SchemaRequest();
-    request.setMethod(SolrRequest.METHOD.GET);
-    request.setPath("/schema");
     String historyCollection = solrUserPropsConfig.getCollection();
     if (solrClient != null && !solrPropsConfig.getCollection().equals(historyCollection)) {
-      NamedList<Object> namedList = null;
+      LukeResponse lukeResponse = null;
+      SchemaResponse schemaResponse = null;
       try {
-        namedList = solrClient.request(request);
-        LOG.debug("populateSchemaFields() collection=" + solrPropsConfig.getCollection() + ", fields=" + namedList);
+        LukeRequest lukeRequest = new LukeRequest();
+        lukeRequest.setNumTerms(0);
+        lukeResponse = lukeRequest.process(solrClient);
+        
+        SolrRequest<SchemaResponse> schemaRequest = new SchemaRequest();
+        schemaRequest.setMethod(SolrRequest.METHOD.GET);
+        schemaRequest.setPath("/schema");
+        schemaResponse = schemaRequest.process(solrClient);
+        
+        LOG.debug("populateSchemaFields() collection=" + solrPropsConfig.getCollection() + ", luke=" + lukeResponse +
+            ", schema= " + schemaResponse);
       } catch (SolrException | SolrServerException | IOException e) {
         LOG.error("Error occured while popuplating field. collection=" + solrPropsConfig.getCollection(), e);
       }
 
-      if (namedList != null) {
-        extractSchemaFieldsName(namedList.toString(), schemaFieldNameMap, schemaFieldTypeMap);
+      if (lukeResponse != null && schemaResponse != null) {
+        extractSchemaFieldsName(lukeResponse, schemaResponse);
         return true;
       }
     }
     return false;
   }
 
-  public void extractSchemaFieldsName(String responseString,
-                                      final Map<String, String> schemaFieldsNameMap,
-                                      final Map<String, String> schemaFieldTypeMap) {
+  private void extractSchemaFieldsName(LukeResponse lukeResponse, SchemaResponse schemaResponse) {
     try {
-      JSONObject jsonObject = new JSONObject(responseString);
-      JSONObject schemajsonObject = jsonObject.getJSONObject("schema");
-      JSONArray jsonArrayList = schemajsonObject.getJSONArray("fields");
-      JSONArray fieldTypeJsonArray = schemajsonObject
-        .getJSONArray("fieldTypes");
-      if (jsonArrayList == null) {
-        return;
-      }
-      if (fieldTypeJsonArray == null) {
-        return;
-      }
+      HashMap<String, String> _schemaFieldNameMap = new HashMap<>();
       HashMap<String, String> _schemaFieldTypeMap = new HashMap<>();
-      HashMap<String, String> _schemaFieldsNameMap = new HashMap<String, String>();
-      for (int i = 0; i < fieldTypeJsonArray.length(); i++) {
-        JSONObject typeObject = fieldTypeJsonArray.getJSONObject(i);
-        String name = typeObject.getString("name");
-        String fieldTypeJson = typeObject.toString();
+      
+      for (Entry<String, FieldInfo> e : lukeResponse.getFieldInfo().entrySet()) {
+        String name = e.getKey();
+        String type = e.getValue().getType();
+        if (!name.contains("@") && !name.startsWith("_") && !name.contains("_md5") && !name.contains("_ms") &&
+          !name.contains(LogSearchConstants.NGRAM_PREFIX) && !name.contains("tags") && !name.contains("_str")) {
+          _schemaFieldNameMap.put(name, type);
+        }
+      }
+      
+      List<FieldTypeDefinition> fieldTypes = schemaResponse.getSchemaRepresentation().getFieldTypes();
+      for (FieldTypeDefinition fieldType : fieldTypes) {
+        Map<String, Object> fieldAttributes = fieldType.getAttributes();
+        String name = (String) fieldAttributes.get("name");
+        String fieldTypeJson = new JSONObject(fieldAttributes).toString();
         _schemaFieldTypeMap.put(name, fieldTypeJson);
       }
-      for (int i = 0; i < jsonArrayList.length(); i++) {
-        JSONObject explrObject = jsonArrayList.getJSONObject(i);
-        String name = explrObject.getString("name");
-        String type = explrObject.getString("type");
+      
+      List<Map<String, Object>> fields = schemaResponse.getSchemaRepresentation().getFields();
+      for (Map<String, Object> field : fields) {
+        String name = (String) field.get("name");
+        String type = (String) field.get("type");
         if (!name.contains("@") && !name.startsWith("_") && !name.contains("_md5") && !name.contains("_ms") &&
-          !name.contains(LogSearchConstants.NGRAM_SUFFIX) && !name.contains("tags") && !name.contains("_str")) {
-          _schemaFieldsNameMap.put(name, type);
+          !name.contains(LogSearchConstants.NGRAM_PREFIX) && !name.contains("tags") && !name.contains("_str")) {
+          _schemaFieldNameMap.put(name, type);
         }
       }
-      schemaFieldsNameMap.clear();
-      schemaFieldTypeMap.clear();
-      schemaFieldsNameMap.putAll(_schemaFieldsNameMap);
-      schemaFieldTypeMap.putAll(_schemaFieldTypeMap);
+      
+      if (_schemaFieldNameMap.isEmpty() || _schemaFieldTypeMap.isEmpty()) {
+        return;
+      }
+      
+      synchronized (this) {
+        schemaFieldNameMap = _schemaFieldNameMap;
+        schemaFieldTypeMap = _schemaFieldTypeMap;
+      }
     } catch (Exception e) {
       LOG.error(e + "Credentials not specified in logsearch.properties " + MessageEnums.ERROR_SYSTEM);
     }
   }
 
-  public Map<String, String> getSchemaFieldTypeMap() {
-    return schemaFieldTypeMap;
+  public synchronized Map<String, String> getSchemaFieldNameMap() {
+    return schemaFieldNameMap;
   }
 
-  public Map<String, String> getSchemaFieldNameMap() {
-    return schemaFieldNameMap;
+  public synchronized Map<String, String> getSchemaFieldTypeMap() {
+    return schemaFieldTypeMap;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
index 912442d..63cc89e 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserDao.java
@@ -135,7 +135,7 @@ public class UserDao {
       String encPassword = user.get(ENC_PASSWORD);
       String username = user.get(USER_NAME);
       String password = user.get(PASSWORD);
-      if (!StringUtils.isBlank(password)) {
+      if (StringUtils.isNotBlank(password)) {
         encPassword = CommonUtil.encryptPassword(username, password);
         user.put(PASSWORD, "");
         user.put(ENC_PASSWORD, encPassword);

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
index 85b5fda..741c523 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/graph/GraphDataGenerator.java
@@ -249,13 +249,13 @@ public class GraphDataGenerator {
           NodeData hostNode = new NodeData();
           String name = (pivotHost.getValue() == null ? "" : "" + pivotHost.getValue());
           String value = "" + pivotHost.getCount();
-          if (!StringUtils.isBlank(name)) {
+          if (StringUtils.isNotBlank(name)) {
             hostNode.setName(name);
           }
-          if (!StringUtils.isBlank(value)) {
+          if (StringUtils.isNotBlank(value)) {
             hostNode.setValue(value);
           }
-          if (!StringUtils.isBlank(firstPriority)) {
+          if (StringUtils.isNotBlank(firstPriority)) {
             hostNode.setType(firstPriority);
           }
 
@@ -263,7 +263,7 @@ public class GraphDataGenerator {
           hostNode.setRoot(true);
           PivotField hostPivot = null;
           for (PivotField searchHost : secondHirarchicalPivotFields) {
-            if (!StringUtils.isBlank(hostNode.getName())
+            if (StringUtils.isNotBlank(hostNode.getName())
               && hostNode.getName().equals(searchHost.getValue())) {
               hostPivot = searchHost;
               break;
@@ -291,7 +291,7 @@ public class GraphDataGenerator {
                 NodeData compNode = new NodeData();
                 String compName = (pivotComp.getValue() == null ? "" : "" + pivotComp.getValue());
                 compNode.setName(compName);
-                if (!StringUtils.isBlank(secondPriority)) {
+                if (StringUtils.isNotBlank(secondPriority)) {
                   compNode.setType(secondPriority);
                 }
                 compNode.setValue("" + pivotComp.getCount());

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
index 45d66cf..6c280ac 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
@@ -32,6 +32,7 @@ import org.apache.ambari.logsearch.model.response.LogSearchResponse;
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
 import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.response.QueryResponse;
@@ -110,7 +111,7 @@ public abstract class ManagerBase<LOG_DATA_TYPE extends LogData, SEARCH_RESPONSE
     SEARCH_RESPONSE logResponse = createLogSearchResponse();
     SolrDocumentList docList = response.getResults();
     List<LOG_DATA_TYPE> serviceLogDataList = convertToSolrBeans(response);
-    if (docList != null && !docList.isEmpty()) {
+    if (CollectionUtils.isNotEmpty(docList)) {
       logResponse.setLogList(serviceLogDataList);
       logResponse.setStartIndex((int) docList.getStart());
       logResponse.setTotalCount(docList.getNumFound());

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
index d276769..74c549a 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
@@ -118,7 +118,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     String keyword = request.getKeyWord();
     Boolean isLastPage = request.isLastPage();
     SimpleQuery solrQuery = conversionService.convert(request, SimpleQuery.class);
-    if (!StringUtils.isBlank(keyword)) {
+    if (StringUtils.isNotBlank(keyword)) {
       try {
         return (ServiceLogResponse) getPageByKeyword(request, event);
       } catch (SolrException | SolrServerException e) {
@@ -170,7 +170,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     SimpleFacetQuery facetQuery = conversionService.convert(request, SimpleFacetQuery.class);
     SolrQuery solrQuery = new DefaultQueryParser().doConstructSolrQuery(facetQuery);
     String hostName = request.getHostName() == null ? "" : request.getHostName();
-    if (!StringUtils.isBlank(hostName)){
+    if (StringUtils.isNotBlank(hostName)){
       solrQuery.addFilterQuery(String.format("%s:*%s*", HOST, hostName));
     }
     QueryResponse response = serviceLogsSolrDao.process(solrQuery, "/service/logs/tree");
@@ -187,7 +187,7 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
 
     NodeListResponse list = new NodeListResponse();
     String componentName = request.getComponentName() == null ? "" : request.getComponentName();
-    if (!StringUtils.isBlank(componentName)){
+    if (StringUtils.isNotBlank(componentName)){
       solrQuery.addFilterQuery(COMPONENT + ":"
         + componentName);
       QueryResponse response = serviceLogsSolrDao.process(solrQuery, "/service/logs/hosts/components");

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigManager.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigManager.java
index 00ae332..03535b1 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigManager.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/UserConfigManager.java
@@ -137,10 +137,10 @@ public class UserConfigManager extends JsonManagerBase {
   }
 
   private boolean isValid(UserConfigData vHistory) {
-    return !StringUtils.isBlank(vHistory.getFiltername())
-        && !StringUtils.isBlank(vHistory.getRowType())
-        && !StringUtils.isBlank(vHistory.getUserName())
-        && !StringUtils.isBlank(vHistory.getValues());
+    return StringUtils.isNotBlank(vHistory.getFiltername())
+        && StringUtils.isNotBlank(vHistory.getRowType())
+        && StringUtils.isNotBlank(vHistory.getUserName())
+        && StringUtils.isNotBlank(vHistory.getValues());
   }
 
   public void deleteUserConfig(String id) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CommonLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CommonLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CommonLogData.java
index e2bba3f..21bfc5a 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CommonLogData.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/response/CommonLogData.java
@@ -18,10 +18,12 @@
  */
 package org.apache.ambari.logsearch.model.response;
 
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
 
 import java.util.Date;
+import java.util.Map;
 
 @JsonIgnoreProperties(ignoreUnknown = true)
 public interface CommonLogData extends LogData {
@@ -82,14 +84,14 @@ public interface CommonLogData extends LogData {
   void setEventCount(Long eventCount);
 
   @JsonProperty("event_md5")
-  public String getEventMd5();
+  String getEventMd5();
 
-  public void setEventMd5(String eventMd5);
+  void setEventMd5(String eventMd5);
 
   @JsonProperty("event_dur_ms")
-  public Long getEventDurationMs();
+  Long getEventDurationMs();
 
-  public void setEventDurationMs(Long eventDurationMs);
+  void setEventDurationMs(Long eventDurationMs);
 
   @JsonProperty("_ttl_")
   String getTtl();
@@ -110,4 +112,7 @@ public interface CommonLogData extends LogData {
   Integer getRouterField();
 
   void setRouterField(Integer routerField);
+  
+  @JsonAnyGetter
+  Map<String, Object> getAllDynamicFields();
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java
index 0fd65a3..9de05db 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrAuditLogData.java
@@ -23,7 +23,6 @@ import org.apache.solr.client.solrj.beans.Field;
 
 import java.util.Date;
 import java.util.List;
-import java.util.Map;
 
 import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.*;
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrCommonLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrCommonLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrCommonLogData.java
index bac5080..b5ce7ad 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrCommonLogData.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrCommonLogData.java
@@ -22,8 +22,13 @@ import org.apache.ambari.logsearch.model.response.CommonLogData;
 import org.apache.solr.client.solrj.beans.Field;
 
 import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
 
 import static org.apache.ambari.logsearch.solr.SolrConstants.CommonLogConstants.*;
+import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.KEY_DYNAMIC_FIELDS;
+import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.STORED_TOKEN_DYNAMIC_FIELDS;
+import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.WS_DYNAMIC_FIELDS;
 
 public class SolrCommonLogData implements CommonLogData {
 
@@ -78,6 +83,15 @@ public class SolrCommonLogData implements CommonLogData {
   @Field(ROUTER_FIELD)
   private Integer routerField;
 
+  @Field(STORED_TOKEN_DYNAMIC_FIELDS)
+  private Map<String, Object> stdDynamicFields;
+
+  @Field(KEY_DYNAMIC_FIELDS)
+  private Map<String, Object> keyDynamicFields;
+
+  @Field(WS_DYNAMIC_FIELDS)
+  private Map<String, Object> wsDynamicFields;
+
   @Override
   public String getId() {
     return this.id;
@@ -247,4 +261,32 @@ public class SolrCommonLogData implements CommonLogData {
   public void setType(String type) {
     this.type = type;
   }
+
+  @Override
+  public Map<String, Object> getAllDynamicFields() {
+    Map<String, Object> allDynamicFields = new HashMap<>();
+    if (stdDynamicFields != null) {
+      allDynamicFields.putAll(stdDynamicFields);
+    }
+    if (keyDynamicFields != null) {
+      allDynamicFields.putAll(keyDynamicFields);
+    }
+    if (wsDynamicFields != null) {
+      allDynamicFields.putAll(wsDynamicFields);
+    }
+    
+    return allDynamicFields;
+  }
+
+  public void setStdDynamicFields(Map<String, Object> stdDynamicFields) {
+    this.stdDynamicFields = stdDynamicFields;
+  }
+
+  public void setKeyDynamicFields(Map<String, Object> keyDynamicFields) {
+    this.keyDynamicFields = keyDynamicFields;
+  }
+
+  public void setWsDynamicFields(Map<String, Object> wsDynamicFields) {
+    this.wsDynamicFields = wsDynamicFields;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrServiceLogData.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrServiceLogData.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrServiceLogData.java
index 1d4cecf..c6fdba3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrServiceLogData.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/model/SolrServiceLogData.java
@@ -22,7 +22,6 @@ import org.apache.ambari.logsearch.model.response.ServiceLogData;
 import org.apache.solr.client.solrj.beans.Field;
 
 import java.util.Date;
-import java.util.Map;
 
 import static org.apache.ambari.logsearch.solr.SolrConstants.ServiceLogConstants.*;
 
@@ -49,15 +48,6 @@ public class SolrServiceLogData extends SolrCommonLogData implements ServiceLogD
   @Field(HOST)
   private String host;
 
-  @Field(STORED_TOKEN_DYNAMIC_FIELDS)
-  private Map<String, Object> stdDynamicFields;
-
-  @Field(KEY_DYNAMIC_FIELDS)
-  private Map<String, Object> keyDynamicFields;
-
-  @Field(WS_DYNAMIC_FIELDS)
-  private Map<String, Object> wsDynamicFields;
-
   @Override
   public String getPath() {
     return path;
@@ -127,28 +117,4 @@ public class SolrServiceLogData extends SolrCommonLogData implements ServiceLogD
   public void setLevel(String level) {
     this.level = level;
   }
-
-  public Map<String, Object> getStdDynamicFields() {
-    return stdDynamicFields;
-  }
-
-  public void setStdDynamicFields(Map<String, Object> stdDynamicFields) {
-    this.stdDynamicFields = stdDynamicFields;
-  }
-
-  public Map<String, Object> getKeyDynamicFields() {
-    return keyDynamicFields;
-  }
-
-  public void setKeyDynamicFields(Map<String, Object> keyDynamicFields) {
-    this.keyDynamicFields = keyDynamicFields;
-  }
-
-  public Map<String, Object> getWsDynamicFields() {
-    return wsDynamicFields;
-  }
-
-  public void setWsDynamicFields(Map<String, Object> wsDynamicFields) {
-    this.wsDynamicFields = wsDynamicFields;
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
index 6369107..7a856d2 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/util/SolrUtil.java
@@ -28,11 +28,13 @@ import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.lucene.analysis.core.KeywordTokenizerFactory;
 import org.apache.lucene.analysis.path.PathHierarchyTokenizerFactory;
 import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
+import org.apache.lucene.analysis.util.TokenizerFactory;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.schema.TrieDoubleField;
 import org.apache.solr.schema.TrieFloatField;
 import org.apache.solr.schema.TrieIntField;
 import org.apache.solr.schema.TrieLongField;
+import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang.StringUtils;
 
 public class SolrUtil {
@@ -116,18 +118,7 @@ public class SolrUtil {
     return search;
   }
 
-  public static String makeSolrSearchString(String search) {
-    String newString = search.trim();
-    String newSearch = newString.replaceAll("(?=[]\\[+&|!(){},:\"^~/=$@%?:.\\\\])", "\\\\");
-    newSearch = newSearch.replace("\n", "*");
-    newSearch = newSearch.replace("\t", "*");
-    newSearch = newSearch.replace("\r", "*");
-    newSearch = newSearch.replace("**", "*");
-    newSearch = newSearch.replace("***", "*");
-    return "*" + newSearch + "*";
-  }
-
-  public static String makeSolrSearchStringWithoutAsterisk(String search) {
+  private static String makeSolrSearchStringWithoutAsterisk(String search) {
     String newString = search.trim();
     String newSearch = newString.replaceAll("(?=[]\\[+&|!(){}^\"~=/$@%?:.\\\\])", "\\\\");
     newSearch = newSearch.replace("\n", "*");
@@ -161,28 +152,19 @@ public class SolrUtil {
   }
   
 
-  public static boolean isSolrFieldNumber(String fieldType, Map<String, String> schemaFieldsMap) {
+  private static boolean isSolrFieldNumber(String fieldType, Map<String, String> schemaFieldsMap) {
     if (StringUtils.isBlank(fieldType)) {
       return false;
     } else {
       HashMap<String, Object> typeInfoMap = getFieldTypeInfoMap(fieldType, schemaFieldsMap);
-      if (typeInfoMap == null || typeInfoMap.isEmpty()) {
+      if (MapUtils.isEmpty(typeInfoMap)) {
         return false;
       }
       String fieldTypeClassName = (String) typeInfoMap.get("class");
-      if (fieldTypeClassName.equalsIgnoreCase(TrieIntField.class.getSimpleName())) {
-        return true;
-      }
-      if (fieldTypeClassName.equalsIgnoreCase(TrieDoubleField.class.getSimpleName())) {
-        return true;
-      }
-      if (fieldTypeClassName.equalsIgnoreCase(TrieFloatField.class.getSimpleName())) {
-        return true;
-      }
-      if (fieldTypeClassName.equalsIgnoreCase(TrieLongField.class.getSimpleName())) {
-        return true;
-      }
-      return false;
+      return fieldTypeClassName.equalsIgnoreCase(TrieIntField.class.getSimpleName()) ||
+             fieldTypeClassName.equalsIgnoreCase(TrieDoubleField.class.getSimpleName()) ||
+             fieldTypeClassName.equalsIgnoreCase(TrieFloatField.class.getSimpleName()) ||
+             fieldTypeClassName.equalsIgnoreCase(TrieLongField.class.getSimpleName());
     }
   }
 
@@ -237,7 +219,7 @@ public class SolrUtil {
     }
   }
   
-  public static HashMap<String, Object> getFieldTypeInfoMap(String fieldType, Map<String, String> schemaFieldsTypeMap) {
+  private static HashMap<String, Object> getFieldTypeInfoMap(String fieldType, Map<String, String> schemaFieldsTypeMap) {
     String fieldTypeMetaData = schemaFieldsTypeMap.get(fieldType);
     HashMap<String, Object> fieldTypeMap = JSONUtil.jsonToMapObject(fieldTypeMetaData);
     if (fieldTypeMap == null) {
@@ -273,7 +255,7 @@ public class SolrUtil {
     setFacetLimit(solrQuery, -1);
   }
 
-  public static void setFacetLimit(SolrQuery solrQuery, int limit) {
+  private static void setFacetLimit(SolrQuery solrQuery, int limit) {
     solrQuery.set("facet.limit", limit);
   }
 
@@ -286,22 +268,20 @@ public class SolrUtil {
     }
   }
 
-  private static boolean checkTokenizer(String fieldType, Class tokenizerFactoryClass, Map<String, String> schemaFieldsMap) {
+  @SuppressWarnings("unchecked")
+  private static boolean checkTokenizer(String fieldType, Class<? extends TokenizerFactory> tokenizerFactoryClass,
+      Map<String, String> schemaFieldsMap) {
     HashMap<String, Object> fieldTypeMap = SolrUtil.getFieldTypeInfoMap(fieldType ,schemaFieldsMap);
     HashMap<String, Object> analyzer = (HashMap<String, Object>) fieldTypeMap.get("analyzer");
-    if (analyzer != null) {
-      HashMap<String, Object> tokenizerMap = (HashMap<String, Object>) analyzer.get("tokenizer");
-      if (tokenizerMap != null) {
-        String tokenizerClass = (String) tokenizerMap.get("class");
-        if (!StringUtils.isEmpty(tokenizerClass)) {
-          tokenizerClass =tokenizerClass.replace("solr.", "");
-          if (tokenizerClass.equalsIgnoreCase(tokenizerFactoryClass
-            .getSimpleName())) {
-            return true;
-          }
-        }
+    HashMap<String, Object> tokenizerMap = (HashMap<String, Object>)MapUtils.getObject(analyzer, "tokenizer");
+    if (tokenizerMap != null) {
+      String tokenizerClass = (String) tokenizerMap.get("class");
+      if (StringUtils.isNotEmpty(tokenizerClass)) {
+        tokenizerClass = tokenizerClass.replace("solr.", "");
+        return tokenizerClass.equalsIgnoreCase(tokenizerFactoryClass.getSimpleName());
       }
     }
+    
     return false;
   }
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
index 29fd5b2..808320d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/filters/LogsearchKRBAuthenticationFilter.java
@@ -148,7 +148,7 @@ public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter {
       HttpServletResponse response) throws IOException, ServletException {
     logger.debug("LogsearchKRBAuthenticationFilter private filter");
     String userName = getUsernameFromResponse(response);
-    if (!StringUtils.isEmpty(userName)) {
+    if (StringUtils.isNotEmpty(userName)) {
       Authentication existingAuth = SecurityContextHolder.getContext()
           .getAuthentication();
       if (existingAuth == null || !existingAuth.isAuthenticated()) {
@@ -199,7 +199,7 @@ public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter {
       KerberosName.setRules(PropertiesHelper.getProperty(NAME_RULES, "DEFAULT"));
       String userName = getUsernameFromRequest(httpRequest);
       if ((existingAuth == null || !existingAuth.isAuthenticated())
-          && (!StringUtils.isEmpty(userName))) {
+          && (StringUtils.isNotEmpty(userName))) {
         // --------------------------- To Create Logsearch Session--------------------------------------
         // if we get the userName from the token then log into logsearch using the same user
         final List<GrantedAuthority> grantedAuths = new ArrayList<>();
@@ -236,8 +236,8 @@ public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter {
       String keytab = PropertiesHelper.getProperty(KEYTAB);
       String principal = PropertiesHelper.getProperty(PRINCIPAL);
       String hostname = PropertiesHelper.getProperty(HOST_NAME);
-      if (!StringUtils.isEmpty(keytab) && !StringUtils.isEmpty(principal)
-          && !StringUtils.isEmpty(hostname)) {
+      if (StringUtils.isNotEmpty(keytab) && StringUtils.isNotEmpty(principal)
+          && StringUtils.isNotEmpty(hostname)) {
         spnegoEnable = true;
       }
     }
@@ -306,7 +306,7 @@ public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter {
         Iterator<String> iterator = cookiesCollection.iterator();
         while (iterator.hasNext()) {
           String cookie = iterator.next();
-          if (!StringUtils.isEmpty(cookie)) {
+          if (StringUtils.isNotEmpty(cookie)) {
             if (cookie.toLowerCase().startsWith(AUTH_COOKIE_NAME.toLowerCase())) {
               Matcher m = usernamePattern.matcher(cookie);
               if (m.find()) {
@@ -314,7 +314,7 @@ public class LogsearchKRBAuthenticationFilter extends LogsearchKrbFilter {
               }
             }
           }
-          if (!StringUtils.isEmpty(userName)) {
+          if (StringUtils.isNotEmpty(userName)) {
             break;
           }
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LdapProperties.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LdapProperties.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LdapProperties.java
index 2a1b4ee..82e71fe 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LdapProperties.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/web/security/LdapProperties.java
@@ -63,7 +63,7 @@ public class LdapProperties {
     } else {
       List<String> list = new ArrayList<String>();
       list.add(protocol + primaryUrl);
-      if (!StringUtils.isEmpty(secondaryUrl)) {
+      if (StringUtils.isNotEmpty(secondaryUrl)) {
         list.add(protocol + secondaryUrl);
       }
       return list;

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditTabLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditTabLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditTabLayoutView.js
index 4e09e88..7710156 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditTabLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/audit/AuditTabLayoutView.js
@@ -312,7 +312,7 @@ define(['require',
           if (columns[name] === undefined && value != "_version_") {
             var columnObj = {
               name: Globals.invertedAuditLogMappings[value],
-              label:value,
+              label: value,
               cell: "String",
               sortType: 'toggle',
               editable: false

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/static/schema_fields.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/static/schema_fields.json b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/static/schema_fields.json
index 7b1a9a9..55f1d3c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/static/schema_fields.json
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/static/schema_fields.json
@@ -65,7 +65,24 @@
     "text": "Text",
     "type": "Type",
     "ugi": "UGI",
-    "reqUser": "User"
+    "reqUser": "User",
+    
+    "ws_base_url": "Base URL",
+    "ws_command": "Command",
+    "ws_component": "Component",
+    "ws_details": "Details",
+    "ws_display_name": "Display Name",
+    "ws_os": "OS",
+    "ws_repo_id": "Repo Id",
+    "ws_repo_version": "Repo Version",
+    "ws_repositories": "Repositories",
+    "ws_result_status": "Result Status",
+    "ws_roles": "Roles",
+    "ws_stack_version": "Stack Version",
+    "ws_stack": "Stack",
+    "ws_status": "Status",
+    "ws_version_note": "Version Note",
+    "ws_version_number": "Version Number"
   },
   "auditLogExcludes": ["tags","tags_str", "seq_num"]
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties b/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
index f930ec9..879b786 100644
--- a/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
+++ b/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
@@ -22,7 +22,8 @@ logfeeder.config.files=shipper-conf/global.config.json,\
   shipper-conf/input.config-hst.json,\
   shipper-conf/input.config-system_message.json,\
   shipper-conf/input.config-secure_log.json,\
-  shipper-conf/input.config-hdfs.json
+  shipper-conf/input.config-hdfs.json,\
+  shipper-conf/input.config-ambari.json
 logfeeder.log.filter.enable=true
 logfeeder.solr.config.interval=5
 logfeeder.solr.core.config.name=history

http://git-wip-us.apache.org/repos/asf/ambari/blob/8073a1bc/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-ambari.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-ambari.json b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-ambari.json
new file mode 100644
index 0000000..576bfa8
--- /dev/null
+++ b/ambari-logsearch/docker/test-config/logfeeder/shipper-conf/input.config-ambari.json
@@ -0,0 +1,585 @@
+{
+  "input":[
+    {
+      "type":"ambari_agent",
+      "rowtype":"service",
+      "path":"/root/test-logs/ambari-server/ambari-agent.log"
+    },
+    {
+      "type":"ambari_server",
+      "rowtype":"service",
+      "path":"/root/test-logs/ambari-server/ambari-server.log"
+    },
+    {
+      "type":"ambari_alerts",
+      "rowtype":"service",
+      "add_fields":{
+        "level":"INFO"
+      },
+      "path":"/root/test-logs/ambari-server/ambari-alerts.log"
+    },
+    {
+      "type":"ambari_config_changes",
+      "rowtype":"service",
+      "path":"/root/test-logs/ambari-server/ambari-config-changes.log"
+    },
+    {
+      "type":"ambari_eclipselink",
+      "rowtype":"service",
+      "path":"/root/test-logs/ambari-server/ambari-eclipselink.log"
+    },
+    {
+      "type":"ambari_server_check_database",
+      "rowtype":"service",
+      "path":"/root/test-logs/ambari-server/ambari-server-check-database.log"
+    },
+    {
+      "type":"ambari_audit",
+      "rowtype":"audit",
+      "add_fields":{
+        "logType":"AmbariAudit",
+        "enforcer":"ambari-acl",
+        "repoType":"1",
+        "repo":"ambari",
+        "level":"INFO"
+      },
+      "path":"/root/test-logs/ambari-server/ambari-audit.log"
+    }
+
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_agent"
+          ]
+
+        }
+
+      },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime} %{JAVAFILE:file}:%{INT:line_number} - %{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+
+        },
+        "level":{
+          "map_fieldvalue":{
+            "pre_value":"WARNING",
+            "post_value":"WARN"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_server"
+          ]
+          
+        }
+        
+      },
+      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
+      "multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
+      "message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"dd MMM yyyy HH:mm:ss"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_alerts"
+          ]
+          
+        }
+        
+      },
+      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_config_changes"
+          ]
+          
+        }
+        
+      },
+      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_eclipselink"
+          ]
+          
+        }
+        
+      },
+      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
+      "multiline_pattern":"^(\\[EL%{SPACE}%{LOGLEVEL:level}\\])",
+      "message_pattern":"(?m)^\\[EL%{SPACE}%{LOGLEVEL:level}\\]:%{SPACE}%{TIMESTAMP_ISO8601:logtime}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss.SSS"
+          }
+
+        },
+        "level":{
+          "map_fieldvalue":{
+            "pre_value":"Warning",
+            "post_value":"Warn"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_server_check_database"
+          ]
+          
+        }
+        
+      },
+      "log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_audit"
+          ]
+
+        }
+
+      },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime},%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "evtTime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd'T'HH:mm:ss.SSSXX"
+          }
+
+        }
+
+      }
+
+    },
+    {
+      "filter":"keyvalue",
+      "sort_order":1,
+      "conditions":{
+        "fields":{
+          "type":[
+            "ambari_audit"
+          ]
+
+        }
+
+      },
+      "source_field":"log_message",
+      "field_split":", ",
+      "value_borders":"()",
+      "post_map_values":{
+        "User":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"reqUser"
+          }
+        },
+        "Hostname":{
+          "map_fieldname":{
+            "new_fieldname":"host"
+          }
+        },
+        "Host name":{
+          "map_fieldname":{
+            "new_fieldname":"host"
+          }
+        },
+        "RemoteIp":{
+          "map_fieldname":{
+            "new_fieldname":"cliIP"
+          }
+        },
+        "RequestType":{
+          "map_fieldname":{
+            "new_fieldname":"cliType"
+          }
+        },
+        "RequestId":{
+          "map_fieldname":{
+            "new_fieldname":"request_id"
+          }
+        },
+        "TaskId":{
+          "map_fieldname":{
+            "new_fieldname":"task_id"
+          }
+        },
+        "Operation":{
+          "map_fieldname":{
+            "new_fieldname":"action"
+          }
+        },
+        "url":{
+          "map_fieldname":{
+            "new_fieldname":"resource"
+          }
+        },
+        "ResourcePath":{
+          "map_fieldname":{
+            "new_fieldname":"resource"
+          }
+        },
+        "Cluster name":{
+          "map_fieldname":{
+            "new_fieldname":"cluster"
+          }
+        },
+        "Reason":{
+          "map_fieldname":{
+            "new_fieldname":"reason"
+          }
+        },
+        "Base URL":{
+          "map_fieldname":{
+            "new_fieldname":"ws_base_url"
+          }
+        },
+        "Command":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_command"
+          }
+        },
+        "Component":{
+          "map_fieldname":{
+            "new_fieldname":"ws_component"
+          }
+        },
+        "Details":{
+          "map_fieldname":{
+            "new_fieldname":"ws_details"
+          }
+        },
+        "Display name":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_display_name"
+          }
+        },
+        "OS":{
+          "map_fieldname":{
+            "new_fieldname":"ws_os"
+          }
+        },
+        "Repo id":{
+          "map_fieldname":{
+            "new_fieldname":"ws_repo_id"
+          }
+        },
+        "Repo version":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_repo_version"
+          }
+        },
+        "Repositories":{
+          "map_fieldname":{
+            "new_fieldname":"ws_repositories"
+          }
+        },
+        "Roles":{
+          "map_fieldname":{
+            "new_fieldname":"ws_roles"
+          }
+        },
+        "Stack":{
+          "map_fieldname":{
+            "new_fieldname":"ws_stack"
+          }
+        },
+        "Stack version":{
+          "map_fieldname":{
+            "new_fieldname":"ws_stack_version"
+          }
+        },
+        "VersionNote":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_version_note"
+          }
+        },
+        "VersionNumber":{
+          "map_fieldvalue":{
+            "pre_value":"null",
+            "post_value":"unknown"
+          },
+          "map_fieldname":{
+            "new_fieldname":"ws_version_number"
+          }
+        },
+        "Status":[
+         {
+           "map_fieldcopy":{
+             "copy_name": "ws_status"
+           }
+         },
+         {
+            "map_fieldvalue":{
+              "pre_value":"Success",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"Successfully queued",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"QUEUED",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"PENDING",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"COMPLETED",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"IN_PROGRESS",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"Failed",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"Failed to queue",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"HOLDING",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"HOLDING_FAILED",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"HOLDING_TIMEDOUT",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"FAILED",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"TIMEDOUT",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"ABORTED",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"SKIPPED_FAILED",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldname":{
+              "new_fieldname":"result"
+            }
+          }
+        ],
+        "ResultStatus":[
+          {
+            "map_fieldcopy":{
+              "copy_name": "ws_result_status"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"200 OK",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"201 Created",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"202 Accepted",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"400 Bad Request",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"401 Unauthorized",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"403 Forbidden",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"404 Not Found",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"409 Resource Conflict",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"500 Internal Server Error",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldname":{
+              "new_fieldname":"result"
+            }
+          }
+        ]
+
+      }
+
+    }
+
+  ]
+
+}


[4/6] ambari git commit: AMBARI-18548. Declarative Logsearch/Logfeeder Component Metadata for Stack Component (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-logsearch-conf.xml
new file mode 100644
index 0000000..2f13d3f
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-logsearch-conf.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Oozie</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>OOZIE_SERVER:oozie_app</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"oozie_app",
+      "rowtype":"service",
+      "path":"{{default('/configurations/oozie-env/oozie_log_dir', '/var/log/oozie')}}/oozie.log"
+    }
+   ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "oozie_app"
+          ]
+         }
+       },
+      "log4j_format":"%d{ISO8601} %5p %c{1}:%L - SERVER[${oozie.instance.id}] %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{DATA:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/ranger-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/ranger-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/ranger-logsearch-conf.xml
new file mode 100644
index 0000000..7b7421d
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/ranger-logsearch-conf.xml
@@ -0,0 +1,111 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Ranger</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>RANGER_SERVER:ranger_server,ranger_dbpatch;RANGER_USERSYNC:ranger_usersync;</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"ranger_admin",
+      "rowtype":"service",
+      "path":"{{default('/configurations/ranger-env/ranger_admin_log_dir', '/var/log/ranger/admin')}}/xa_portal.log"
+    },
+    {
+      "type":"ranger_dbpatch",
+      "is_enabled":"true",
+      "path":"{{default('/configurations/ranger-env/ranger_admin_log_dir', '/var/log/ranger/admin')}}/ranger_db_patch.log"
+    },
+    {
+      "type":"ranger_usersync",
+      "rowtype":"service",
+      "path":"{{default('/configurations/ranger-env/ranger_usersync_log_dir', '/var/log/ranger/usersync')}}/usersync.log"
+    }
+   ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ranger_admin",
+            "ranger_dbpatch"
+          ]
+         }
+       },
+      "log4j_format":"%d [%t] %-5p %C{6} (%F:%L) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ranger_usersync"
+          ]
+         }
+       },
+      "log4j_format":"%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n",
+      "multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
+      "message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"dd MMM yyyy HH:mm:ss"
+          }
+         }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/ranger-kms-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/ranger-kms-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/ranger-kms-logsearch-conf.xml
new file mode 100644
index 0000000..226d2b0
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/ranger-kms-logsearch-conf.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Ranger KMS</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>RANGER_KMS:ranger_kms</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"ranger_kms",
+      "rowtype":"service",
+      "path":"{{default('/configurations/kms-env/kms_log_dir', '/var/log/ranger/kms')}}/kms.log"
+    }
+   ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ranger_kms"
+          ]
+         }
+       },
+      "log4j_format":"%d{ISO8601} %-5p %c{1} - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-logsearch-conf.xml
new file mode 100644
index 0000000..63201ef
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-logsearch-conf.xml
@@ -0,0 +1,98 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Spark</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>SPARK_JOBHISTORYSERVER:spark_jobhistory_server;SPARK_THRIFTSERVER:spark_thriftserver;LIVY_SERVER:livy_server</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+   "input":[
+      {
+       "type":"spark_jobhistory_server",
+       "rowtype":"service",
+       "path":"{{default('/configurations/spark-env/spark_log_dir', '/var/log/spark')}}/spark-*-org.apache.spark.deploy.history.HistoryServer*.out"
+     },
+     {
+       "type":"spark_thriftserver",
+       "rowtype":"service",
+       "path":"{{default('/configurations/spark-env/spark_log_dir', '/var/log/spark')}}/spark-*-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2*.out"
+     },
+     {
+       "type":"livy_server",
+       "rowtype":"service",
+       "path":"{{default('/configurations/livy-env/livy_log_dir', '/var/log/livy')}}/livy-livy-server.out"
+     }
+   ],
+   "filter":[
+       {
+          "filter":"grok",
+          "conditions":{
+            "fields":{
+              "type":[
+                "spark_jobhistory_server",
+                "spark_thriftserver",
+                "livy_server"
+              ]
+             }
+          },
+          "log4j_format":"",
+          "multiline_pattern":"^(%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level})",
+          "message_pattern":"(?m)^%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVAFILE:file}:%{SPACE}%{GREEDYDATA:log_message}",
+          "post_map_values":{
+            "logtime":{
+              "map_date":{
+                "target_date_pattern":"yy/MM/dd HH:mm:ss"
+              }
+             },
+            "level":{
+              "map_fieldvalue":{
+                "pre_value":"WARNING",
+                "post_value":"WARN"
+              }
+             }
+           }
+      }
+   ]
+}
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-logsearch-conf.xml
new file mode 100644
index 0000000..3b00d51
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-logsearch-conf.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Spark2</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>SPARK2_JOBHISTORYSERVER:spark2_jobhistory_server;SPARK2_THRIFTSERVER:spark2_thriftserver</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+   "input":[
+      {
+       "type":"spark2_jobhistory_server",
+       "rowtype":"service",
+       "path":"{{default('/configurations/spark2-env/spark_log_dir', '/var/log/spark2')}}/spark-*-org.apache.spark.deploy.history.HistoryServer*.out"
+     },
+     {
+       "type":"spark2_thriftserver",
+       "rowtype":"service",
+       "path":"{{default('/configurations/spark2-env/spark_log_dir', '/var/log/spark2')}}/spark-*-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2*.out"
+     }
+   ],
+   "filter":[
+      {
+          "filter":"grok",
+          "conditions":{
+            "fields":{
+              "type":[
+                "spark2_jobhistory_server",
+                "spark2_thriftserver"
+              ]
+             }
+          },
+          "log4j_format":"",
+          "multiline_pattern":"^(%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level})",
+          "message_pattern":"(?m)^%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVAFILE:file}:%{SPACE}%{GREEDYDATA:log_message}",
+          "post_map_values":{
+            "logtime":{
+              "map_date":{
+                "target_date_pattern":"yy/MM/dd HH:mm:ss"
+              }
+             },
+            "level":{
+              "map_fieldvalue":{
+                "pre_value":"WARNING",
+                "post_value":"WARN"
+              }
+             }
+           }
+      }
+   ]
+}
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/STORM/0.9.1/configuration/storm-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/configuration/storm-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/STORM/0.9.1/configuration/storm-logsearch-conf.xml
new file mode 100644
index 0000000..a080a20
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/configuration/storm-logsearch-conf.xml
@@ -0,0 +1,110 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Storm</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>NIMBUS:storm_nimbus;SUPERVISOR:storm_supervisor,storm_worker,storm_logviewer;STORM_UI_SERVER:storm_ui;DRPC_SERVER:storm_drpc</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"storm_drpc",
+      "rowtype":"service",
+      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/drpc.log"
+    },
+    {
+      "type":"storm_logviewer",
+      "rowtype":"service",
+      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/logviewer.log"
+    },
+    {
+      "type":"storm_nimbus",
+      "rowtype":"service",
+      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/nimbus.log"
+    },
+    {
+      "type":"storm_supervisor",
+      "rowtype":"service",
+      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/supervisor.log"
+    },
+    {
+      "type":"storm_ui",
+      "rowtype":"service",
+      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/ui.log"
+    },
+    {
+      "type":"storm_worker",
+      "rowtype":"service",
+      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/*worker*.log"
+    }
+   ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "storm_drpc",
+            "storm_logviewer",
+            "storm_nimbus",
+            "storm_supervisor",
+            "storm_ui",
+            "storm_worker"
+          ]
+         }
+       },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss.SSS"
+          }
+         }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-logsearch-conf.xml
new file mode 100644
index 0000000..3c0abbf
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-logsearch-conf.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>MapReduce</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>HISTORYSERVER:mapred_historyserver</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"mapred_historyserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/mapred-env/mapred_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/mapred-env/mapred_user', 'mapred')}}/mapred-{{default('configurations/mapred-env/mapred_user', 'mapred')}}-historyserver*.log"
+    }
+   ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "mapred_historyserver"
+          ]
+         }
+       },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-logsearch-conf.xml
new file mode 100644
index 0000000..95cf0c9
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-logsearch-conf.xml
@@ -0,0 +1,104 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>YARN</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>RESOURCEMANAGER:yarn_resourcemanager,yarn_historyserver,yarn_jobsummary;NODEMANAGER:yarn_nodemanager;APP_TIMELINE_SERVER:yarn_timelineserver</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"yarn_nodemanager",
+      "rowtype":"service",
+      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/yarn-{{default('configurations/yarn-env/yarn_user', 'yarn')}}-nodemanager-*.log"
+    },
+    {
+      "type":"yarn_resourcemanager",
+      "rowtype":"service",
+      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/yarn-{{default('configurations/yarn-env/yarn_user', 'yarn')}}-resourcemanager-*.log"
+    },
+    {
+      "type":"yarn_timelineserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/yarn-{{default('configurations/yarn-env/yarn_user', 'yarn')}}-timelineserver-*.log"
+    },
+    {
+      "type":"yarn_historyserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/yarn-{{default('configurations/yarn-env/yarn_user', 'yarn')}}-historyserver-*.log"
+    },
+    {
+      "type":"yarn_jobsummary",
+      "rowtype":"service",
+      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/hadoop-mapreduce.jobsummary.log"
+    }
+   ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "yarn_historyserver",
+            "yarn_jobsummary",
+            "yarn_nodemanager",
+            "yarn_resourcemanager",
+            "yarn_timelineserver"
+          ]
+         }
+       },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     }
+   ]
+}
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-logsearch-conf.xml
new file mode 100644
index 0000000..0b8ab7a
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-logsearch-conf.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Zeppelin</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>ZEPPELIN_MASTER:zeppelin</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"zeppelin",
+      "rowtype":"service",
+      "path":"{{default('/configurations/zeppelin-env/zeppelin_log_dir', '/var/log/zeppelin')}}/zeppelin-zeppelin-*.log"
+    }
+   ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "zeppelin"
+          ]
+         }
+       },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\])",
+      "message_pattern":"(?m)^%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}\\(\\{{"{"}}%{DATA:thread_name}\\{{"}"}}%{SPACE}%{JAVAFILE:file}\\[%{JAVAMETHOD:method}\\]:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-logsearch-conf.xml
new file mode 100644
index 0000000..325af14
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-logsearch-conf.xml
@@ -0,0 +1,76 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Zookeeper</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>ZOOKEEPER_SERVER:zookeeper</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+     "type":"zookeeper",
+     "rowtype":"service",
+     "path":"{{default('/configurations/zookeeper-env/zk_log_dir', '/var/log/zookeeper')}}/zookeeper*.log"
+    }
+  ],
+  "filter":[
+   {
+      "filter":"grok",
+      "conditions":{
+        "fields":{"type":["zookeeper"]}
+      },
+     "log4j_format":"%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n",
+     "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+     "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}-%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\@%{INT:line_number}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+     "post_map_values": {
+       "logtime": {
+         "map_date":{
+           "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+         }
+       }
+     }
+    }
+   ]
+}
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
index ba070fe..7a13637 100644
--- a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
+++ b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
@@ -62,7 +62,7 @@ class TestLogFeeder(RMFTestCase):
                               )
     self.assertResourceCalled('PropertiesFile', '/etc/ambari-logsearch-logfeeder/conf/logfeeder.properties',
                               properties={'logfeeder.checkpoint.folder': '/etc/ambari-logsearch-logfeeder/conf/checkpoints',
-                                          'logfeeder.config.files': 'global.config.json,output.config.json,input.config-accumulo.json,input.config-ambari.json,input.config-ams.json,input.config-atlas.json,input.config-falcon.json,input.config-flume.json,input.config-hbase.json,input.config-hdfs.json,input.config-hive.json,input.config-hst.json,input.config-infra.json,input.config-kafka.json,input.config-knox.json,input.config-logsearch.json,input.config-nifi.json,input.config-oozie.json,input.config-ranger.json,input.config-spark.json,input.config-spark2.json,input.config-storm.json,input.config-yarn.json,input.config-zeppelin.json,input.config-zookeeper.json',
+                                          'logfeeder.config.files': 'global.config.json,output.config.json,input.config-ambari.json,input.config-logsearch.json,input.config-zookeeper.json',
                                           'logfeeder.metrics.collector.hosts': '',
                                           'logfeeder.solr.core.config.name': 'history',
                                           'logfeeder.solr.zk_connect_string': 'c6401.ambari.apache.org:2181/infra-solr'
@@ -80,9 +80,7 @@ class TestLogFeeder(RMFTestCase):
                               encoding='utf-8'
                               )
 
-    logfeeder_supported_services = ['accumulo', 'ambari', 'ams', 'atlas', 'falcon', 'flume', 'hbase', 'hdfs', 'hive', 'hst',
-                                    'infra', 'kafka', 'knox', 'logsearch', 'nifi', 'oozie', 'ranger', 'spark', 'spark2', 'storm',
-                                    'yarn', 'zeppelin', 'zookeeper']
+    logfeeder_supported_services = ['ambari','logsearch']
 
     logfeeder_config_file_names = ['global.config.json', 'output.config.json'] + \
                                   ['input.config-%s.json' % (tag) for tag in logfeeder_supported_services]
@@ -91,6 +89,9 @@ class TestLogFeeder(RMFTestCase):
       self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/' + file_name,
                                 content=Template(file_name + ".j2")
                                 )
+    self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/input.config-zookeeper.json',
+                              content=InlineTemplate("pattern content")
+                              )
 
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/logfeeder.py",

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
index 8c0ed16..6b4fab8 100644
--- a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
+++ b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
@@ -173,7 +173,6 @@ class TestLogSearch(RMFTestCase):
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-
     self.configureResourcesCalled()
     self.assertNoMoreResources()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/test/python/stacks/2.4/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/configs/default.json b/ambari-server/src/test/python/stacks/2.4/configs/default.json
index a2c4791..c3eba53 100644
--- a/ambari-server/src/test/python/stacks/2.4/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.4/configs/default.json
@@ -229,6 +229,11 @@
       "zoo.cfg": {
         "clientPort": "2181"
       },
+      "zookeeper-logsearch-conf" : {
+        "service_name": "Zookeeper",
+        "component_mappings" : "ZOOKEEPER_SERVER:zookeeper",
+        "content" : "pattern content"
+      },
       "logsearch-admin-json" : {
         "logsearch_admin_username" : "admin",
         "logsearch_admin_password" : "admin",
@@ -314,9 +319,6 @@
       },
       "logfeeder-log4j": {
         "content": "&lt;?xml version=\"1.0\" encoding=\"UTF-8\" ?&gt;\n&lt;!--\n  Licensed to the Apache Software Foundation (ASF) under one or more\n  contributor license agreements.  See the NOTICE file distributed with\n  this work for additional information regarding copyright ownership.\n  The ASF licenses this file to You under the Apache License, Version 2.0\n  (the \"License\"); you may not use this file except in compliance with\n  the License.  You may obtain a copy of the License at\n\n      http://www.apache.org/licenses/LICENSE-2.0\n\n  Unless required by applicable law or agreed to in writing, software\n  distributed under the License is distributed on an \"AS IS\" BASIS,\n  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n  See the License for the specific language governing permissions and\n  limitations under the License.\n--&gt;\n&lt;!DOCTYPE log4j:configuration SYSTEM \"log4j.dtd\"&gt;\n&lt;log4j:configuration xmlns:log4j=\"http://jakarta.
 apache.org/log4j/\"&gt;\n  &lt;appender name=\"console\" class=\"org.apache.log4j.ConsoleAppender\"&gt;\n    &lt;param name=\"Target\" value=\"System.out\" /&gt;\n    &lt;layout class=\"org.apache.log4j.PatternLayout\"&gt;\n      &lt;param name=\"ConversionPattern\" value=\"%d [%t] %-5p %C{6} (%F:%L) - %m%n\" /&gt;\n    &lt;/layout&gt;\n  &lt;/appender&gt;\n\n  &lt;appender name=\"rolling_file\" class=\"org.apache.log4j.RollingFileAppender\"&gt; \n    &lt;param name=\"file\" value=\"{{logfeeder_log_dir}}/logfeeder.log\" /&gt;\n    &lt;param name=\"append\" value=\"true\" /&gt; \n    &lt;param name=\"maxFileSize\" value=\"10MB\" /&gt; \n    &lt;param name=\"maxBackupIndex\" value=\"10\" /&gt; \n    &lt;layout class=\"org.apache.log4j.PatternLayout\"&gt; \n      &lt;param name=\"ConversionPattern\" value=\"%d [%t] %-5p %C{6} (%F:%L) - %m%n\"/&gt; \n    &lt;/layout&gt; \n  &lt;/appender&gt; \n\n  &lt;category name=\"org.apache.ambari.logfeeder\" additivity=\"false\"&gt;\n    &lt;priori
 ty value=\"info\" /&gt;\n    &lt;appender-ref ref=\"rolling_file\" /&gt;\n  &lt;/category&gt;\n\n  &lt;root&gt;\n    &lt;priority value=\"warn\" /&gt;\n    &lt;appender-ref ref=\"rolling_file\" /&gt;\n  &lt;/root&gt;\n&lt;/log4j:configuration&gt;"
-      },
-      "logfeeder-input-configs": {
-        "content": "{\n  \"global\":{\n    \"add_fields\":{\n      \"cluster\":\"{{cluster_name}}\"\n    },\n    \"source\":\"file\",\n    \"tail\":\"true\",\n    \"gen_event_md5\":\"true\",\n    \"start_position\":\"beginning\"\n  },\n  \"input\":[\n    {\n      \"type\":\"accumulo_gc\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/gc_*.log\"\n    },\n    {\n      \"type\":\"accumulo_master\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/master_*.log\"\n    },\n    {\n      \"type\":\"accumulo_monitor\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/monitor_*.log\"\n    },\n    {\n      \"type\":\"accumulo_tracer\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/tracer_*.log\"\n    },\n    {\n      \"type\":\"accumulo_tserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/tserver_*.log\"\n    },\n    {\n      \"type\":\"atlas_app\",\n      \"rowtype\":\"se
 rvice\",\n      \"path\":\"{{atlas_log_dir}}/application.log\"\n    },\n    {\n      \"type\":\"ambari_agent\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ambari_agent_log_dir}}/ambari-agent.log\"\n    },\n    {\n      \"type\":\"ambari_server\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ambari_server_log_dir}}/ambari-server.log\"\n    },\n    {\n      \"type\":\"ams_hbase_master\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{metrics_collector_log_dir}}/hbase-ams-master-*.log\"\n    },\n    {\n      \"type\":\"ams_hbase_regionserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{metrics_collector_log_dir}}/hbase-ams-regionserver-*.log\"\n    },\n    {\n      \"type\":\"ams_collector\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{metrics_collector_log_dir}}/ambari-metrics-collector.log\"\n    },\n    {\n      \"type\":\"falcon_app\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{falcon_log_dir}}/falcon.application.log\"\n    },\n    {\
 n      \"type\":\"hbase_master\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hbase_log_dir}}/hbase-hbase-master-*.log\"\n    },\n    {\n      \"type\":\"hbase_regionserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hbase_log_dir}}/hbase-hbase-regionserver-*.log\"\n    },\n    {\n      \"type\":\"hdfs_datanode\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-datanode-*.log\"\n    },\n    {\n      \"type\":\"hdfs_namenode\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-namenode-*.log\"\n    },\n    {\n      \"type\":\"hdfs_journalnode\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-journalnode-*.log\"\n    },\n    {\n      \"type\":\"hdfs_secondarynamenode\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-secondarynamenode-*.log\"\n    },\n    {\n      \"type\":\"hdfs_zkfc\",\n      \"r
 owtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-zkfc-*.log\"\n    },\n    {\n      \"type\":\"hive_hiveserver2\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hive_log_dir}}/hiveserver2.log\"\n    },\n    {\n      \"type\":\"hive_metastore\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hive_log_dir}}/hivemetastore.log\"\n    },\n    {\n      \"type\":\"kafka_controller\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{kafka_log_dir}}/controller.log\"\n    },\n    {\n      \"type\":\"kafka_request\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{kafka_log_dir}}/kafka-request.log\"\n    },\n    {\n      \"type\":\"kafka_logcleaner\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{kafka_log_dir}}/log-cleaner.log\"\n    },\n    {\n      \"type\":\"kafka_server\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{kafka_log_dir}}/server.log\"\n    },\n    {\n      \"type\":\"kafka_statechange\",\n      \"rowtype\":\"service
 \",\n      \"path\":\"{{kafka_log_dir}}/state-change.log\"\n    },\n    {\n      \"type\":\"knox_gateway\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{knox_log_dir}}/gateway.log\"\n    },\n    {\n      \"type\":\"knox_cli\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{knox_log_dir}}/knoxcli.log\"\n    },\n    {\n      \"type\":\"knox_ldap\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{knox_log_dir}}/ldap.log\"\n    },\n    {\n      \"type\":\"mapred_historyserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{mapred_log_dir_prefix}}/mapred/mapred-mapred-historyserver*.log\"\n    },\n    {\n      \"type\":\"logsearch_app\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{logsearch_log_dir}}/logsearch.log\"\n    },\n    {\n      \"type\":\"logsearch_feeder\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{logfeeder_log_dir}}/logfeeder.log\"\n    },\n    {\n      \"type\":\"logsearch_perf\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{l
 ogsearch_log_dir}}/logsearch-performance.log\"\n    },\n    {\n      \"type\":\"ranger_admin\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ranger_admin_log_dir}}/xa_portal.log\"\n    },\n    {\n      \"type\":\"ranger_dbpatch\",\n      \"is_enabled\":\"true\",\n      \"path\":\"{{ranger_admin_log_dir}}/ranger_db_patch.log\"\n    },\n    {\n      \"type\":\"ranger_kms\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ranger_kms_log_dir}}/kms.log\"\n    },\n    {\n      \"type\":\"ranger_usersync\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ranger_usersync_log_dir}}/usersync.log\"\n    },\n    {\n      \"type\":\"oozie_app\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{oozie_log_dir}}/oozie.log\"\n    },\n    {\n      \"type\":\"yarn_nodemanager\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-nodemanager-*.log\"\n    },\n    {\n      \"type\":\"yarn_resourcemanager\",\n      \"rowtype\":\"service\",\n      \"p
 ath\":\"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-resourcemanager-*.log\"\n    },\n    {\n      \"type\":\"yarn_timelineserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-timelineserver-*.log\"\n    },\n    {\n      \"type\":\"yarn_historyserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-historyserver-*.log\"\n    },\n    {\n      \"type\":\"yarn_jobsummary\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{yarn_log_dir_prefix}}/yarn/hadoop-mapreduce.jobsummary.log\"\n    },\n    {\n      \"type\":\"storm_drpc\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/drpc.log\"\n    },\n    {\n      \"type\":\"storm_logviewer\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/logviewer.log\"\n    },\n    {\n      \"type\":\"storm_nimbus\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/nimbus.log\"\n    },\n    {\n      \"type\":\"st
 orm_supervisor\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/supervisor.log\"\n    },\n    {\n      \"type\":\"storm_ui\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/ui.log\"\n    },\n    {\n      \"type\":\"storm_worker\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/*worker*.log\"\n    },\n    {\n      \"type\":\"zookeeper\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{zk_log_dir}}/zookeeper/zookeeper*.out\"\n    },\n    {\n      \"type\":\"hdfs_audit\",\n      \"rowtype\":\"audit\",\n      \"is_enabled\":\"true\",\n      \"add_fields\":{\n        \"logType\":\"HDFSAudit\",\n        \"enforcer\":\"hadoop-acl\",\n        \"repoType\":\"1\",\n        \"repo\":\"hdfs\"\n      },\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hdfs-audit.log\"\n    }\n    \n  ],\n  \"filter\":[\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"accumulo_mas
 ter\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} [%-8c{2}] %-5p: %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"comment\":\"This one has one extra space after LEVEL\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"accumulo_gc\",\n            \"accumulo_monitor\",\n            \"accumulo_tracer\",\n            \"accumulo_tserver\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} [%-8c{2}] %-5p: %X{application} %m%n\",\n 
      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"atlas_app\",\n            \"falcon_app\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\"
 :{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ams_collector\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %p %c: %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ams_hbase_master\",\n    
         \"ams_hbase_regionserver\",\n            \"hbase_master\",\n            \"hbase_regionserver\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p [%t] %c{2}: %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ambari_agent\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"\",\n      \"multiline_pattern\":\"^(%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime})\",\n      \"
 message_pattern\":\"(?m)^%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime} %{JAVAFILE:file}:%{INT:line_number} - %{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        },\n        \"level\":{\n          \"map_fieldvalue\":{\n            \"pre_value\":\"WARNING\",\n            \"post_value\":\"WARN\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ambari_server\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{DATE} %5p [%t] %c{1}:%L - %m%n\",\n      \"multiline_pattern\":\"^(%{USER_SYNC_DATE:logtime})\",\n      \"message_pattern\":\"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{INT:line_number}
 %{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"dd MMM yyyy HH:mm:ss\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hdfs_datanode\",\n            \"hdfs_journalnode\",\n            \"hdfs_secondarynamenode\",\n            \"hdfs_namenode\",\n            \"hdfs_zkfc\",\n            \"knox_gateway\",\n            \"knox_cli\",\n            \"knox_ldap\",\n            \"mapred_historyserver\",\n            \"yarn_historyserver\",\n            \"yarn_jobsummary\",\n            \"yarn_nodemanager\",\n            \"yarn_resourcemanager\",\n            \"yarn_timelineserver\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTA
 MP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hive_hiveserver2\",\n            \"hive_metastore\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]:%{SPACE}%{JAVACLASS:logger_name}%{SP
 ACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"kafka_controller\",\n            \"kafka_request\",\n            \"kafka_logcleaner\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"[%d] %p %m (%c)%n\",\n      \"multiline_pattern\":\"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])\",\n      \"message_pattern\":\"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,S
 SS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"comment\":\"Suppose to be same log4j pattern as other kafka processes, but some reason thread is not printed\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"kafka_server\",\n            \"kafka_statechange\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"[%d] %p %m (%c)%n\",\n      \"multiline_pattern\":\"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])\",\n      \"message_pattern\":\"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"o
 ozie_app\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %5p %c{1}:%L - SERVER[${oozie.instance.id}] %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{DATA:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"logsearch_app\",\n            \"logsearch_feeder\",\n            \"logsearch_perf\",\n            \"ranger_admin\",\n            \"ranger_dbpatch\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d [%t] %-5p %C{6} (%F:%L) - %m%n\",\n  
     \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ranger_kms\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p %c{1} - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n  
     \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ranger_usersync\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n\",\n      \"multiline_pattern\":\"^(%{USER_SYNC_DATE:logtime})\",\n      \"message_pattern\":\"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"dd MMM yyyy HH:mm:ss\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n   
      \"fields\":{\n          \"type\":[\n            \"storm_drpc\",\n            \"storm_logviewer\",\n            \"storm_nimbus\",\n            \"storm_supervisor\",\n            \"storm_ui\",\n            \"storm_worker\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss.SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"zookeeper\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} - %-5p [%t:%C{1}@%
 L] - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}-%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\@%{INT:line_number}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hdfs_audit\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:evtTime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n    
     \"evtTime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"keyvalue\",\n      \"sort_order\":1,\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hdfs_audit\"\n          ]\n          \n        }\n        \n      },\n      \"source_field\":\"log_message\",\n      \"value_split\":\"=\",\n      \"field_split\":\"\t\",\n      \"post_map_values\":{\n        \"src\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"resource\"\n          }\n          \n        },\n        \"ip\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"cliIP\"\n          }\n          \n        },\n        \"allowed\":[\n          {\n            \"map_fieldvalue\":{\n              \"pre_value\":\"true\",\n              \"post_value\":\"1\"\n            }\n            \n          },\n          {\n            \"map
 _fieldvalue\":{\n              \"pre_value\":\"false\",\n              \"post_value\":\"0\"\n            }\n            \n          },\n          {\n            \"map_fieldname\":{\n              \"new_fieldname\":\"result\"\n            }\n            \n          }\n          \n        ],\n        \"cmd\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"action\"\n          }\n          \n        },\n        \"proto\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"cliType\"\n          }\n          \n        },\n        \"callerContext\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"req_caller_id\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"sort_order\":2,\n      \"source_field\":\"ugi\",\n      \"remove_source_field\":\"false\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hdfs_audit\"\n          ]\n          \n        
 }\n        \n      },\n      \"message_pattern\":\"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}\",\n      \"post_map_values\":{\n        \"user\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"reqUser\"\n          }\n          \n        },\n        \"x_user\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"reqUser\"\n          }\n          \n        },\n        \"p_user\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"reqUser\"\n          }\n          \n        },\n        \"k_user\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"proxyUsers\"\n          }\n          \n        },\n        \"p_authType\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"authType\"\n          }\n          \n        },\n        \"k_authType\":{\n          \"map_fieldname\":{\n            \"new_fie
 ldname\":\"proxyAuthType\"\n          }\n          \n        }\n        \n      }\n      \n    }\n    \n  ],\n  \"output\":[\n    {\n      \"is_enabled\":\"{{solr_service_logs_enable}}\",\n      \"comment\":\"Output to solr for service logs\",\n      \"destination\":\"solr\",\n      \"zk_connect_string\":\"{{zookeeper_quorum}}{{solr_znode}}\",\n      \"collection\":\"{{solr_collection_service_logs}}\",\n      \"number_of_shards\": \"{{logsearch_collection_service_logs_numshards}}\",\n      \"splits_interval_mins\": \"{{logsearch_service_logs_split_interval_mins}}\",\n      \"conditions\":{\n        \"fields\":{\n          \"rowtype\":[\n            \"service\"\n          ]\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"comment\":\"Output to solr for audit records\",\n      \"is_enabled\":\"{{solr_audit_logs_enable}}\",\n      \"destination\":\"solr\",\n      \"zk_connect_string\":\"{{zookeeper_quorum}}{{solr_znode}}\",\n      \"collection\":\"{{solr_collec
 tion_audit_logs}}\",\n      \"number_of_shards\": \"{{logsearch_collection_audit_logs_numshards}}\",\n      \"splits_interval_mins\": \"{{logsearch_audit_logs_split_interval_mins}}\",\n      \"conditions\":{\n        \"fields\":{\n          \"rowtype\":[\n            \"audit\"\n          ]\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"is_enabled\":\"{{kafka_service_logs_enable}}\",\n      \"destination\":\"kafka\",\n      \"broker_list\":\"{{kafka_broker_list}}\",\n      \"topic\":\"{{kafka_topic_service_logs}}\",\n      \"kafka.security.protocol\":\"{{kafka_security_protocol}}\",\n      \"kafka.sasl.kerberos.service.name\":\"{{kafka_kerberos_service_name}}\",\n      \"conditions\":{\n        \"fields\":{\n          \"rowtype\":[\n            \"service\"\n          ]\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"is_enabled\":\"{{kafka_audit_logs_enable}}\",\n      \"destination\":\"kafka\",\n      \"broker_list\":\"{{kafka_brok
 er_list}}\",\n      \"topic\":\"{{kafka_topic_audit_logs}}\",\n      \"kafka.security.protocol\":\"{{kafka_security_protocol}}\",\n      \"kafka.sasl.kerberos.service.name\":\"{{kafka_kerberos_service_name}}\",\n      \"conditions\":{\n        \"fields\":{\n          \"rowtype\":[\n            \"audit\"\n          ]\n          \n        }\n        \n      }\n      \n    }\n    \n  ]\n  \n}"
       }
     },
     "configuration_attributes": {


[5/6] ambari git commit: AMBARI-18548. Declarative Logsearch/Logfeeder Component Metadata for Stack Component (oleewere)

Posted by ol...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
index 4526640..1f5064a 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
@@ -19,6 +19,7 @@ limitations under the License.
 
 """
 from ambari_commons.constants import AMBARI_SUDO_BINARY
+from logsearch_config_aggregator import get_logfeeder_metadata, get_logsearch_metadata, get_logsearch_meta_configs
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.is_empty import is_empty
@@ -66,6 +67,11 @@ zookeeper_hosts = ",".join(zookeeper_hosts_list)
 cluster_name = str(config['clusterName'])
 availableServices = config['availableServices']
 
+configurations = config['configurations'] # need reference inside logfeeder jinja templates
+logsearch_meta_configs = get_logsearch_meta_configs(configurations)
+logsearch_metadata = get_logsearch_metadata(logsearch_meta_configs)
+logfeeder_metadata = get_logfeeder_metadata(logsearch_meta_configs)
+
 # for now just pick first collector
 if 'metrics_collector_hosts' in config['clusterHostInfo']:
   metrics_collector_hosts_list = ",".join(config['clusterHostInfo']['metrics_collector_hosts'])
@@ -139,41 +145,13 @@ logsearch_app_log4j_content = config['configurations']['logsearch-log4j']['conte
 # Log dirs
 ambari_server_log_dir = '/var/log/ambari-server'
 ambari_agent_log_dir = '/var/log/ambari-agent'
-knox_log_dir = '/var/log/knox'
 hst_log_dir = '/var/log/hst'
 hst_activity_log_dir = '/var/log/smartsense-activity'
 
-metrics_collector_log_dir = default('/configurations/ams-env/metrics_collector_log_dir', '/var/log/ambari-metrics-collector')
-metrics_monitor_log_dir = default('/configurations/ams-env/metrics_monitor_log_dir', '/var/log/ambari-metrics-monitor')
-metrics_grafana_log_dir = default('/configurations/ams-grafana-env/metrics_grafana_log_dir', '/var/log/ambari-metrics-grafana')
-
-atlas_log_dir = default('/configurations/atlas-env/metadata_log_dir', '/var/log/atlas')
-accumulo_log_dir = default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')
-falcon_log_dir = default('/configurations/falcon-env/falcon_log_dir', '/var/log/falcon')
-flume_log_dir = default('/configurations/flume-env/flume_log_dir', '/var/log/flume')
-hbase_log_dir = default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')
-hdfs_log_dir_prefix = default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')
-hive_log_dir = default('/configurations/hive-env/hive_log_dir', '/var/log/hive')
-hcat_log_dir = default('configurations/hive-env/hcat_log_dir', '/var/log/webhcat')
-infra_solr_log_dir = default('configurations/infra-solr-env/infra_solr_log_dir', '/var/log/ambari-infra-solr')
-kafka_log_dir = default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')
-nifi_log_dir = default('/configurations/nifi-env/nifi_node_log_dir', '/var/log/nifi')
-oozie_log_dir = default('/configurations/oozie-env/oozie_log_dir', '/var/log/oozie')
-ranger_usersync_log_dir = default('/configurations/ranger-env/ranger_usersync_log_dir', '/var/log/ranger/usersync')
-ranger_admin_log_dir = default('/configurations/ranger-env/ranger_admin_log_dir', '/var/log/ranger/admin')
-ranger_kms_log_dir = default('/configurations/kms-env/kms_log_dir', '/var/log/ranger/kms')
-storm_log_dir = default('/configurations/storm-env/storm_log_dir', '/var/log/storm')
-yarn_log_dir_prefix = default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')
-mapred_log_dir_prefix = default('/configurations/mapred-env/mapred_log_dir_prefix', '/var/log/hadoop')
-zeppelin_log_dir = default('/configurations/zeppelin-env/zeppelin_log_dir', '/var/log/zeppelin')
-zk_log_dir = default('/configurations/zookeeper-env/zk_log_dir', '/var/log/zookeeper')
-spark_log_dir = default('/configurations/spark-env/spark_log_dir', '/var/log/spark')
-livy_log_dir = default('/configurations/livy-env/livy_log_dir', '/var/log/livy')
-spark2_log_dir = default('/configurations/spark2-env/spark_log_dir', '/var/log/spark2')
-
-hdfs_user = default('configurations/hadoop-env/hdfs_user', 'hdfs')
-mapred_user =  default('configurations/mapred-env/mapred_user', 'mapred')
-yarn_user =  default('configurations/yarn-env/yarn_user', 'yarn')
+# System logs
+logfeeder_system_messages_content = config['configurations']['logfeeder-system_log-env']['logfeeder_system_messages_content']
+logfeeder_secure_log_content = config['configurations']['logfeeder-system_log-env']['logfeeder_secure_log_content']
+logfeeder_system_log_enabled = default('/configurations/logfeeder-system_log-env/logfeeder_system_log_enabled', False)
 
 # Logsearch auth configs
 
@@ -272,14 +250,17 @@ logfeeder_truststore_location = config['configurations']['logfeeder-env']['logfe
 logfeeder_truststore_password = config['configurations']['logfeeder-env']['logfeeder_truststore_password']
 logfeeder_truststore_type = config['configurations']['logfeeder-env']['logfeeder_truststore_type']
 
-logfeeder_supported_services = ['accumulo', 'ambari', 'ams', 'atlas', 'falcon', 'flume', 'hbase', 'hdfs', 'hive', 'hst', 'infra',
-                                'kafka', 'knox', 'logsearch', 'nifi', 'oozie', 'ranger', 'spark', 'spark2', 'storm', 'yarn',
-                                'zeppelin', 'zookeeper']
+logfeeder_default_services = ['ambari', 'logsearch']
+logfeeder_default_config_file_names = ['global.config.json', 'output.config.json'] + ['input.config-%s.json' % (tag) for tag in logfeeder_default_services]
+logfeeder_custom_config_file_names = ['input.config-%s.json' % (tag.replace('-logsearch-conf', ''))
+                                      for tag, content in logfeeder_metadata.iteritems() if any(logfeeder_metadata)]
 
-logfeeder_config_file_names = \
-  ['global.config.json', 'output.config.json'] + ['input.config-%s.json' % (tag) for tag in logfeeder_supported_services]
+if logfeeder_system_log_enabled:
+  default_config_files = ','.join(logfeeder_default_config_file_names + logfeeder_custom_config_file_names
+                                  + ['input.config-system_messages.json', 'input.config-secure_log.json'])
+else:
+  default_config_files = ','.join(logfeeder_default_config_file_names + logfeeder_custom_config_file_names)
 
-default_config_files = ','.join(logfeeder_config_file_names)
 
 logfeeder_grok_patterns = config['configurations']['logfeeder-grok']['default_grok_patterns']
 if config['configurations']['logfeeder-grok']['custom_grok_patterns'].strip():
@@ -292,8 +273,6 @@ if config['configurations']['logfeeder-grok']['custom_grok_patterns'].strip():
     '\n' + \
     config['configurations']['logfeeder-grok']['custom_grok_patterns']
 
-logfeeder_properties = dict(config['configurations']['logfeeder-properties'])
-
 # logfeeder properties
 
 # load default values

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
index ad16958..e90d623 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
@@ -17,6 +17,7 @@ limitations under the License.
 
 """
 
+from resource_management.libraries.functions.default import default
 from resource_management.core.resources.system import Directory, File
 from resource_management.libraries.functions.format import format
 from resource_management.core.source import InlineTemplate, Template
@@ -61,11 +62,25 @@ def setup_logfeeder():
        encoding="utf-8"
        )
 
-  for file_name in params.logfeeder_config_file_names:
+  for file_name in params.logfeeder_default_config_file_names:
     File(format("{logsearch_logfeeder_conf}/" + file_name),
          content=Template(file_name + ".j2")
          )
 
+  for service, pattern_content in params.logfeeder_metadata.iteritems():
+    File(format("{logsearch_logfeeder_conf}/input.config-" + service.replace('-logsearch-conf', '') + ".json"),
+      content=InlineTemplate(pattern_content, extra_imports=[default])
+    )
+
+  if params.logfeeder_system_log_enabled:
+    File(format("{logsearch_logfeeder_conf}/input.config-system_messages.json"),
+         content=params.logfeeder_system_messages_content
+         )
+    File(format("{logsearch_logfeeder_conf}/input.config-secure_log.json"),
+         content=params.logfeeder_secure_log_content
+         )
+
+
   if params.security_enabled:
     File(format("{logfeeder_jaas_file}"),
          content=Template("logfeeder_jaas.conf.j2")

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/HadoopServiceConfig.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/HadoopServiceConfig.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/HadoopServiceConfig.json.j2
index 7982631..7e9153f 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/HadoopServiceConfig.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/HadoopServiceConfig.json.j2
@@ -15,520 +15,86 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  #}
-{
-  "service": {
-{% if "ACCUMULO" in availableServices %}
-    "accumulo": {
-      "label": "Accumulo",
+ {
+   "service": {
+{% if logsearch_metadata and logsearch_metadata.keys()|length > 0 %}
+  {% for metadata_key, component_list in logsearch_metadata.iteritems() %}
+    "{{ metadata_key.lower() }}": {
+      "label": "{{ metadata_key }}",
       "components": [
+    {% for component in component_list %}
         {
-          "name": "accumulo_gc"
-        },
-        {
-          "name": "accumulo_master"
-        },
-        {
-          "name": "accumulo_monitor"
-        },
-        {
-          "name": "accumulo_tracer"
-        },
-        {
-          "name": "accumulo_tserver"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-{% if "ATLAS" in availableServices %}
-    "atlas": {
-      "label": "Atlas",
-      "components": [
-        {
-          "name": "atlas_app"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-    "ambari": {
-      "label": "Ambari",
-      "components": [
-        {
-          "name": "ambari_agent"
-        },
-        {
-          "name": "ambari_server"
-        },
-        {
-          "name": "ambari_alerts"
-        },
-        {
-          "name": "ambari_audit"
-        },
-        {
-          "name": "ambari_config_changes"
-        },
-        {
-          "name": "ambari_eclipselink"
-        },
-        {
-          "name": "ambari_server_check_database"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% if "AMBARI_METRICS" in availableServices %}
-    "ams": {
-      "label": "AMS",
-      "components": [
-        {
-          "name": "ams_hbase_master"
-        },
-        {
-          "name": "ams_hbase_regionserver"
-        },
-        {
-          "name": "ams_collector"
-        },
-        {
-          "name": "ams_monitor"
-        },
-        {
-          "name": "ams_grafana"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-{% if "FALCON" in availableServices %}
-    "falcon": {
-      "label": "Falcon",
-      "components": [
-        {
-          "name": "falcon_app"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-{% if "FLUME" in availableServices %}
-    "flume": {
-      "label": "Flume",
-      "components": [
-        {
-          "name": "flume_handler"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-{% if "HBASE" in availableServices %}
-    "hbase": {
-      "label": "HBase",
-      "components": [
-        {
-          "name": "hbase_master"
-        },
-        {
-          "name": "hbase_regionserver"
-        },
-        {
-          "name": "hbase_phoenix_server"
-        }
-      ],
-      "dependencies": [
-        {
-          "service": "hdfs",
-          "components": [
-            "hdfs_namenode"
-          ]
-        }
-      ]
-    },
-{% endif %}
-{% if "HDFS" in availableServices %}
-    "hdfs": {
-      "label": "HDFS",
-      "components": [
-        {
-          "name": "hdfs_datanode"
-        },
-        {
-          "name": "hdfs_namenode"
-        },
-        {
-          "name": "hdfs_journalnode"
-        },
-        {
-          "name": "hdfs_secondarynamenode"
-        },
-        {
-          "name": "hdfs_zkfc"
-        },
-        {
-          "name": "hdfs_nfs3"
-        },
-        {
-          "name": "hdfs_audit",
-          "rowtype": "audit"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-{% if "HIVE" in availableServices %}
-    "hive": {
-      "label": "Hive",
-      "components": [
-        {
-          "name": "hive_hiveserver2"
-        },
-        {
-          "name": "hive_metastore"
-        },
-        {
-          "name": "webhcat_server"
-        }
-      ],
-      "dependencies": [
-        {
-          "service": "hdfs",
-          "components": [
-            "hdfs_namenode"
-          ]
-        }
-      ]
-    },
-{% endif %}
-{% if "KAFKA" in availableServices %}
-    "kafka": {
-      "label": "Kafka",
-      "components": [
-        {
-          "name": "kafka_controller"
-        },
-        {
-          "name": "kafka_request"
-        },
-        {
-          "name": "kafka_logcleaner"
-        },
-        {
-          "name": "kafka_server"
-        },
-        {
-          "name": "kafka_statechange"
-        }
-      ],
-      "dependencies": [
-        {
-          "service": "zookeeper",
-          "components": [
-            "zookeeper"
-          ]
-        }
-      ]
-    },
-{% endif %}
-    "ambari_infra" : {
-      "label": "Infra",
-      "components" : [
-        {
-          "name": "infra_solr"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% if "KNOX" in availableServices %}
-    "knox": {
-      "label": "Knox",
-      "components": [
-        {
-          "name": "knox_gateway"
-        },
-        {
-          "name": "knox_cli"
-        },
-        {
-          "name": "knox_ldap"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-{% if "NIFI" in availableServices %}
-    "nifi": {
-      "label": "NiFi",
-      "components": [
-        {
-          "name": "nifi_app"
-        },
-        {
-          "name": "nifi_bootstrap"
-        },
-        {
-          "name": "nifi_setup"
-        },
-        {
-          "name": "nifi_user"
-        },
-        {
-          "name": "nifi_ca"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-{% if "MAPREDUCE2" in availableServices %}
-    "mapred": {
-      "label": "MapReduce",
-      "components": [
-        {
-          "name": "mapred_historyserver"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-    "logsearch": {
-      "label": "Logsearch",
-      "components": [
-        {
-          "name": "logsearch_app"
-        },
-        {
-          "name": "logsearch_feeder"
-        },
-        {
-          "name": "logsearch_perf"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% if "RANGER" in availableServices %}
-    "ranger": {
-      "label": "Ranger",
-      "components": [
-        {
-          "name": "ranger_admin"
-        },
-        {
-          "name": "ranger_dbpatch"
-        },
-        {
-          "name": "ranger_kms"
-        },
-        {
-          "name": "ranger_usersync"
-        }
-      ],
-      "dependencies": [
-        {
-          "service": "hdfs",
-          "required": "optional",
-          "components": [
-            "hdfs_namenode"
-          ]
-        },
-        {
-          "service": "hbase",
-          "required": "optional",
-          "components": [
-            "hbase_master",
-            "hbase_regionserver"
-          ]
-        },
-        {
-          "service": "hive",
-          "required": "optional",
-          "components": [
-            "hive_hiveserver2"
-          ]
-        },
-        {
-          "service": "kafka",
-          "required": "optional",
-          "components": [
-            "kafka_ranger"
-          ]
-        },
-        {
-          "service": "knox",
-          "required": "optional",
-          "components": [
-            "knox_gateway"
-          ]
-        },
-        {
-          "service": "storm",
-          "required": "optional",
-          "components": [
-            "storm_supervisor"
-          ]
-        },
-        {
-          "service": "yarn",
-          "required": "optional",
-          "components": [
-            "yarn_resourcemanager"
-          ]
-        }
-      ]
-    },
-{% endif %}
-{% if "OOZIE" in availableServices %}
-    "oozie": {
-      "label": "Oozie",
-      "components": [
-        {
-          "name": "oozie_app"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-{% if "SMARTSENSE" in availableServices %}
-    "hst": {
-      "label": "SmartSense",
-      "components": [
-        {
-          "name": "hst_server"
-        },
-        {
-          "name": "hst_agent"
-        },
-        {
-          "name": "activity_analyzer"
-        },
-        {
-          "name": "activity_explorer"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-{% if "SPARK" in availableServices %}
-    "spark": {
-      "label": "Spark",
-      "components": [
-        {
-          "name": "spark_jobhistory_server"
-        },
-        {
-          "name": "spark_thriftserver"
-        },
-        {
-          "name": "livy_server"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-{% if "SPARK2" in availableServices %}
-    "spark2": {
-      "label": "Spark 2",
-      "components": [
-        {
-          "name": "spark2_jobhistory_server"
-        },
-        {
-          "name": "spark2_thriftserver"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-{% if "STORM" in availableServices %}
-    "storm": {
-      "label": "Storm",
-      "components": [
-        {
-          "name": "storm_drpc"
-        },
-        {
-          "name": "storm_logviewer"
-        },
-        {
-          "name": "storm_nimbus"
-        },
-        {
-          "name": "storm_supervisor"
-        },
-        {
-          "name": "storm_ui"
-        },
-        {
-          "name": "storm_worker"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-{% if "YARN" in availableServices %}
-    "yarn": {
-      "label": "YARN",
-      "components": [
-        {
-          "name": "yarn_nodemanager"
-        },
-        {
-          "name": "yarn_resourcemanager"
-        },
-        {
-          "name": "yarn_timelineserver"
-        },
-        {
-          "name": "yarn_historyserver"
-        },
-        {
-          "name": "yarn_jobsummary"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-{% if "ZEPPELIN" in availableServices %}
-    "zeppelin": {
-      "label": "Zeppelin",
-      "components": [
-        {
-          "name": "zeppelin"
-        }
-      ],
-      "dependencies": [
-      ]
-    },
-{% endif %}
-{% if "ZOOKEEPER" in availableServices %}
-    "zookeeper": {
-      "label": "ZooKeeper",
-      "components": [
-        {
-          "name": "zookeeper"
+          "name": "{{ component }}"
         }
+      {% if not loop.last %}
+        ,
+      {% endif %}
+    {% endfor %}
       ],
       "dependencies": [
       ]
     }
-{% endif %}
+     ,
+  {% endfor %}
+{% endif %}
+{% if logfeeder_system_log_enabled %}
+     "system" : {
+       "label" : "System",
+       "components" : [
+        {
+          "name": "system_message"
+        },
+        {
+          "name": "secure_log"
+        }
+       ],
+       "dependencies": [
+       ]
+     },
+{% endif %}
+     "ambari": {
+       "label": "Ambari",
+       "components": [
+         {
+           "name": "ambari_agent"
+         },
+         {
+           "name": "ambari_server"
+         },
+         {
+           "name": "ambari_alerts"
+         },
+         {
+           "name": "ambari_audit"
+         },
+         {
+           "name": "ambari_config_changes"
+         },
+         {
+           "name": "ambari_eclipselink"
+         },
+         {
+           "name": "ambari_server_check_database"
+         }
+       ],
+       "dependencies": [
+       ]
+     },
+     "logsearch": {
+       "label": "Logsearch",
+       "components": [
+       {
+         "name": "logsearch_app"
+       },
+       {
+         "name": "logsearch_feeder"
+       },
+       {
+         "name": "logsearch_perf"
+       }
+       ],
+       "dependencies": [
+       ]
+      }
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-accumulo.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-accumulo.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-accumulo.json.j2
deleted file mode 100644
index 1fd93cf..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-accumulo.json.j2
+++ /dev/null
@@ -1,105 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"accumulo_gc",
-      "rowtype":"service",
-      "path":"{{accumulo_log_dir}}/gc_*.log"
-    },
-    {
-      "type":"accumulo_master",
-      "rowtype":"service",
-      "path":"{{accumulo_log_dir}}/master_*.log"
-    },
-    {
-      "type":"accumulo_monitor",
-      "rowtype":"service",
-      "path":"{{accumulo_log_dir}}/monitor_*.log"
-    },
-    {
-      "type":"accumulo_tracer",
-      "rowtype":"service",
-      "path":"{{accumulo_log_dir}}/tracer_*.log"
-    },
-    {
-      "type":"accumulo_tserver",
-      "rowtype":"service",
-      "path":"{{accumulo_log_dir}}/tserver_*.log"
-    }
-
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "accumulo_master"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "comment":"This one has one extra space after LEVEL",
-      "conditions":{
-        "fields":{
-          "type":[
-            "accumulo_gc",
-            "accumulo_monitor",
-            "accumulo_tracer",
-            "accumulo_tserver"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %X{application} %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-          
-        }
-
-      }
-
-    }
-
-  ]
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
index 36507c9..ad4adb2 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ambari.json.j2
@@ -31,7 +31,7 @@
       "type":"ambari_alerts",
       "rowtype":"service",
       "add_fields":{
-        "level":"INFO"
+        "level":"UNKNOWN"
       },
       "path":"{{ambari_server_log_dir}}/ambari-alerts.log"
     },

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ams.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ams.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ams.json.j2
deleted file mode 100644
index 0bcaf56..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ams.json.j2
+++ /dev/null
@@ -1,191 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"ams_hbase_master",
-      "rowtype":"service",
-      "path":"{{metrics_collector_log_dir}}/hbase-ams-master-*.log"
-    },
-    {
-      "type":"ams_hbase_regionserver",
-      "rowtype":"service",
-      "path":"{{metrics_collector_log_dir}}/hbase-ams-regionserver-*.log"
-    },
-    {
-      "type":"ams_collector",
-      "rowtype":"service",
-      "path":"{{metrics_collector_log_dir}}/ambari-metrics-collector.log"
-    },
-    {
-      "type":"ams_monitor",
-      "rowtype":"service",
-      "path":"{{metrics_monitor_log_dir}}/ambari-metrics-monitor.out"
-    },
-    {
-      "type":"ams_grafana",
-      "rowtype":"service",
-      "path":"{{metrics_grafana_log_dir}}/grafana.log"
-    }
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ams_collector"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} %p %c: %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ams_hbase_master",
-            "ams_hbase_regionserver"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ams_grafana"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
-      "multiline_pattern":"^(%{DATESTAMP:logtime})",
-      "message_pattern":"(?m)^%{DATESTAMP:logtime}%{SPACE}\\[%{WORD:level}\\]%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy/MM/dd HH:mm:ss"
-          }
-
-        },
-        "level":[
-          {
-            "map_fieldvalue":{
-              "pre_value":"I",
-              "post_value":"INFO"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"W",
-              "post_value":"WARN"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"D",
-              "post_value":"DEBUG"
-             }
-           },
-           {
-             "map_fieldvalue":{
-               "pre_value":"E",
-               "post_value":"ERROR"
-             }
-           },
-           {
-             "map_fieldvalue":{
-               "pre_value":"F",
-               "post_value":"FATAL"
-             }
-           }
-         ]
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ams_monitor"
-          ]
- 
-        }
- 
-      },
-      "log4j_format":"",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{JAVAFILE:file}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      },
-      "level":[
-        {
-          "map_fieldvalue":{
-            "pre_value":"WARNING",
-            "post_value":"WARN"
-          }
-        }
-      ]
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-atlas.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-atlas.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-atlas.json.j2
deleted file mode 100644
index 53912c8..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-atlas.json.j2
+++ /dev/null
@@ -1,55 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"atlas_app",
-      "rowtype":"service",
-      "path":"{{atlas_log_dir}}/application.log"
-    }
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "atlas_app"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-falcon.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-falcon.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-falcon.json.j2
deleted file mode 100644
index a40e52d..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-falcon.json.j2
+++ /dev/null
@@ -1,55 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"falcon_app",
-      "rowtype":"service",
-      "path":"{{falcon_log_dir}}/falcon.application.log"
-    }
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "falcon_app"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-flume.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-flume.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-flume.json.j2
deleted file mode 100644
index b31054b..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-flume.json.j2
+++ /dev/null
@@ -1,56 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"flume_handler",
-      "rowtype":"service",
-      "path":"{{flume_log_dir}}/flume.log"
-    }
-
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "flume_handler"
-          ]
-
-        }
-
-      },
-      "log4j_format":"",
-      "multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
-      "message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}\\(%{JAVACLASS:class}\\.%{JAVAMETHOD:method}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"dd MMM yyyy HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hbase.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hbase.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hbase.json.j2
deleted file mode 100644
index 0c04052..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hbase.json.j2
+++ /dev/null
@@ -1,91 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"hbase_master",
-      "rowtype":"service",
-      "path":"{{hbase_log_dir}}/hbase-*-master-*.log"
-    },
-    {
-      "type":"hbase_regionserver",
-      "rowtype":"service",
-      "path":"{{hbase_log_dir}}/hbase-*-regionserver-*.log"
-    },
-    {
-      "type":"hbase_phoenix_server",
-      "rowtype":"service",
-      "path":"{{hbase_log_dir}}/phoenix-*-server.log"
-    }
-
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hbase_master",
-            "hbase_regionserver"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hbase_phoenix_server"
-          ]
-
-        }
-      },
-      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hdfs.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hdfs.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hdfs.json.j2
deleted file mode 100644
index 3dff6f5..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hdfs.json.j2
+++ /dev/null
@@ -1,251 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"hdfs_datanode",
-      "rowtype":"service",
-      "path":"{{hdfs_log_dir_prefix}}/{{hdfs_user}}/hadoop-{{hdfs_user}}-datanode-*.log"
-    },
-    {
-      "type":"hdfs_namenode",
-      "rowtype":"service",
-      "path":"{{hdfs_log_dir_prefix}}/{{hdfs_user}}/hadoop-{{hdfs_user}}-namenode-*.log"
-    },
-    {
-      "type":"hdfs_journalnode",
-      "rowtype":"service",
-      "path":"{{hdfs_log_dir_prefix}}/{{hdfs_user}}/hadoop-{{hdfs_user}}-journalnode-*.log"
-    },
-    {
-      "type":"hdfs_secondarynamenode",
-      "rowtype":"service",
-      "path":"{{hdfs_log_dir_prefix}}/{{hdfs_user}}/hadoop-{{hdfs_user}}-secondarynamenode-*.log"
-    },
-    {
-      "type":"hdfs_zkfc",
-      "rowtype":"service",
-      "path":"{{hdfs_log_dir_prefix}}/{{hdfs_user}}/hadoop-{{hdfs_user}}-zkfc-*.log"
-    },
-    {
-      "type":"hdfs_nfs3",
-      "rowtype":"service",
-      "path":"{{hdfs_log_dir_prefix}}/{{hdfs_user}}/hadoop-{{hdfs_user}}-nfs3-*.log"
-    },
-    {
-      "type":"hdfs_audit",
-      "rowtype":"audit",
-      "is_enabled":"true",
-      "add_fields":{
-        "logType":"HDFSAudit",
-        "enforcer":"hadoop-acl",
-        "repoType":"1",
-        "repo":"hdfs"
-      },
-      "path":"{{hdfs_log_dir_prefix}}/{{hdfs_user}}/hdfs-audit.log"
-    }
-
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hdfs_datanode",
-            "hdfs_journalnode",
-            "hdfs_secondarynamenode",
-            "hdfs_namenode",
-            "hdfs_zkfc",
-            "hdfs_nfs3"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hdfs_audit"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "evtTime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"keyvalue",
-      "sort_order":1,
-      "conditions":{
-        "fields":{
-          "type":[
-            "hdfs_audit"
-          ]
-
-        }
-
-      },
-      "source_field":"log_message",
-      "value_split":"=",
-      "field_split":"\t",
-      "post_map_values":{
-        "src":{
-          "map_fieldname":{
-            "new_fieldname":"resource"
-          }
-
-        },
-        "ip":{
-          "map_fieldname":{
-            "new_fieldname":"cliIP"
-          }
-
-        },
-        "allowed":[
-          {
-            "map_fieldvalue":{
-              "pre_value":"true",
-              "post_value":"1"
-            }
-
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"false",
-              "post_value":"0"
-            }
-
-          },
-          {
-            "map_fieldname":{
-              "new_fieldname":"result"
-            }
-
-          }
-
-        ],
-        "cmd":{
-          "map_fieldname":{
-            "new_fieldname":"action"
-          }
-
-        },
-        "proto":{
-          "map_fieldname":{
-            "new_fieldname":"cliType"
-          }
-
-        },
-        "callerContext":{
-          "map_fieldname":{
-            "new_fieldname":"req_caller_id"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "sort_order":2,
-      "source_field":"ugi",
-      "remove_source_field":"false",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hdfs_audit"
-          ]
-
-        }
-
-      },
-      "message_pattern":"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}",
-      "post_map_values":{
-        "user":{
-          "map_fieldname":{
-            "new_fieldname":"reqUser"
-          }
-
-        },
-        "x_user":{
-          "map_fieldname":{
-            "new_fieldname":"reqUser"
-          }
-
-        },
-        "p_user":{
-          "map_fieldname":{
-            "new_fieldname":"reqUser"
-          }
-
-        },
-        "k_user":{
-          "map_fieldname":{
-            "new_fieldname":"proxyUsers"
-          }
-
-        },
-        "p_authType":{
-          "map_fieldname":{
-            "new_fieldname":"authType"
-          }
-
-        },
-        "k_authType":{
-          "map_fieldname":{
-            "new_fieldname":"proxyAuthType"
-          }
-
-        }
-
-      }
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hive.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hive.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hive.json.j2
deleted file mode 100644
index f96b16f..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-hive.json.j2
+++ /dev/null
@@ -1,97 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"hive_hiveserver2",
-      "rowtype":"service",
-      "path":"{{hive_log_dir}}/hiveserver2.log"
-    },
-    {
-      "type":"hive_metastore",
-      "rowtype":"service",
-      "path":"{{hive_log_dir}}/hivemetastore.log"
-    },
-    {
-      "type": "webhcat_server",
-      "rowntype":"service",
-      "path":"{{hcat_log_dir}}/webhcat.log"
-    }
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hive_hiveserver2",
-            "hive_metastore"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]:%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "webhcat_server"
-          ]
-
-        }
-
-      },
-      "log4j_format":" %-5p | %d{DATE} | %c | %m%n",
-      "multiline_pattern":"^(%{SPACE}%{LOGLEVEL:level}%{CUSTOM_SEPARATOR}%{CUSTOM_DATESTAMP:logtime})",
-      "message_pattern":"(?m)^%{SPACE}%{LOGLEVEL:level}%{CUSTOM_SEPARATOR}%{CUSTOM_DATESTAMP:logtime}%{CUSTOM_SEPARATOR}%{JAVACLASS:file}%{CUSTOM_SEPARATOR}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"dd MMM yyyy HH:mm:ss,SSS"
-          }
-
-        },
-        "level":{
-           "map_fieldvalue":{
-             "pre_value":"WARNING",
-             "post_value":"WARN"
-            }
-        }
-
-      }
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-infra.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-infra.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-infra.json.j2
deleted file mode 100644
index 20fa49d..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-infra.json.j2
+++ /dev/null
@@ -1,56 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"infra_solr",
-      "rowtype":"service",
-      "path":"{{infra_solr_log_dir}}/solr.log"
-    }
-
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "infra_solr"
-          ]
-
-        }
-
-      },
-      "log4j_format":"",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-kafka.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-kafka.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-kafka.json.j2
deleted file mode 100644
index d05d4ad..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-kafka.json.j2
+++ /dev/null
@@ -1,105 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"kafka_controller",
-      "rowtype":"service",
-      "path":"{{kafka_log_dir}}/controller.log"
-    },
-    {
-      "type":"kafka_request",
-      "rowtype":"service",
-      "path":"{{kafka_log_dir}}/kafka-request.log"
-    },
-    {
-      "type":"kafka_logcleaner",
-      "rowtype":"service",
-      "path":"{{kafka_log_dir}}/log-cleaner.log"
-    },
-    {
-      "type":"kafka_server",
-      "rowtype":"service",
-      "path":"{{kafka_log_dir}}/server.log"
-    },
-    {
-      "type":"kafka_statechange",
-      "rowtype":"service",
-      "path":"{{kafka_log_dir}}/state-change.log"
-    }
-
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "kafka_controller",
-            "kafka_request",
-            "kafka_logcleaner"
-          ]
-
-        }
-
-      },
-      "log4j_format":"[%d] %p %m (%c)%n",
-      "multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])",
-      "message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "comment":"Suppose to be same log4j pattern as other kafka processes, but some reason thread is not printed",
-      "conditions":{
-        "fields":{
-          "type":[
-            "kafka_server",
-            "kafka_statechange"
-          ]
-
-        }
-
-      },
-      "log4j_format":"[%d] %p %m (%c)%n",
-      "multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])",
-      "message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-knox.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-knox.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-knox.json.j2
deleted file mode 100644
index df98e25..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-knox.json.j2
+++ /dev/null
@@ -1,68 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"knox_gateway",
-      "rowtype":"service",
-      "path":"{{knox_log_dir}}/gateway.log"
-    },
-    {
-      "type":"knox_cli",
-      "rowtype":"service",
-      "path":"{{knox_log_dir}}/knoxcli.log"
-    },
-    {
-      "type":"knox_ldap",
-      "rowtype":"service",
-      "path":"{{knox_log_dir}}/ldap.log"
-    }
-
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "knox_gateway",
-            "knox_cli",
-            "knox_ldap"
-          ]
-          
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-oozie.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-oozie.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-oozie.json.j2
deleted file mode 100644
index ddb8198..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-oozie.json.j2
+++ /dev/null
@@ -1,56 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"oozie_app",
-      "rowtype":"service",
-      "path":"{{oozie_log_dir}}/oozie.log"
-    }
-
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "oozie_app"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} %5p %c{1}:%L - SERVER[${oozie.instance.id}] %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{DATA:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ranger.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ranger.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ranger.json.j2
deleted file mode 100644
index 7b12869..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-ranger.json.j2
+++ /dev/null
@@ -1,122 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"ranger_admin",
-      "rowtype":"service",
-      "path":"{{ranger_admin_log_dir}}/xa_portal.log"
-    },
-    {
-      "type":"ranger_dbpatch",
-      "is_enabled":"true",
-      "path":"{{ranger_admin_log_dir}}/ranger_db_patch.log"
-    },
-    {
-      "type":"ranger_kms",
-      "rowtype":"service",
-      "path":"{{ranger_kms_log_dir}}/kms.log"
-    },
-    {
-      "type":"ranger_usersync",
-      "rowtype":"service",
-      "path":"{{ranger_usersync_log_dir}}/usersync.log"
-    }
-
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ranger_admin",
-            "ranger_dbpatch"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d [%t] %-5p %C{6} (%F:%L) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ranger_kms"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} %-5p %c{1} - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ranger_usersync"
-          ]
-          
-        }
-        
-      },
-      "log4j_format":"%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n",
-      "multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
-      "message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"dd MMM yyyy HH:mm:ss"
-          }
-
-        }
-
-      }
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark.json.j2
deleted file mode 100644
index 4371276..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark.json.j2
+++ /dev/null
@@ -1,72 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
- {
-   "input":[
-
-     {
-       "type":"spark_jobhistory_server",
-       "rowtype":"service",
-       "path":"{{spark_log_dir}}/spark-*-org.apache.spark.deploy.history.HistoryServer*.out"
-     },
-     {
-       "type":"spark_thriftserver",
-       "rowtype":"service",
-       "path":"{{spark_log_dir}}/spark-*-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2*.out"
-     },
-     {
-       "type":"livy_server",
-       "rowtype":"service",
-       "path":"{{livy_log_dir}}/livy-livy-server.out"
-     }
-   ],
-   "filter":[
-       {
-          "filter":"grok",
-          "conditions":{
-            "fields":{
-              "type":[
-                "spark_jobhistory_server",
-                "spark_thriftserver",
-                "livy_server"
-              ]
-
-            }
-          },
-          "log4j_format":"",
-          "multiline_pattern":"^(%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level})",
-          "message_pattern":"(?m)^%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVAFILE:file}:%{SPACE}%{GREEDYDATA:log_message}",
-          "post_map_values":{
-            "logtime":{
-              "map_date":{
-                "target_date_pattern":"yy/MM/dd HH:mm:ss"
-              }
-
-            },
-            "level":{
-              "map_fieldvalue":{
-                "pre_value":"WARNING",
-                "post_value":"WARN"
-              }
-
-            }
-
-          }
-      }
-   ]
-
- }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark2.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark2.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark2.json.j2
deleted file mode 100644
index 01aea36..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-spark2.json.j2
+++ /dev/null
@@ -1,66 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
- {
-   "input":[
-
-     {
-       "type":"spark2_jobhistory_server",
-       "rowtype":"service",
-       "path":"{{spark2_log_dir}}/spark-*-org.apache.spark.deploy.history.HistoryServer*.out"
-     },
-     {
-       "type":"spark2_thriftserver",
-       "rowtype":"service",
-       "path":"{{spark2_log_dir}}/spark-*-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2*.out"
-     }
-   ],
-   "filter":[
-   {
-          "filter":"grok",
-          "conditions":{
-            "fields":{
-              "type":[
-                "spark2_jobhistory_server",
-                "spark2_thriftserver"
-              ]
-
-            }
-          },
-          "log4j_format":"",
-          "multiline_pattern":"^(%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level})",
-          "message_pattern":"(?m)^%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVAFILE:file}:%{SPACE}%{GREEDYDATA:log_message}",
-          "post_map_values":{
-            "logtime":{
-              "map_date":{
-                "target_date_pattern":"yy/MM/dd HH:mm:ss"
-              }
-
-            },
-            "level":{
-              "map_fieldvalue":{
-                "pre_value":"WARNING",
-                "post_value":"WARN"
-              }
-
-            }
-
-          }
-      }
-   ]
-
- }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-storm.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-storm.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-storm.json.j2
deleted file mode 100644
index 07a4c2e..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-storm.json.j2
+++ /dev/null
@@ -1,86 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"storm_drpc",
-      "rowtype":"service",
-      "path":"{{storm_log_dir}}/drpc.log"
-    },
-    {
-      "type":"storm_logviewer",
-      "rowtype":"service",
-      "path":"{{storm_log_dir}}/logviewer.log"
-    },
-    {
-      "type":"storm_nimbus",
-      "rowtype":"service",
-      "path":"{{storm_log_dir}}/nimbus.log"
-    },
-    {
-      "type":"storm_supervisor",
-      "rowtype":"service",
-      "path":"{{storm_log_dir}}/supervisor.log"
-    },
-    {
-      "type":"storm_ui",
-      "rowtype":"service",
-      "path":"{{storm_log_dir}}/ui.log"
-    },
-    {
-      "type":"storm_worker",
-      "rowtype":"service",
-      "path":"{{storm_log_dir}}/*worker*.log"
-    }
-
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "storm_drpc",
-            "storm_logviewer",
-            "storm_nimbus",
-            "storm_supervisor",
-            "storm_ui",
-            "storm_worker"
-          ]
-
-        }
-
-      },
-      "log4j_format":"",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss.SSS"
-          }
-
-        }
-
-      }
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-yarn.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-yarn.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-yarn.json.j2
deleted file mode 100644
index f8e77ae..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-yarn.json.j2
+++ /dev/null
@@ -1,86 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"mapred_historyserver",
-      "rowtype":"service",
-      "path":"{{mapred_log_dir_prefix}}/{{mapred_user}}/mapred-{{mapred_user}}-historyserver*.log"
-    },
-    {
-      "type":"yarn_nodemanager",
-      "rowtype":"service",
-      "path":"{{yarn_log_dir_prefix}}/{{yarn_user}}/yarn-{{yarn_user}}-nodemanager-*.log"
-    },
-    {
-      "type":"yarn_resourcemanager",
-      "rowtype":"service",
-      "path":"{{yarn_log_dir_prefix}}/{{yarn_user}}/yarn-{{yarn_user}}-resourcemanager-*.log"
-    },
-    {
-      "type":"yarn_timelineserver",
-      "rowtype":"service",
-      "path":"{{yarn_log_dir_prefix}}/{{yarn_user}}/yarn-{{yarn_user}}-timelineserver-*.log"
-    },
-    {
-      "type":"yarn_historyserver",
-      "rowtype":"service",
-      "path":"{{yarn_log_dir_prefix}}/{{yarn_user}}/yarn-{{yarn_user}}-historyserver-*.log"
-    },
-    {
-      "type":"yarn_jobsummary",
-      "rowtype":"service",
-      "path":"{{yarn_log_dir_prefix}}/{{yarn_user}}/hadoop-mapreduce.jobsummary.log"
-    }
-
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "mapred_historyserver",
-            "yarn_historyserver",
-            "yarn_jobsummary",
-            "yarn_nodemanager",
-            "yarn_resourcemanager",
-            "yarn_timelineserver"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zeppelin.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zeppelin.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zeppelin.json.j2
deleted file mode 100644
index c0948b5..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zeppelin.json.j2
+++ /dev/null
@@ -1,56 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"zeppelin",
-      "rowtype":"service",
-      "path":"{{zeppelin_log_dir}}/zeppelin-zeppelin-*.log"
-    }
-
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "zeppelin"
-          ]
-
-        }
-
-      },
-      "log4j_format":"",
-      "multiline_pattern":"^(%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\])",
-      "message_pattern":"(?m)^%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}\\(\\{{"{"}}%{DATA:thread_name}\\{{"}"}}%{SPACE}%{JAVAFILE:file}\\[%{JAVAMETHOD:method}\\]:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    }
-
-  ]
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zookeeper.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zookeeper.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zookeeper.json.j2
deleted file mode 100644
index fcc5dc9..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-zookeeper.json.j2
+++ /dev/null
@@ -1,56 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"zookeeper",
-      "rowtype":"service",
-      "path":"{{zk_log_dir}}/zookeeper*.log"
-    }
-
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "zookeeper"
-          ]
-
-        }
-
-      },
-      "log4j_format":"%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}-%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\@%{INT:line_number}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-
-        }
-
-      }
-
-    }
-
-  ]
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/secure_log.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/secure_log.json b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/secure_log.json
new file mode 100644
index 0000000..9d9585e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/secure_log.json
@@ -0,0 +1,31 @@
+{
+  "input": [
+    {
+      "type": "secure_log",
+      "rowtype": "service",
+      "path": "/var/log/secure"
+    }
+  ],
+  "filter": [
+    {
+      "filter": "grok",
+      "conditions": {
+        "fields": {
+          "type": [
+            "secure_log"
+          ]
+        }
+      },
+      "multiline_pattern": "^(%{SYSLOGTIMESTAMP:logtime})",
+      "message_pattern": "(?m)^%{SYSLOGTIMESTAMP:logtime}%{SPACE}%{SYSLOGHOST:host}%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values": {
+        "logtime": {
+          "map_date": {
+            "target_date_pattern": "yyyy-MM-dd HH:mm:ss,SSS",
+            "src_date_pattern" :"MMM dd HH:mm:ss"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/system_messages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/system_messages.json b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/system_messages.json
new file mode 100644
index 0000000..e2ea3eb
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/system_messages.json
@@ -0,0 +1,31 @@
+{
+  "input": [
+    {
+      "type": "system_message",
+      "rowtype": "service",
+      "path": "/var/log/messages"
+    }
+  ],
+  "filter": [
+    {
+      "filter": "grok",
+      "conditions": {
+        "fields": {
+          "type": [
+            "system_message"
+          ]
+        }
+      },
+      "multiline_pattern": "^(%{SYSLOGTIMESTAMP:logtime})",
+      "message_pattern": "(?m)^%{SYSLOGTIMESTAMP:logtime}%{SPACE}%{SYSLOGHOST:host}%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values": {
+        "logtime": {
+          "map_date": {
+            "target_date_pattern": "yyyy-MM-dd HH:mm:ss,SSS",
+            "src_date_pattern" :"MMM dd HH:mm:ss"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file


[3/6] ambari git commit: AMBARI-18616. Fix Log Search User Config bug (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
AMBARI-18616. Fix Log Search User Config bug (Miklos Gergely via oleewere)

Change-Id: I008c431c07ae61a2121b0addf6747b7d9b53c618


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3114b1c6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3114b1c6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3114b1c6

Branch: refs/heads/logsearch-ga
Commit: 3114b1c6bd69f02b73d3b19434e9ae3a0641fd94
Parents: 8073a1b
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Wed Oct 19 17:30:44 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Wed Oct 19 17:30:44 2016 +0200

----------------------------------------------------------------------
 .../common/HadoopServiceConfigHelper.java       |  80 ++++++++++++++
 .../ambari/logsearch/dao/UserConfigSolrDao.java | 107 +++++--------------
 .../ambari/logsearch/manager/ManagerBase.java   |  35 ------
 .../logsearch/manager/ServiceLogsManager.java   |  13 +++
 .../model/common/LogFeederDataMap.java          |   8 +-
 .../views/filter/CreateLogfeederFilterView.js   |  81 ++++++++------
 6 files changed, 175 insertions(+), 149 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3114b1c6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/HadoopServiceConfigHelper.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/HadoopServiceConfigHelper.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/HadoopServiceConfigHelper.java
new file mode 100644
index 0000000..0e2087f
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/common/HadoopServiceConfigHelper.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.common;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.apache.ambari.logsearch.util.JSONUtil;
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+
+import com.google.gson.JsonParseException;
+
+public class HadoopServiceConfigHelper {
+  private static final Logger LOG = Logger.getLogger(HadoopServiceConfigHelper.class);
+  
+  public static String getHadoopServiceConfigJSON() {
+    String fileContent = null;
+
+    try {
+      ClassLoader classLoader = HadoopServiceConfigHelper.class.getClassLoader();
+      File file = new File(classLoader.getResource("HadoopServiceConfig.json").getFile());
+      fileContent = FileUtils.readFileToString(file);
+    } catch (IOException e) {
+      LOG.error("Unable to read HadoopServiceConfig.json", e);
+    }
+
+    return JSONUtil.isJSONValid(fileContent) ? fileContent : null;
+  }
+  
+  @SuppressWarnings("unchecked")
+  public static Set<String> getAllLogIds() {
+    Set<String> logIds = new TreeSet<>();
+    
+    String key = null;
+    JSONArray componentArray = null;
+    try {
+      String hadoopServiceConfigJSON = getHadoopServiceConfigJSON();
+      JSONObject hadoopServiceJsonObject = new JSONObject(hadoopServiceConfigJSON).getJSONObject("service");
+      Iterator<String> hadoopSerivceKeys = hadoopServiceJsonObject.keys();
+      while (hadoopSerivceKeys.hasNext()) {
+        key = hadoopSerivceKeys.next();
+        componentArray = hadoopServiceJsonObject.getJSONObject(key).getJSONArray("components");
+        for (int i = 0; i < componentArray.length(); i++) {
+          JSONObject componentJsonObject = (JSONObject) componentArray.get(i);
+          String logId = componentJsonObject.getString("name");
+          logIds.add(logId);
+        }
+      }
+    } catch (JsonParseException | JSONException je) {
+      LOG.error("Error parsing JSON. key=" + key + ", componentArray=" + componentArray, je);
+      return null;
+    }
+
+    return logIds;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/3114b1c6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
index 58337f7..418a405 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
@@ -19,22 +19,22 @@
 
 package org.apache.ambari.logsearch.dao;
 
-import java.io.File;
 import java.io.IOException;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
-import java.util.Scanner;
+import java.util.Set;
+import java.util.TreeMap;
+
 import javax.annotation.PostConstruct;
 import javax.inject.Inject;
 import javax.inject.Named;
 
+import org.apache.ambari.logsearch.common.HadoopServiceConfigHelper;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.LogSearchContext;
 import org.apache.ambari.logsearch.common.LogType;
 import org.apache.ambari.logsearch.conf.SolrUserPropsConfig;
 import org.apache.ambari.logsearch.model.common.LogFeederDataMap;
+import org.apache.ambari.logsearch.model.common.LogfeederFilterData;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.response.QueryResponse;
@@ -43,15 +43,11 @@ import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import com.google.gson.JsonParseException;
 
 import org.apache.ambari.logsearch.util.JSONUtil;
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.log4j.Logger;
 import org.springframework.data.solr.core.SolrTemplate;
-import org.springframework.util.CollectionUtils;
 
 import static org.apache.ambari.logsearch.solr.SolrConstants.UserConfigConstants.ID;
 import static org.apache.ambari.logsearch.solr.SolrConstants.UserConfigConstants.USER_NAME;
@@ -147,61 +143,43 @@ public class UserConfigSolrDao extends SolrDaoBase {
     return updateResoponse;
   }
 
-	@SuppressWarnings("unchecked")
   public LogFeederDataMap getUserFilter() throws SolrServerException, IOException {
-
     SolrQuery solrQuery = new SolrQuery();
     solrQuery.setQuery("*:*");
-    String fq = ROW_TYPE + ":" + LogSearchConstants.LOGFEEDER_FILTER_NAME;
-    solrQuery.setFilterQueries(fq);
+    solrQuery.setFilterQueries(ROW_TYPE + ":" + LogSearchConstants.LOGFEEDER_FILTER_NAME);
 
     QueryResponse response = process(solrQuery);
     SolrDocumentList documentList = response.getResults();
     LogFeederDataMap logfeederDataMap = null;
-    if (!CollectionUtils.isEmpty(documentList)) {
+    if (CollectionUtils.isNotEmpty(documentList)) {
       SolrDocument configDoc = documentList.get(0);
-      String configJson = JSONUtil.objToJson(configDoc);
-      HashMap<String, Object> configMap = JSONUtil.jsonToMapObject(configJson);
-      String json = (String) configMap.get(VALUES);
+      String json = (String) configDoc.get(VALUES);
       logfeederDataMap = (LogFeederDataMap) JSONUtil.jsonToObj(json, LogFeederDataMap.class);
       logfeederDataMap.setId("" + configDoc.get(ID));
-
     } else {
+      logfeederDataMap = initUserFilter();
+    }
+    return logfeederDataMap;
+  }
+
+  private LogFeederDataMap initUserFilter() throws SolrServerException, IOException {
+    LogFeederDataMap logfeederDataMap = new LogFeederDataMap();
+    
+    Set<String> logIds = HadoopServiceConfigHelper.getAllLogIds();
+    if (logIds != null) {
+      logfeederDataMap.setFilter(new TreeMap<String, LogfeederFilterData>());
+      logfeederDataMap.setId(Long.toString(System.currentTimeMillis()));
       List<String> logfeederDefaultLevels = solrUserConfig.getLogLevels();
-      JSONArray levelJsonArray = new JSONArray(logfeederDefaultLevels);
-
-      String hadoopServiceString = getHadoopServiceConfigJSON();
-      String key = null;
-      JSONArray componentArray = null;
-      try {
-        JSONObject componentList = new JSONObject();
-        JSONObject jsonValue = new JSONObject();
-
-        JSONObject hadoopServiceJsonObject = new JSONObject(hadoopServiceString).getJSONObject("service");
-        Iterator<String> hadoopSerivceKeys = hadoopServiceJsonObject.keys();
-        while (hadoopSerivceKeys.hasNext()) {
-          key = hadoopSerivceKeys.next();
-          componentArray = hadoopServiceJsonObject.getJSONObject(key).getJSONArray("components");
-          for (int i = 0; i < componentArray.length(); i++) {
-            JSONObject compJsonObject = (JSONObject) componentArray.get(i);
-            String componentName = compJsonObject.getString("name");
-            JSONObject innerContent = new JSONObject();
-            innerContent.put("label", componentName);
-            innerContent.put("hosts", new JSONArray());
-            innerContent.put("defaultLevels", levelJsonArray);
-            componentList.put(componentName, innerContent);
-          }
-        }
-        jsonValue.put("filter", componentList);
-        logfeederDataMap = (LogFeederDataMap) JSONUtil.jsonToObj(jsonValue.toString(), LogFeederDataMap.class);
-        logfeederDataMap.setId(""+new Date().getTime());
-        saveUserFilter(logfeederDataMap);
-
-      } catch (JsonParseException | JSONException je) {
-        LOG.error("Error parsing JSON. key=" + key + ", componentArray=" + componentArray, je);
-        logfeederDataMap = new LogFeederDataMap();
+      
+      for (String logId : logIds) {
+        LogfeederFilterData logfeederFilterData = new LogfeederFilterData();
+        logfeederFilterData.setLabel(logId);
+        logfeederFilterData.setDefaultLevels(logfeederDefaultLevels);
+        logfeederDataMap.getFilter().put(logId, logfeederFilterData);
       }
+      saveUserFilter(logfeederDataMap);
     }
+    
     return logfeederDataMap;
   }
 
@@ -209,31 +187,4 @@ public class UserConfigSolrDao extends SolrDaoBase {
   public SolrSchemaFieldDao getSolrSchemaFieldDao() {
     return solrSchemaFieldDao;
   }
-
-  private String getHadoopServiceConfigJSON() {
-    StringBuilder result = new StringBuilder("");
-
-    // Get file from resources folder
-    ClassLoader classLoader = getClass().getClassLoader();
-    File file = new File(classLoader.getResource("HadoopServiceConfig.json").getFile());
-
-    try (Scanner scanner = new Scanner(file)) {
-
-      while (scanner.hasNextLine()) {
-        String line = scanner.nextLine();
-        result.append(line).append("\n");
-      }
-
-      scanner.close();
-
-    } catch (IOException e) {
-      LOG.error("Unable to read HadoopServiceConfig.json", e);
-    }
-
-    String hadoopServiceConfig = result.toString();
-    if (JSONUtil.isJSONValid(hadoopServiceConfig)) {
-      return hadoopServiceConfig;
-    }
-    return null;
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/3114b1c6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
index 6c280ac..89873f3 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ManagerBase.java
@@ -19,19 +19,13 @@
 
 package org.apache.ambari.logsearch.manager;
 
-import java.io.File;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.Scanner;
 
-import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.model.response.LogData;
 import org.apache.ambari.logsearch.model.response.LogSearchResponse;
 import org.apache.ambari.logsearch.dao.SolrDaoBase;
-import org.apache.ambari.logsearch.util.JSONUtil;
-import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.client.solrj.SolrQuery;
@@ -47,35 +41,6 @@ public abstract class ManagerBase<LOG_DATA_TYPE extends LogData, SEARCH_RESPONSE
   public ManagerBase() {
     super();
   }
-
-  public String getHadoopServiceConfigJSON() {
-    StringBuilder result = new StringBuilder("");
-
-    // Get file from resources folder
-    ClassLoader classLoader = getClass().getClassLoader();
-    File file = new File(classLoader.getResource("HadoopServiceConfig.json").getFile());
-
-    try (Scanner scanner = new Scanner(file)) {
-
-      while (scanner.hasNextLine()) {
-        String line = scanner.nextLine();
-        result.append(line).append("\n");
-      }
-
-      scanner.close();
-
-    } catch (IOException e) {
-      logger.error("Unable to read HadoopServiceConfig.json", e);
-      throw RESTErrorUtil.createRESTException(e.getMessage(), MessageEnums.ERROR_SYSTEM);
-    }
-
-    String hadoopServiceConfig = result.toString();
-    if (JSONUtil.isJSONValid(hadoopServiceConfig)) {
-      return hadoopServiceConfig;
-    }
-    throw RESTErrorUtil.createRESTException("Improper JSON", MessageEnums.ERROR_SYSTEM);
-
-  }
   
   protected SEARCH_RESPONSE getLastPage(SolrDaoBase solrDoaBase, SimpleQuery lastPageQuery, String event) {
     int maxRows = lastPageQuery.getRows();

http://git-wip-us.apache.org/repos/asf/ambari/blob/3114b1c6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
index 74c549a..44d0c00 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/manager/ServiceLogsManager.java
@@ -28,6 +28,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Scanner;
 
 import javax.inject.Inject;
 import javax.inject.Named;
@@ -38,6 +39,8 @@ import com.google.common.collect.Lists;
 import freemarker.template.Configuration;
 import freemarker.template.Template;
 import freemarker.template.TemplateException;
+
+import org.apache.ambari.logsearch.common.HadoopServiceConfigHelper;
 import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.MessageEnums;
 import org.apache.ambari.logsearch.dao.ServiceLogsSolrDao;
@@ -68,6 +71,7 @@ import org.apache.ambari.logsearch.solr.model.SolrComponentTypeLogData;
 import org.apache.ambari.logsearch.solr.model.SolrHostLogData;
 import org.apache.ambari.logsearch.solr.model.SolrServiceLogData;
 import org.apache.ambari.logsearch.util.DownloadUtil;
+import org.apache.ambari.logsearch.util.JSONUtil;
 import org.apache.ambari.logsearch.util.DateUtil;
 import org.apache.ambari.logsearch.util.RESTErrorUtil;
 import org.apache.ambari.logsearch.util.SolrUtil;
@@ -567,4 +571,13 @@ public class ServiceLogsManager extends ManagerBase<SolrServiceLogData, ServiceL
     }
     throw new UnsupportedOperationException();
   }
+  
+
+  public String getHadoopServiceConfigJSON() {
+    String hadoopServiceConfigJSON = HadoopServiceConfigHelper.getHadoopServiceConfigJSON();
+    if (hadoopServiceConfigJSON == null) {
+      throw RESTErrorUtil.createRESTException("Could not load HadoopServiceConfig.json", MessageEnums.ERROR_SYSTEM);
+    }
+    return hadoopServiceConfigJSON;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/3114b1c6/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/common/LogFeederDataMap.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/common/LogFeederDataMap.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/common/LogFeederDataMap.java
index b09610c..cc7d53d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/common/LogFeederDataMap.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/common/LogFeederDataMap.java
@@ -21,7 +21,7 @@ package org.apache.ambari.logsearch.model.common;
 import io.swagger.annotations.ApiModel;
 import io.swagger.annotations.ApiModelProperty;
 
-import java.util.HashMap;
+import java.util.TreeMap;
 
 @ApiModel
 public class LogFeederDataMap {
@@ -30,13 +30,13 @@ public class LogFeederDataMap {
   private String id;
 
   @ApiModelProperty
-  private HashMap<String, LogfeederFilterData> filter;
+  private TreeMap<String, LogfeederFilterData> filter;
 
-  public HashMap<String, LogfeederFilterData> getFilter() {
+  public TreeMap<String, LogfeederFilterData> getFilter() {
     return filter;
   }
 
-  public void setFilter(HashMap<String, LogfeederFilterData> filter) {
+  public void setFilter(TreeMap<String, LogfeederFilterData> filter) {
     this.filter = filter;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/3114b1c6/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
index 9bdf0fa..bcab975 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/filter/CreateLogfeederFilterView.js
@@ -133,14 +133,30 @@ define(['require',
             	this.ui.loader.hide();
             },
             renderComponents : function(){
-            	var that =this;
-            	_.each(that.componentsList.models, function(model){
-                    var levels='<td align="left">'+model.get("type")+'</td>';
-                    var override = '<td class="text-left"><span class="pull-left"><!--small><i>Override</i></small--> <input data-override type="checkbox" data-name='+model.get("type")+'></span></td>';
-                    levels +=  override + that.getLevelForComponent(model.get("type"),false);
-                    var html = '<tr class="overrideSpacer"></tr><tr class="componentRow borderShow" data-component="'+model.get("type")+'">'+levels+'</tr><tr></tr>';
-                    that.ui.filterContent.append(html);
-                });
+              var that = this;
+              var set = new Set();
+              _.each(that.componentsList.models, function(model){
+                that.createRow(model.get("type"), that);
+                set.add(model.get("type"));
+              });
+              
+              if (set.size > 0) {
+                that.ui.filterContent.append('<tr class="overrideSpacer"></tr><tr class="overrideSpacer"></tr><tr class="overrideSpacer"></tr>');
+              }
+              
+              var components = this.model.get("filter");
+              _.each(components,function(value,key){
+                if (!set.has(key)) {
+                  that.createRow(key, that);
+                }
+              });
+            },
+            createRow : function(type, that) {
+              var levels = '<td align="left">'+type+'</td>';
+              var override = '<td class="text-left"><span class="pull-left"><!--small><i>Override</i></small--> <input data-override type="checkbox" data-name='+type+'></span></td>';
+              levels +=  override + that.getLevelForComponent(type,false);
+              var html = '<tr class="overrideSpacer"></tr><tr class="componentRow borderShow" data-component="'+type+'">'+levels+'</tr><tr></tr>';
+              that.ui.filterContent.append(html);
             },
             populateValues : function(){
             	var that =this;
@@ -332,34 +348,35 @@ define(['require',
 
             },
             setValues : function(){
-            	var obj = {filter: {}},that= this;
-            	_.each(that.componentsList.models, function(model){
-            		var comp = model.get("type"),date = that.$("[data-date='"+comp+"']").data("daterangepicker");
-            		var host = (that.$("[data-host='"+comp+"']").length) ? that.$("[data-host='"+comp+"']").select2('val') : [];
-            		obj.filter[comp] = {
-            				label : comp,
-            				hosts: host,
-            				defaultLevels : that.getDefaultValues(comp),
-            				overrideLevels : that.getOverideValues(comp),
-            				expiryTime : (date && date.startDate) ? date.startDate.toJSON() : ""
-            		};
-            	});
-            	return (obj);
+              var obj = {filter: {}},that = this;
+              var components = this.model.get("filter");
+              _.each(components,function(value,key){
+                var date = that.$("[data-date='"+key+"']").data("daterangepicker");
+                var host = (that.$("[data-host='"+key+"']").length) ? that.$("[data-host='"+key+"']").select2('val') : [];
+                obj.filter[key] = {
+                    label : key,
+                    hosts: host,
+                    defaultLevels : that.getDefaultValues(key),
+                    overrideLevels : that.getOverideValues(key),
+                    expiryTime : (date && date.startDate) ? date.startDate.toJSON() : ""
+                };
+              });
+              return (obj);
             },
             getOverideValues : function(ofComponent){
-            	var $els = this.$("tr.overrideRow."+ofComponent).find("input:checked"),values=[];
-            	for(var i=0; i<$els.length; i++){
-            		values.push($($els[i]).data("id"));
-            	}
-            	return values;
+              var $els = this.$("tr.overrideRow."+ofComponent).find("input:checked"),values=[];
+              for(var i=0; i<$els.length; i++){
+                values.push($($els[i]).data("id"));
+              }
+              return values;
             },
             getDefaultValues : function(ofComponent){
-            	var $els = this.$("tr[data-component='"+ofComponent+"']").find("input:checked"),values=[];
-            	for(var i=0; i<$els.length; i++){
-            		if($($els[i]).data("id"))
-            			values.push($($els[i]).data("id"));
-            	}
-            	return values;
+              var $els = this.$("tr[data-component='"+ofComponent+"']").find("input:checked"),values=[];
+              for(var i=0; i<$els.length; i++){
+                if($($els[i]).data("id"))
+                  values.push($($els[i]).data("id"));
+              }
+              return values;
             }
         });
 


[6/6] ambari git commit: AMBARI-18548. Declarative Logsearch/Logfeeder Component Metadata for Stack Component (oleewere)

Posted by ol...@apache.org.
AMBARI-18548. Declarative Logsearch/Logfeeder Component Metadata for Stack Component (oleewere)

Change-Id: Iba44ef785a06e0b67e6b96c537fd055847a45a59


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d5e152af
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d5e152af
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d5e152af

Branch: refs/heads/logsearch-ga
Commit: d5e152afc4598a2621525a04e666258b21b97c86
Parents: 3114b1c
Author: oleewere <ol...@gmail.com>
Authored: Wed Oct 19 17:36:02 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Wed Oct 19 17:36:02 2016 +0200

----------------------------------------------------------------------
 .../AuditComponentsRequestQueryConverter.java   |  31 +-
 .../BaseServiceLogRequestQueryConverter.java    |   5 -
 .../ambari/logsearch/dao/SolrDaoBase.java       |   1 +
 .../request/impl/AuditComponentRequest.java     |   2 +-
 .../main/webapp/scripts/views/common/Header.js  |   2 +-
 .../scripts/views/dashboard/MainLayoutView.js   |   8 +-
 .../scripts/views/tabs/ComparisonLayoutView.js  |   6 +-
 ...AuditComponentRequestQueryConverterTest.java |   4 +-
 ...BaseServiceLogRequestQueryConverterTest.java |   5 +-
 .../configuration/accumulo-logsearch-conf.xml   | 124 ++++
 .../configuration/infra-logsearch-conf.xml      |  80 +++
 .../0.1.0/configuration/ams-logsearch-conf.xml  | 201 +++++++
 .../configuration/atlas-logsearch-conf.xml      |  80 +++
 .../configuration/falcon-logsearch-conf.xml     |  80 +++
 .../configuration/flume-logsearch-conf.xml      |  80 +++
 .../configuration/hbase-logsearch-conf.xml      | 111 ++++
 .../configuration/hdfs-logsearch-conf.xml       | 248 ++++++++
 .../configuration/hive-logsearch-conf.xml       | 117 ++++
 .../configuration/zookeeper-logsearch-conf.xml  | 124 ++++
 .../configuration/knox-logsearch-conf.xml       |  93 +++
 .../configuration/logfeeder-system_log-env.xml  |  59 ++
 .../0.5.0/package/scripts/logfeeder.py          |   1 -
 .../0.5.0/package/scripts/logsearch.py          |   1 -
 .../scripts/logsearch_config_aggregator.py      |  67 +++
 .../LOGSEARCH/0.5.0/package/scripts/params.py   |  59 +-
 .../0.5.0/package/scripts/setup_logfeeder.py    |  17 +-
 .../templates/HadoopServiceConfig.json.j2       | 582 +++----------------
 .../templates/input.config-accumulo.json.j2     | 105 ----
 .../templates/input.config-ambari.json.j2       |   2 +-
 .../package/templates/input.config-ams.json.j2  | 191 ------
 .../templates/input.config-atlas.json.j2        |  55 --
 .../templates/input.config-falcon.json.j2       |  55 --
 .../templates/input.config-flume.json.j2        |  56 --
 .../templates/input.config-hbase.json.j2        |  91 ---
 .../package/templates/input.config-hdfs.json.j2 | 251 --------
 .../package/templates/input.config-hive.json.j2 |  97 ----
 .../templates/input.config-infra.json.j2        |  56 --
 .../templates/input.config-kafka.json.j2        | 105 ----
 .../package/templates/input.config-knox.json.j2 |  68 ---
 .../templates/input.config-oozie.json.j2        |  56 --
 .../templates/input.config-ranger.json.j2       | 122 ----
 .../templates/input.config-spark.json.j2        |  72 ---
 .../templates/input.config-spark2.json.j2       |  66 ---
 .../templates/input.config-storm.json.j2        |  86 ---
 .../package/templates/input.config-yarn.json.j2 |  86 ---
 .../templates/input.config-zeppelin.json.j2     |  56 --
 .../templates/input.config-zookeeper.json.j2    |  56 --
 .../LOGSEARCH/0.5.0/properties/secure_log.json  |  31 +
 .../0.5.0/properties/system_messages.json       |  31 +
 .../configuration/oozie-logsearch-conf.xml      |  80 +++
 .../configuration/ranger-logsearch-conf.xml     | 111 ++++
 .../configuration/ranger-kms-logsearch-conf.xml |  80 +++
 .../configuration/spark-logsearch-conf.xml      |  98 ++++
 .../configuration/spark2-logsearch-conf.xml     |  92 +++
 .../configuration/storm-logsearch-conf.xml      | 110 ++++
 .../mapred-logsearch-conf.xml                   |  80 +++
 .../configuration/yarn-logsearch-conf.xml       | 104 ++++
 .../configuration/zeppelin-logsearch-conf.xml   |  80 +++
 .../configuration/zookeeper-logsearch-conf.xml  |  76 +++
 .../stacks/2.4/LOGSEARCH/test_logfeeder.py      |   9 +-
 .../stacks/2.4/LOGSEARCH/test_logsearch.py      |   1 -
 .../test/python/stacks/2.4/configs/default.json |   8 +-
 62 files changed, 2583 insertions(+), 2328 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/AuditComponentsRequestQueryConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/AuditComponentsRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/AuditComponentsRequestQueryConverter.java
index d81bdbf..bf77444 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/AuditComponentsRequestQueryConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/AuditComponentsRequestQueryConverter.java
@@ -18,45 +18,36 @@
  */
 package org.apache.ambari.logsearch.converter;
 
-import org.apache.ambari.logsearch.common.LogSearchConstants;
 import org.apache.ambari.logsearch.common.LogType;
 import org.apache.ambari.logsearch.model.request.impl.AuditComponentRequest;
-import org.apache.commons.lang3.StringUtils;
-import org.springframework.data.domain.Sort;
 import org.springframework.data.solr.core.query.FacetOptions;
-import org.springframework.data.solr.core.query.SimpleFacetQuery;
 
 import javax.inject.Named;
 
 import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_COMPONENT;
+import static org.apache.ambari.logsearch.solr.SolrConstants.AuditLogConstants.AUDIT_EVTTIME;
 
 @Named
-public class AuditComponentsRequestQueryConverter extends AbstractSearchRequestQueryConverter<AuditComponentRequest, SimpleFacetQuery> {
+public class AuditComponentsRequestQueryConverter extends AbstractLogRequestFacetQueryConverter<AuditComponentRequest> {
 
   @Override
-  public SimpleFacetQuery extendSolrQuery(AuditComponentRequest request, SimpleFacetQuery query) {
-    FacetOptions facetOptions = new FacetOptions(); // TODO: check that date filtering is needed or not
-    facetOptions.addFacetOnField(AUDIT_COMPONENT);
-    facetOptions.setFacetSort(FacetOptions.FacetSort.INDEX);
-    facetOptions.setFacetLimit(-1);
-    query.setFacetOptions(facetOptions);
-    return query;
+  public FacetOptions.FacetSort getFacetSort() {
+    return FacetOptions.FacetSort.INDEX;
   }
 
   @Override
-  public Sort sort(AuditComponentRequest request) {
-    Sort.Direction direction = StringUtils.equals(request.getSortType(), LogSearchConstants.DESCENDING_ORDER)
-      ? Sort.Direction.DESC : Sort.Direction.ASC;
-    return new Sort(new Sort.Order(direction, AUDIT_COMPONENT));
+  public String getDateTimeField() {
+    return AUDIT_EVTTIME;
   }
 
   @Override
-  public SimpleFacetQuery createQuery() {
-    return new SimpleFacetQuery();
+  public LogType getLogType() {
+    return LogType.AUDIT;
   }
 
   @Override
-  public LogType getLogType() {
-    return LogType.AUDIT;
+  public void appendFacetOptions(FacetOptions facetOptions, AuditComponentRequest request) {
+    facetOptions.addFacetOnField(AUDIT_COMPONENT);
+    facetOptions.setFacetLimit(-1);
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverter.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverter.java
index 8270f2d..2601f72 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverter.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverter.java
@@ -81,9 +81,4 @@ public class BaseServiceLogRequestQueryConverter extends AbstractServiceLogReque
   public LogType getLogType() {
     return LogType.SERVICE;
   }
-
-  @Override
-  public void addComponentFilters(BaseServiceLogRequest request, SimpleQuery query) {
-
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
index 77eb51f..39b65ae 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
@@ -51,6 +51,7 @@ public abstract class SolrDaoBase {
 
   public QueryResponse process(SolrQuery solrQuery, String event) {
     SolrUtil.removeDoubleOrTripleEscapeFromFilters(solrQuery);
+    LOG.info("Solr query will be processed: " + solrQuery);
     if (getSolrClient() != null) {
       event = event == null ? solrQuery.get("event") : event;
       solrQuery.remove("event");

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditComponentRequest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditComponentRequest.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditComponentRequest.java
index 94cb255..636c492 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditComponentRequest.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/model/request/impl/AuditComponentRequest.java
@@ -21,5 +21,5 @@ package org.apache.ambari.logsearch.model.request.impl;
 import org.apache.ambari.logsearch.common.Marker;
 
 @Marker
-public class AuditComponentRequest extends CommonSearchRequest {
+public class AuditComponentRequest extends BaseLogRequest {
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
index 1e469d5..deb0e28 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/common/Header.js
@@ -168,7 +168,7 @@ define(['require',
                 require(['views/filter/CreateLogfeederFilterView'],function(CreateLogfeederFilter){
                     var view = new CreateLogfeederFilter({});
                     var options = {
-                        title: "Log Feeder Log Levels",
+                        title: "Log Feeder Log Levels Filter",
                         content: view,
                         viewType: 'Filter',
                         resizable: false,

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/MainLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/MainLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/MainLayoutView.js
index aa79ead..2333c53 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/MainLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/dashboard/MainLayoutView.js
@@ -174,8 +174,8 @@ define(['require',
 			require(['views/tabs/ComparisonLayoutView'], function(ComparisonLayoutView){
 				var tabName = "";
 				_.each(view.componetList,function(object){
-					if(object.host && object.component){
-						tabName += (object.host + object.component).replace(/\./g,"_");
+					if(object.host_name && object.component_name){
+						tabName += (object.host_name + object.component_name).replace(/\./g,"_");
 					}
 				});
 				if(_.isUndefined(that[tabName])){
@@ -511,10 +511,10 @@ define(['require',
 			        var component = dataValue.component || dataValue.node;
 			        var spanLength = this.$('.compare .panel-body span.hasNode');
 			        if (spanLength.length != 0 && spanLength.length >= 1) {
-			            this.componetList.push({ 'host': host, 'component': component, id: clickedId });
+			            this.componetList.push({ 'host_name': host, 'component_name': component, id: clickedId });
 			            this.$('.compare .panel-body .hostCompList').append('<span class="hasNode" data-id="' + clickedId + '"><i class=" closeComponent fa fa-times-circle"></i>' + host.split(".")[0] + ' <i class="fa fa-angle-double-right"></i><br> ' + component + '</span>');
 			        } else {
-			            this.componetList.push({ 'host': host, 'component': component, id: clickedId });
+			            this.componetList.push({ 'host_name': host, 'component_name': component, id: clickedId });
 			            this.$('.compare .panel-body .hostCompList').html('<span class="hasNode" data-id="' + clickedId + '"><i class=" closeComponent fa fa-times-circle"></i>' + host.split(".")[0] + ' <i class="fa fa-angle-double-right"></i><br> ' + component + '</span>');
 			        }
 			    }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/ComparisonLayoutView.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/ComparisonLayoutView.js b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/ComparisonLayoutView.js
index 526e3c1..73cca56 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/ComparisonLayoutView.js
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/webapp/scripts/views/tabs/ComparisonLayoutView.js
@@ -61,7 +61,7 @@ define(['require',
                     if (that.componetList) {
                         var $parent = that.ui.comparisonTab;
                         _.each(that.componetList, function(object,i) {
-                            var id = (object.host + '_' + object.component).replace(/\./g, "_");
+                            var id = (object.host_name + '_' + object.component_name).replace(/\./g, "_");
                             if(i % 2 == 0 && i > 0){
                                 var $div = $("<div class='row comparisonTab'></div>");
                                 that.ui.comparisonTab.parent().append($div);
@@ -76,8 +76,8 @@ define(['require',
                             region.show(new ComparisonView({
                                 globalVent: that.globalVent,
                                 params: _.extend({},that.params, {
-                                    'host': object.host,
-                                    'component': object.component
+                                    'host_name': object.host_name,
+                                    'component_name': object.component_name
                                 }),
                                 datePickerPosition:(((i+1) % 2 == 0)?("left"):("right"))
                             }));

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/AuditComponentRequestQueryConverterTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/AuditComponentRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/AuditComponentRequestQueryConverterTest.java
index d6ecdd5..0c6e975 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/AuditComponentRequestQueryConverterTest.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/AuditComponentRequestQueryConverterTest.java
@@ -46,7 +46,7 @@ public class AuditComponentRequestQueryConverterTest extends AbstractRequestConv
     SimpleFacetQuery facetQuery = underTest.convert(request);
     SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(facetQuery);
     // THEN
-    assertEquals("?q=*%3A*&start=0&rows=25&sort=repo+asc&facet=true&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.field=repo",
+    assertEquals("?q=*%3A*&rows=0&fq=evtTime%3A%5B*+TO+*%5D&facet=true&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.field=repo",
       query.toQueryString());
   }
 
@@ -59,7 +59,7 @@ public class AuditComponentRequestQueryConverterTest extends AbstractRequestConv
     SolrQuery query = new DefaultQueryParser().doConstructSolrQuery(facetQuery);
     // THEN
     assertNotNull(facetQuery);
-    assertEquals("?q=*%3A*&start=0&rows=99999&sort=repo+asc&facet=true&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.field=repo",
+    assertEquals("?q=*%3A*&rows=0&fq=evtTime%3A%5B*+TO+*%5D&facet=true&facet.mincount=1&facet.limit=-1&facet.sort=index&facet.field=repo",
       query.toQueryString());
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverterTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverterTest.java b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverterTest.java
index a75037b..3775a2d 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverterTest.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/test/java/org/apache/ambari/logsearch/converter/BaseServiceLogRequestQueryConverterTest.java
@@ -52,8 +52,9 @@ public class BaseServiceLogRequestQueryConverterTest extends AbstractRequestConv
     SolrQuery solrQuery = defaultQueryParser.doConstructSolrQuery(query);
     SolrUtil.removeDoubleOrTripleEscapeFromFilters(solrQuery);
     // THEN
-    assertEquals("?q=*%3A*&start=0&rows=25&fq=log_message%3A*myincludemessage*&fq=-log_message%3A*myexcludemessage*" +
-      "&fq=host%3Alogsearch.com&fq=path%3A%5C%2Fvar%5C%2Flog%5C%2Fmyfile%5C-%5C*%5C-hdfs.log&fq=type%3Acomponent&fq=level%3A%28FATAL+ERROR+WARN+UNKNOWN%29" +
+    assertEquals("?q=*%3A*&start=0&rows=25&fq=type%3A%28logsearch_app+secure_log%29&fq=-type%3A%28hst_agent+system_message%29" +
+      "&fq=log_message%3A*myincludemessage*&fq=-log_message%3A*myexcludemessage*&fq=host%3Alogsearch.com" +
+      "&fq=path%3A%5C%2Fvar%5C%2Flog%5C%2Fmyfile%5C-%5C*%5C-hdfs.log&fq=type%3Acomponent&fq=level%3A%28FATAL+ERROR+WARN+UNKNOWN%29" +
       "&fq=logtime%3A%5B2016-09-13T22%3A00%3A01.000Z+TO+2016-09-14T22%3A00%3A01.000Z%5D&sort=logtime+desc%2Cseq_num+desc",
       solrQuery.toQueryString());
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/configuration/accumulo-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/configuration/accumulo-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/configuration/accumulo-logsearch-conf.xml
new file mode 100644
index 0000000..ff4f695
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/configuration/accumulo-logsearch-conf.xml
@@ -0,0 +1,124 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Accumulo</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>ACCUMULO_MASTER:accumulo_master;ACCUMULO_MONITOR:accumulo_monitor;ACCUMULO_GC:accumulo_gc;ACCUMULO_TRACER:accumulo_tracer;ACCUMULO_TSERVER:accumulo_tserver</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"accumulo_gc",
+      "rowtype":"service",
+      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/gc_*.log"
+    },
+    {
+      "type":"accumulo_master",
+      "rowtype":"service",
+      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/master_*.log"
+    },
+    {
+      "type":"accumulo_monitor",
+      "rowtype":"service",
+      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/monitor_*.log"
+    },
+    {
+      "type":"accumulo_tracer",
+      "rowtype":"service",
+      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/tracer_*.log"
+    },
+    {
+      "type":"accumulo_tserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/tserver_*.log"
+    }
+   ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "accumulo_master"
+          ]
+         }
+       },
+      "log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     },
+    {
+      "filter":"grok",
+      "comment":"This one has one extra space after LEVEL",
+      "conditions":{
+        "fields":{
+          "type":[
+            "accumulo_gc",
+            "accumulo_monitor",
+            "accumulo_tracer",
+            "accumulo_tserver"
+          ]
+         }
+       },
+      "log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %X{application} %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-logsearch-conf.xml
new file mode 100644
index 0000000..1de8c46
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-logsearch-conf.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Infra</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>INFRA_SOLR:infra_solr</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"infra_solr",
+      "rowtype":"service",
+      "path":"{{default('/configurations/infra-solr-env/infra_solr_log_dir', '/var/log/ambari-infra-solr')}}/solr.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "infra_solr"
+          ]
+        }
+      },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
+  </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-logsearch-conf.xml
new file mode 100644
index 0000000..72d44db
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-logsearch-conf.xml
@@ -0,0 +1,201 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>AMS</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>METRICS_COLLECTOR:ams_collector,ams_hbase_master,ams_hbase_regionserver;METRICS_MONITOR:ams_monitor;METRICS_GRAFANA:ams_grafana</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"ams_hbase_master",
+      "rowtype":"service",
+      "path":"{{default('/configurations/ams-env/metrics_collector_log_dir', '/var/log/ambari-metrics-collector')}}/hbase-ams-master-*.log"
+    },
+    {
+      "type":"ams_hbase_regionserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/ams-env/metrics_collector_log_dir', '/var/log/ambari-metrics-collector')}}/hbase-ams-regionserver-*.log"
+    },
+    {
+      "type":"ams_collector",
+      "rowtype":"service",
+      "path":"{{default('/configurations/ams-env/metrics_collector_log_dir', '/var/log/ambari-metrics-collector')}}/ambari-metrics-collector.log"
+    },
+    {
+      "type":"ams_monitor",
+      "rowtype":"service",
+      "path":"{{default('/configurations/ams-env/metrics_monitor_log_dir', '/var/log/ambari-metrics-monitor')}}/ambari-metrics-monitor.out"
+    },
+    {
+      "type":"ams_grafana",
+      "rowtype":"service",
+      "path":"{{default('/configurations/ams-grafana-env/metrics_grafana_log_dir', '/var/log/ambari-metrics-grafana')}}/grafana.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ams_collector"
+          ]
+         }
+       },
+      "log4j_format":"%d{ISO8601} %p %c: %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ams_hbase_master",
+            "ams_hbase_regionserver"
+          ]
+         }
+       },
+      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ams_grafana"
+          ]
+         }
+       },
+      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
+      "multiline_pattern":"^(%{DATESTAMP:logtime})",
+      "message_pattern":"(?m)^%{DATESTAMP:logtime}%{SPACE}\\[%{WORD:level}\\]%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy/MM/dd HH:mm:ss"
+          }
+         },
+        "level":[
+          {
+            "map_fieldvalue":{
+              "pre_value":"I",
+              "post_value":"INFO"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"W",
+              "post_value":"WARN"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"D",
+              "post_value":"DEBUG"
+             }
+           },
+           {
+             "map_fieldvalue":{
+               "pre_value":"E",
+               "post_value":"ERROR"
+             }
+           },
+           {
+             "map_fieldvalue":{
+               "pre_value":"F",
+               "post_value":"FATAL"
+             }
+           }
+         ]
+       }
+     },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ams_monitor"
+          ]
+        }
+      },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{JAVAFILE:file}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       },
+      "level":[
+        {
+          "map_fieldvalue":{
+            "pre_value":"WARNING",
+            "post_value":"WARN"
+          }
+        }
+      ]
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/atlas-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/atlas-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/atlas-logsearch-conf.xml
new file mode 100644
index 0000000..71a08fb
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/atlas-logsearch-conf.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Atlas</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>ATLAS_SERVER:atlas_app</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"atlas_app",
+      "rowtype":"service",
+      "path":"{{default('/configurations/atlas-env/metadata_log_dir', '/var/log/atlas')}}/application.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "atlas_app"
+          ]
+         }
+       },
+      "log4j_format":"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-logsearch-conf.xml
new file mode 100644
index 0000000..6b43ba6
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-logsearch-conf.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Falcon</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>FALCON_SERVER:falcon_app</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"falcon_app",
+      "rowtype":"service",
+      "path":"{{default('/configurations/falcon-env/falcon_log_dir', '/var/log/falcon')}}/falcon.application.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "falcon_app"
+          ]
+         }
+       },
+      "log4j_format":"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/configuration/flume-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/configuration/flume-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/configuration/flume-logsearch-conf.xml
new file mode 100644
index 0000000..98e6db8
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/configuration/flume-logsearch-conf.xml
@@ -0,0 +1,80 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Flume</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>FLUME:flume_handler</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"flume_handler",
+      "rowtype":"service",
+      "path":"{{default('/configurations/flume-env/flume_log_dir', '/var/log/flume')}}/flume.log"
+    }
+   ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "flume_handler"
+          ]
+         }
+       },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
+      "message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}\\(%{JAVACLASS:class}\\.%{JAVAMETHOD:method}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"dd MMM yyyy HH:mm:ss,SSS"
+          }
+         }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-logsearch-conf.xml
new file mode 100644
index 0000000..321ea4e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-logsearch-conf.xml
@@ -0,0 +1,111 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>HBase</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>HBASE_MASTER:hbase_master;HBASE_REGIONSERVER:hbase_regionserver;PHOENIX_QUERY_SERVER:hbase_phoenix_server</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"hbase_master",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/hbase-*-master-*.log"
+    },
+    {
+      "type":"hbase_regionserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/hbase-*-regionserver-*.log"
+    },
+    {
+      "type":"hbase_phoenix_server",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/phoenix-*-server.log"
+    }
+   ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hbase_master",
+            "hbase_regionserver"
+          ]
+         }
+       },
+      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hbase_phoenix_server"
+          ]
+         }
+      },
+      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-logsearch-conf.xml
new file mode 100644
index 0000000..d85a028
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-logsearch-conf.xml
@@ -0,0 +1,248 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>HDFS</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>NAMENODE:hdfs_namenode;DATANODE:hdfs_datanode;SECONDARY_NAMENODE:hdfs_secondarynamenode;JOURNALNODE:hdfs_journalnode;ZKFC:hdfs_zkfc;NFS_GATEWAY:hdfs_nfs3</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"hdfs_datanode",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-datanode-*.log"
+    },
+    {
+      "type":"hdfs_namenode",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-namenode-*.log"
+    },
+    {
+      "type":"hdfs_journalnode",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-journalnode-*.log"
+    },
+    {
+      "type":"hdfs_secondarynamenode",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-secondarynamenode-*.log"
+    },
+    {
+      "type":"hdfs_zkfc",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-zkfc-*.log"
+    },
+    {
+      "type":"hdfs_nfs3",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-nfs3-*.log"
+    },
+    {
+      "type":"hdfs_audit",
+      "rowtype":"audit",
+      "is_enabled":"true",
+      "add_fields":{
+        "logType":"HDFSAudit",
+        "enforcer":"hadoop-acl",
+        "repoType":"1",
+        "repo":"hdfs"
+      },
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hdfs-audit.log"
+    }
+   ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_datanode",
+            "hdfs_journalnode",
+            "hdfs_secondarynamenode",
+            "hdfs_namenode",
+            "hdfs_zkfc",
+            "hdfs_nfs3"
+          ]
+         }
+       },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+       }
+     },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_audit"
+          ]
+         }
+       },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "evtTime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     },
+    {
+      "filter":"keyvalue",
+      "sort_order":1,
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_audit"
+          ]
+         }
+       },
+      "source_field":"log_message",
+      "value_split":"=",
+      "field_split":"\t",
+      "post_map_values":{
+        "src":{
+          "map_fieldname":{
+            "new_fieldname":"resource"
+          }
+         },
+        "ip":{
+          "map_fieldname":{
+            "new_fieldname":"cliIP"
+          }
+         },
+        "allowed":[
+          {
+            "map_fieldvalue":{
+              "pre_value":"true",
+              "post_value":"1"
+            }
+           },
+          {
+            "map_fieldvalue":{
+              "pre_value":"false",
+              "post_value":"0"
+            }
+           },
+          {
+            "map_fieldname":{
+              "new_fieldname":"result"
+            }
+           }
+         ],
+        "cmd":{
+          "map_fieldname":{
+            "new_fieldname":"action"
+          }
+         },
+        "proto":{
+          "map_fieldname":{
+            "new_fieldname":"cliType"
+          }
+         },
+        "callerContext":{
+          "map_fieldname":{
+            "new_fieldname":"req_caller_id"
+          }
+         }
+       }
+     },
+    {
+      "filter":"grok",
+      "sort_order":2,
+      "source_field":"ugi",
+      "remove_source_field":"false",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_audit"
+          ]
+         }
+       },
+      "message_pattern":"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}",
+      "post_map_values":{
+        "user":{
+          "map_fieldname":{
+            "new_fieldname":"reqUser"
+          }
+         },
+        "x_user":{
+          "map_fieldname":{
+            "new_fieldname":"reqUser"
+          }
+         },
+        "p_user":{
+          "map_fieldname":{
+            "new_fieldname":"reqUser"
+          }
+         },
+        "k_user":{
+          "map_fieldname":{
+            "new_fieldname":"proxyUsers"
+          }
+         },
+        "p_authType":{
+          "map_fieldname":{
+            "new_fieldname":"authType"
+          }
+         },
+        "k_authType":{
+          "map_fieldname":{
+            "new_fieldname":"proxyAuthType"
+          }
+         }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-logsearch-conf.xml
new file mode 100644
index 0000000..c1b971c
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-logsearch-conf.xml
@@ -0,0 +1,117 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Hive</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>HIVE_METASTORE:hive_metastore;HIVE_SERVER:hive_hiveserver2;WEBHCAT_SERVER:webhcat_server</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"hive_hiveserver2",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hive-env/hive_log_dir', '/var/log/hive')}}/hiveserver2.log"
+    },
+    {
+      "type":"hive_metastore",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hive-env/hive_log_dir', '/var/log/hive')}}/hivemetastore.log"
+    },
+    {
+      "type": "webhcat_server",
+      "rowtype":"service",
+      "path":"{{default('configurations/hive-env/hcat_log_dir', '/var/log/webhcat')}}/webhcat.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hive_hiveserver2",
+            "hive_metastore"
+          ]
+         }
+       },
+      "log4j_format":"%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]:%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "webhcat_server"
+          ]
+         }
+       },
+      "log4j_format":" %-5p | %d{DATE} | %c | %m%n",
+      "multiline_pattern":"^(%{SPACE}%{LOGLEVEL:level}%{CUSTOM_SEPARATOR}%{CUSTOM_DATESTAMP:logtime})",
+      "message_pattern":"(?m)^%{SPACE}%{LOGLEVEL:level}%{CUSTOM_SEPARATOR}%{CUSTOM_DATESTAMP:logtime}%{CUSTOM_SEPARATOR}%{JAVACLASS:file}%{CUSTOM_SEPARATOR}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"dd MMM yyyy HH:mm:ss,SSS"
+          }
+         },
+        "level":{
+           "map_fieldvalue":{
+             "pre_value":"WARNING",
+             "post_value":"WARN"
+            }
+        }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/zookeeper-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/zookeeper-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/zookeeper-logsearch-conf.xml
new file mode 100644
index 0000000..29a8b36
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/zookeeper-logsearch-conf.xml
@@ -0,0 +1,124 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Kafka</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>KAFKA_BROKER:kafka_server,kafka_request,kafka_logcleaner,kafka_controller,kafka_statechange</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"kafka_controller",
+      "rowtype":"service",
+      "path":"{{default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')}}/controller.log"
+    },
+    {
+      "type":"kafka_request",
+      "rowtype":"service",
+      "path":"{{default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')}}/kafka-request.log"
+    },
+    {
+      "type":"kafka_logcleaner",
+      "rowtype":"service",
+      "path":"{{default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')}}/log-cleaner.log"
+    },
+    {
+      "type":"kafka_server",
+      "rowtype":"service",
+      "path":"{{default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')}}/server.log"
+    },
+    {
+      "type":"kafka_statechange",
+      "rowtype":"service",
+      "path":"{{default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')}}/state-change.log"
+    }
+   ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "kafka_controller",
+            "kafka_request",
+            "kafka_logcleaner"
+          ]
+         }
+       },
+      "log4j_format":"[%d] %p %m (%c)%n",
+      "multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])",
+      "message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     },
+    {
+      "filter":"grok",
+      "comment":"Suppose to be same log4j pattern as other kafka processes, but some reason thread is not printed",
+      "conditions":{
+        "fields":{
+          "type":[
+            "kafka_server",
+            "kafka_statechange"
+          ]
+         }
+       },
+      "log4j_format":"[%d] %p %m (%c)%n",
+      "multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])",
+      "message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/knox-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/knox-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/knox-logsearch-conf.xml
new file mode 100644
index 0000000..0bf91ff
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/knox-logsearch-conf.xml
@@ -0,0 +1,93 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_final="false" supports_adding_forbidden="true">
+  <property>
+    <name>service_name</name>
+    <display-name>Service name</display-name>
+    <description>Service name for Logsearch Portal (label)</description>
+    <value>Knox</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>component_mappings</name>
+    <display-name>Component mapping</display-name>
+    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
+    <value>KNOX_GATEWAY:knox_gateway,knox_cli,knox_ldap</value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>content</name>
+    <display-name>Logfeeder Config</display-name>
+    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
+    <value>
+{
+  "input":[
+    {
+      "type":"knox_gateway",
+      "rowtype":"service",
+      "path":"/var/log/knox/gateway.log"
+    },
+    {
+      "type":"knox_cli",
+      "rowtype":"service",
+      "path":"/var/log/knox/knoxcli.log"
+    },
+    {
+      "type":"knox_ldap",
+      "rowtype":"service",
+      "path":"/var/log/knox/ldap.log"
+    }
+   ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "knox_gateway",
+            "knox_cli",
+            "knox_ldap"
+          ]
+
+        }
+       },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+         }
+       }
+     }
+   ]
+ }
+    </value>
+    <value-attributes>
+      <type>content</type>
+      <show-property-name>false</show-property-name>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-system_log-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-system_log-env.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-system_log-env.xml
new file mode 100644
index 0000000..b8dd4b9
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-system_log-env.xml
@@ -0,0 +1,59 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration supports_adding_forbidden="true">
+
+  <property>
+    <name>logfeeder_system_messages_content</name>
+    <display-name>System messages grok filter</display-name>
+    <description>This is the jinja template for input.config.system_messages.json file</description>
+    <value/>
+    <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
+    <value-attributes>
+      <property-file-name>system_messages.json</property-file-name>
+      <property-file-type>text</property-file-type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+  <property>
+    <name>logfeeder_secure_log_content</name>
+    <display-name>Secure log grok filter</display-name>
+    <description>This is the jinja template for input.config.secure_log.json file</description>
+    <value/>
+    <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
+    <value-attributes>
+      <property-file-name>secure_log.json</property-file-name>
+      <property-file-type>text</property-file-type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+  <property>
+    <name>logfeeder_system_log_enabled</name>
+    <value>false</value>
+    <value-attributes>
+      <type>boolean</type>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py
index cfc6c91..fbc29c8 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py
@@ -25,7 +25,6 @@ from resource_management.libraries.script.script import Script
 from setup_logfeeder import setup_logfeeder
 from logsearch_common import kill_process
 
-
 class LogFeeder(Script):
   def install(self, env):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch.py
index 9c5215d..4410712 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch.py
@@ -24,7 +24,6 @@ from resource_management.libraries.script.script import Script
 from setup_logsearch import setup_logsearch
 from logsearch_common import kill_process
 
-
 class LogSearch(Script):
   def install(self, env):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/d5e152af/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch_config_aggregator.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch_config_aggregator.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch_config_aggregator.py
new file mode 100644
index 0000000..2e9dda3
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch_config_aggregator.py
@@ -0,0 +1,67 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+from resource_management import Logger
+
def __parse_component_mappings(component_mappings):
  """
  Parse a component-mapping descriptor string of the form
  'component1:logid1,logid2;component2:logid3' and return the flat list of
  all log ids found across every component.

  Entries that do not contain exactly one ':' separator are skipped silently.
  str.split always yields at least one element, so no emptiness guards are
  needed around the split results (the original's truthiness/len checks were
  redundant and have been removed).
  """
  components = list()
  for entry in component_mappings.split(';'):
    metadata = entry.split(':')
    if len(metadata) == 2:
      logids = metadata[1].split(',')
      components.extend(logids)
      Logger.info("Found logids for logsearch component %s - (%s) " % (metadata[0], metadata[1]))
  return components
+
def get_logsearch_meta_configs(configurations):
  """
  Collect every configuration entry whose type name ends with
  'logsearch-conf' and return them as a {config_name : config_dict} mapping.
  """
  logsearch_meta_configs = {}
  for name, config in configurations.iteritems():
    if not str(name).endswith('logsearch-conf'):
      continue
    Logger.info("Found logsearch config entry : " + name)
    logsearch_meta_configs[name] = config
  return logsearch_meta_configs
+
def get_logfeeder_metadata(logsearch_meta_configs):
  """
  Extract the logfeeder pattern metadata from each logsearch config entry,
  returning a {service_config_name : pattern_json_content} mapping for every
  entry that carries a 'content' key.
  """
  logfeeder_contents = {}
  for name, config in logsearch_meta_configs.iteritems():
    if 'content' in config:
      Logger.info("Found logfeeder pattern content in " + name)
      logfeeder_contents[name] = config['content']
  return logfeeder_contents
+
def get_logsearch_metadata(logsearch_meta_configs):
  """
  get logsearch metadata list, an element (e.g.) :
  ['service_name_key' : {component1 : [logid1, logid2]}, {component2 : [logid1, logid2]}]

  Only entries that define BOTH 'service_name' and 'component_mappings' are
  included; entries missing either key are skipped.
  """
  logsearch_service_component_mappings = {}
  for key, value in logsearch_meta_configs.iteritems():
    # Bug fix: the original condition ended with "and 'component_mappings'",
    # which tests the truthiness of the literal string (always True) instead
    # of membership, causing a KeyError below whenever a config defined
    # 'service_name' without 'component_mappings'.
    if 'service_name' in value and 'component_mappings' in value:
      service_name = value['service_name']
      component_mappings = __parse_component_mappings(value['component_mappings'])
      logsearch_service_component_mappings[service_name] = component_mappings

  return logsearch_service_component_mappings
+