You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by mg...@apache.org on 2017/04/12 13:02:32 UTC

[1/5] ambari git commit: AMBARI-20578 Log Search Configuration API (mgergely)

Repository: ambari
Updated Branches:
  refs/heads/trunk 754d6c872 -> 0ac0ba424


http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHBASE/configuration/hbase-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHBASE/configuration/hbase-logsearch-conf.xml b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHBASE/configuration/hbase-logsearch-conf.xml
deleted file mode 100644
index 891445d..0000000
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHBASE/configuration/hbase-logsearch-conf.xml
+++ /dev/null
@@ -1,111 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>HBase</value>
-    <on-ambari-upgrade add="false"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>FAKEHBASE_MASTER:hbase_master;FAKEHBASE_REGIONSERVER:hbase_regionserver;FAKEPHOENIX_QUERY_SERVER:hbase_phoenix_server</value>
-    <on-ambari-upgrade add="false"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"hbase_master",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/hbase-*-master-*.log"
-    },
-    {
-      "type":"hbase_regionserver",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/hbase-*-regionserver-*.log"
-    },
-    {
-      "type":"hbase_phoenix_server",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/phoenix-*-server.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hbase_master",
-            "hbase_regionserver"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hbase_phoenix_server"
-          ]
-         }
-      },
-      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="false"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHBASE/package/templates/input.config-hbase.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHBASE/package/templates/input.config-hbase.json.j2 b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHBASE/package/templates/input.config-hbase.json.j2
new file mode 100644
index 0000000..94fbc64
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHBASE/package/templates/input.config-hbase.json.j2
@@ -0,0 +1,79 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"hbase_master",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/hbase-*-master-*.log"
+    },
+    {
+      "type":"hbase_regionserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/hbase-*-regionserver-*.log"
+    },
+    {
+      "type":"hbase_phoenix_server",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/phoenix-*-server.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hbase_master",
+            "hbase_regionserver"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hbase_phoenix_server"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/configuration/hdfs-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/configuration/hdfs-logsearch-conf.xml b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/configuration/hdfs-logsearch-conf.xml
deleted file mode 100644
index 96abb55..0000000
--- a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/configuration/hdfs-logsearch-conf.xml
+++ /dev/null
@@ -1,248 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>FAKEHDFS</value>
-    <on-ambari-upgrade add="false"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>FAKENAMENODE:hdfs_namenode;FAKEDATANODE:hdfs_datanode;SECONDARY_FAKENAMENODE:hdfs_secondarynamenode;FAKEJOURNALNODE:hdfs_journalnode;FAKEZKFC:hdfs_zkfc;FAKENFS_GATEWAY:hdfs_nfs3</value>
-    <on-ambari-upgrade add="false"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"hdfs_datanode",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-datanode-*.log"
-    },
-    {
-      "type":"hdfs_namenode",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-namenode-*.log"
-    },
-    {
-      "type":"hdfs_journalnode",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-journalnode-*.log"
-    },
-    {
-      "type":"hdfs_secondarynamenode",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-secondarynamenode-*.log"
-    },
-    {
-      "type":"hdfs_zkfc",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-zkfc-*.log"
-    },
-    {
-      "type":"hdfs_nfs3",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-nfs3-*.log"
-    },
-    {
-      "type":"hdfs_audit",
-      "rowtype":"audit",
-      "is_enabled":"true",
-      "add_fields":{
-        "logType":"FAKEHDFSAudit",
-        "enforcer":"hadoop-acl",
-        "repoType":"1",
-        "repo":"hdfs"
-      },
-      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hdfs-audit.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hdfs_datanode",
-            "hdfs_journalnode",
-            "hdfs_secondarynamenode",
-            "hdfs_namenode",
-            "hdfs_zkfc",
-            "hdfs_nfs3"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-        }
-       }
-     },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hdfs_audit"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "evtTime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     },
-    {
-      "filter":"keyvalue",
-      "sort_order":1,
-      "conditions":{
-        "fields":{
-          "type":[
-            "hdfs_audit"
-          ]
-         }
-       },
-      "source_field":"log_message",
-      "value_split":"=",
-      "field_split":"\t",
-      "post_map_values":{
-        "src":{
-          "map_fieldname":{
-            "new_fieldname":"resource"
-          }
-         },
-        "ip":{
-          "map_fieldname":{
-            "new_fieldname":"cliIP"
-          }
-         },
-        "allowed":[
-          {
-            "map_fieldvalue":{
-              "pre_value":"true",
-              "post_value":"1"
-            }
-           },
-          {
-            "map_fieldvalue":{
-              "pre_value":"false",
-              "post_value":"0"
-            }
-           },
-          {
-            "map_fieldname":{
-              "new_fieldname":"result"
-            }
-           }
-         ],
-        "cmd":{
-          "map_fieldname":{
-            "new_fieldname":"action"
-          }
-         },
-        "proto":{
-          "map_fieldname":{
-            "new_fieldname":"cliType"
-          }
-         },
-        "callerContext":{
-          "map_fieldname":{
-            "new_fieldname":"req_caller_id"
-          }
-         }
-       }
-     },
-    {
-      "filter":"grok",
-      "sort_order":2,
-      "source_field":"ugi",
-      "remove_source_field":"false",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hdfs_audit"
-          ]
-         }
-       },
-      "message_pattern":"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}",
-      "post_map_values":{
-        "user":{
-          "map_fieldname":{
-            "new_fieldname":"reqUser"
-          }
-         },
-        "x_user":{
-          "map_fieldname":{
-            "new_fieldname":"reqUser"
-          }
-         },
-        "p_user":{
-          "map_fieldname":{
-            "new_fieldname":"reqUser"
-          }
-         },
-        "k_user":{
-          "map_fieldname":{
-            "new_fieldname":"proxyUsers"
-          }
-         },
-        "p_authType":{
-          "map_fieldname":{
-            "new_fieldname":"authType"
-          }
-         },
-        "k_authType":{
-          "map_fieldname":{
-            "new_fieldname":"proxyAuthType"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="false"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/package/templates/input.config-hdfs.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/package/templates/input.config-hdfs.json.j2 b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/package/templates/input.config-hdfs.json.j2
new file mode 100644
index 0000000..af89b90
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PERF/1.0/services/FAKEHDFS/package/templates/input.config-hdfs.json.j2
@@ -0,0 +1,216 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"hdfs_datanode",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-datanode-*.log"
+    },
+    {
+      "type":"hdfs_namenode",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-namenode-*.log"
+    },
+    {
+      "type":"hdfs_journalnode",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-journalnode-*.log"
+    },
+    {
+      "type":"hdfs_secondarynamenode",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-secondarynamenode-*.log"
+    },
+    {
+      "type":"hdfs_zkfc",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-zkfc-*.log"
+    },
+    {
+      "type":"hdfs_nfs3",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-nfs3-*.log"
+    },
+    {
+      "type":"hdfs_audit",
+      "rowtype":"audit",
+      "is_enabled":"true",
+      "add_fields":{
+        "logType":"FAKEHDFSAudit",
+        "enforcer":"hadoop-acl",
+        "repoType":"1",
+        "repo":"hdfs"
+      },
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hdfs-audit.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_datanode",
+            "hdfs_journalnode",
+            "hdfs_secondarynamenode",
+            "hdfs_namenode",
+            "hdfs_zkfc",
+            "hdfs_nfs3"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_audit"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "evtTime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    },
+    {
+      "filter":"keyvalue",
+      "sort_order":1,
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_audit"
+          ]
+        }
+      },
+      "source_field":"log_message",
+      "value_split":"=",
+      "field_split":"\t",
+      "post_map_values":{
+        "src":{
+          "map_fieldname":{
+            "new_fieldname":"resource"
+          }
+        },
+        "ip":{
+          "map_fieldname":{
+            "new_fieldname":"cliIP"
+          }
+        },
+        "allowed":[
+          {
+            "map_fieldvalue":{
+              "pre_value":"true",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"false",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldname":{
+              "new_fieldname":"result"
+            }
+          }
+        ],
+        "cmd":{
+          "map_fieldname":{
+            "new_fieldname":"action"
+          }
+        },
+        "proto":{
+          "map_fieldname":{
+            "new_fieldname":"cliType"
+          }
+        },
+        "callerContext":{
+          "map_fieldname":{
+            "new_fieldname":"req_caller_id"
+          }
+        }
+      }
+    },
+    {
+      "filter":"grok",
+      "sort_order":2,
+      "source_field":"ugi",
+      "remove_source_field":"false",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_audit"
+          ]
+        }
+      },
+      "message_pattern":"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}",
+      "post_map_values":{
+        "user":{
+          "map_fieldname":{
+            "new_fieldname":"reqUser"
+          }
+        },
+        "x_user":{
+          "map_fieldname":{
+            "new_fieldname":"reqUser"
+          }
+        },
+        "p_user":{
+          "map_fieldname":{
+            "new_fieldname":"reqUser"
+          }
+        },
+        "k_user":{
+          "map_fieldname":{
+            "new_fieldname":"proxyUsers"
+          }
+        },
+        "p_authType":{
+          "map_fieldname":{
+            "new_fieldname":"authType"
+          }
+        },
+        "k_authType":{
+          "map_fieldname":{
+            "new_fieldname":"proxyAuthType"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
index a44c2b3..5aef50a 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog300Test.java
@@ -17,6 +17,8 @@
  */
 package org.apache.ambari.server.upgrade;
 
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.anyString;
 import static org.easymock.EasyMock.capture;
 import static org.easymock.EasyMock.createMockBuilder;
 import static org.easymock.EasyMock.eq;
@@ -25,24 +27,39 @@ import static org.easymock.EasyMock.newCapture;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.reset;
 import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.lang.reflect.Method;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.Statement;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
 
 import javax.persistence.Cache;
 import javax.persistence.EntityManager;
 import javax.persistence.EntityManagerFactory;
 import javax.persistence.EntityTransaction;
 
+import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.AmbariManagementControllerImpl;
 import org.apache.ambari.server.controller.MaintenanceStateHelper;
 import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.Capture;
+import org.easymock.CaptureType;
 import org.easymock.EasyMock;
 import org.easymock.EasyMockRunner;
+import org.easymock.EasyMockSupport;
 import org.easymock.Mock;
 import org.easymock.MockType;
 import org.junit.After;
@@ -51,6 +68,8 @@ import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
 import com.google.gson.Gson;
 import com.google.inject.Binder;
 import com.google.inject.Guice;
@@ -79,6 +98,18 @@ public class UpgradeCatalog300Test {
   @Mock(type = MockType.NICE)
   private Configuration configuration;
 
+  @Mock(type = MockType.NICE)
+  private Config config;
+
+  @Mock(type = MockType.NICE)
+  private ActionManager actionManager;
+
+  @Mock(type = MockType.NICE)
+  private Clusters clusters;
+
+  @Mock(type = MockType.NICE)
+  private Cluster cluster;
+
   @Before
   public void init() {
     reset(entityManagerProvider, injector);
@@ -204,4 +235,60 @@ public class UpgradeCatalog300Test {
 
     verify(dbAccessor, entityManager, emFactory, emCache);
   }
+  
+  @Test
+  public void testLogSearchUpdateConfigs() throws Exception {
+    reset(clusters, cluster);
+    expect(clusters.getClusters()).andReturn(ImmutableMap.of("normal", cluster)).once();
+
+    EasyMockSupport easyMockSupport = new EasyMockSupport();
+
+    Injector injector2 = easyMockSupport.createNiceMock(Injector.class);
+    AmbariManagementControllerImpl controller = createMockBuilder(AmbariManagementControllerImpl.class)
+        .addMockedMethod("createConfiguration")
+        .addMockedMethod("getClusters", new Class[]{})
+        .addMockedMethod("createConfig")
+        .withConstructor(actionManager, clusters, injector)
+        .createNiceMock();
+
+    expect(injector2.getInstance(AmbariManagementController.class)).andReturn(controller).anyTimes();
+    expect(controller.getClusters()).andReturn(clusters).anyTimes();
+
+    Config confSomethingElse1 = easyMockSupport.createNiceMock(Config.class);
+    expect(confSomethingElse1.getType()).andReturn("something-else-1");
+    Config confSomethingElse2 = easyMockSupport.createNiceMock(Config.class);
+    expect(confSomethingElse2.getType()).andReturn("something-else-2");
+    Config confLogSearchConf1 = easyMockSupport.createNiceMock(Config.class);
+    expect(confLogSearchConf1.getType()).andReturn("service-1-logsearch-conf");
+    Config confLogSearchConf2 = easyMockSupport.createNiceMock(Config.class);
+    expect(confLogSearchConf2.getType()).andReturn("service-2-logsearch-conf");
+    
+    Map<String, String> oldLogSearchConf = ImmutableMap.of(
+        "service_name", "Service",
+        "component_mappings", "Component Mappings",
+        "content", "Content");
+
+    Collection<Config> configs = Arrays.asList(confSomethingElse1, confLogSearchConf1, confSomethingElse2, confLogSearchConf2);
+    
+    expect(cluster.getAllConfigs()).andReturn(configs).atLeastOnce();
+    expect(cluster.getDesiredConfigByType("service-1-logsearch-conf")).andReturn(confLogSearchConf1).once();
+    expect(cluster.getDesiredConfigByType("service-2-logsearch-conf")).andReturn(confLogSearchConf2).once();
+    expect(confLogSearchConf1.getProperties()).andReturn(oldLogSearchConf).once();
+    expect(confLogSearchConf2.getProperties()).andReturn(oldLogSearchConf).once();
+    Capture<Map<String, String>> logSearchConfCapture = EasyMock.newCapture(CaptureType.ALL);
+    expect(controller.createConfig(anyObject(Cluster.class), anyString(), capture(logSearchConfCapture), anyString(),
+        EasyMock.<Map<String, Map<String, String>>>anyObject())).andReturn(config).times(2);
+
+    replay(clusters, cluster);
+    replay(controller, injector2);
+    replay(confSomethingElse1, confSomethingElse2, confLogSearchConf1, confLogSearchConf2);
+    new UpgradeCatalog300(injector2).updateLogSearchConfigs();
+    easyMockSupport.verifyAll();
+
+    List<Map<String, String>> updatedLogSearchConfs = logSearchConfCapture.getValues();
+    assertEquals(updatedLogSearchConfs.size(), 2);
+    for (Map<String, String> updatedLogSearchConf : updatedLogSearchConfs) {
+      assertTrue(Maps.difference(Collections.<String, String> emptyMap(), updatedLogSearchConf).areEqual());
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
index 0a7f074..662e4a7 100644
--- a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
+++ b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logfeeder.py
@@ -67,15 +67,18 @@ class TestLogFeeder(RMFTestCase):
                               action = ['delete']
                               )
     self.assertResourceCalled('PropertiesFile', '/etc/ambari-logsearch-logfeeder/conf/logfeeder.properties',
-                              properties={'hadoop.security.credential.provider.path': 'jceks://file/etc/ambari-logsearch-logfeeder/conf/logfeeder.jceks',
+                              properties={'cluster.name': 'c1',
+                                          'hadoop.security.credential.provider.path': 'jceks://file/etc/ambari-logsearch-logfeeder/conf/logfeeder.jceks',
                                           'logfeeder.checkpoint.folder': '/etc/ambari-logsearch-logfeeder/conf/checkpoints',
-                                          'logfeeder.config.files': 'output.config.json,input.config-ambari.json,global.config.json,input.config-logsearch.json,input.config-zookeeper.json',
+                                          'logfeeder.config.dir': '/etc/ambari-logsearch-logfeeder/conf',
+                                          'logfeeder.config.files': 'output.config.json,global.config.json',
                                           'logfeeder.metrics.collector.hosts': '',
                                           'logfeeder.metrics.collector.path': '/ws/v1/timeline/metrics',
                                           'logfeeder.metrics.collector.port': '',
                                           'logfeeder.metrics.collector.protocol': '',
                                           'logfeeder.solr.core.config.name': 'history',
-                                          'logfeeder.solr.zk_connect_string': 'c6401.ambari.apache.org:2181/infra-solr'
+                                          'logfeeder.solr.zk_connect_string': 'c6401.ambari.apache.org:2181/infra-solr',
+                                          'logsearch.config.zk_connect_string': 'c6401.ambari.apache.org:2181'
                                          }
                               )
     self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/logfeeder-env.sh',
@@ -89,6 +92,9 @@ class TestLogFeeder(RMFTestCase):
                               content=InlineTemplate('GP'),
                               encoding='utf-8'
                               )
+    self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/global.config.json',
+                              content=Template('global.config.json.j2')
+                              )
     self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/input.config-ambari.json',
                               content=InlineTemplate('ambari-grok-filter'),
                               encoding='utf-8'
@@ -98,22 +104,6 @@ class TestLogFeeder(RMFTestCase):
                               encoding='utf-8'
                               )
 
-    logfeeder_supported_services = ['logsearch']
-
-    logfeeder_config_file_names = ['global.config.json'] + \
-                                  ['input.config-%s.json' % (tag) for tag in logfeeder_supported_services]
-
-    for file_name in logfeeder_config_file_names:
-      self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/' + file_name,
-                                content=Template(file_name + ".j2")
-                                )
-    self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/input.config-logfeeder-custom.json',
-                              action=['delete']
-                              )
-    self.assertResourceCalled('File', '/etc/ambari-logsearch-logfeeder/conf/input.config-zookeeper.json',
-                              content=InlineTemplate("pattern content")
-                              )
-
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/logfeeder.py",
                        classname="LogFeeder",

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
index b283a9f..52ac316 100644
--- a/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
+++ b/ambari-server/src/test/python/stacks/2.4/LOGSEARCH/test_logsearch.py
@@ -96,6 +96,7 @@ class TestLogSearch(RMFTestCase):
                                             'logsearch.collection.history.replication.factor': '1',
                                             'logsearch.collection.service.logs.numshards': '10',
                                             'logsearch.collection.service.logs.replication.factor': '1',
+                                            'logsearch.config.zk_connect_string': 'c6401.ambari.apache.org:2181',
                                             'logsearch.login.credentials.file': 'logsearch-admin.json',
                                             'logsearch.protocol': 'http',
                                             'logsearch.roles.allowed': 'AMBARI.ADMINISTRATOR,CLUSTER.ADMINISTRATOR',


[2/5] ambari git commit: AMBARI-20578 Log Search Configuration API (mgergely)

Posted by mg...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/HadoopServiceConfig.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/HadoopServiceConfig.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/HadoopServiceConfig.json.j2
index efa51fa..293a772 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/HadoopServiceConfig.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/HadoopServiceConfig.json.j2
@@ -15,84 +15,491 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  #}
- {
-   "service": {
-{% if logsearch_metadata and logsearch_metadata.keys()|length > 0 %}
-{%   for metadata_key, component_list in logsearch_metadata.iteritems() %}
-    "{{ metadata_key.lower() }}": {
-      "label": "{{ metadata_key }}",
+{
+  "service": {
+    "accumulo": {
+      "label": "Accumulo",
       "components": [
-{%     for component in component_list %}
         {
-          "name": "{{ component }}"
-        }{% if not loop.last %},{% endif %}
-
-{%     endfor %}
+          "name": "accumulo_gc"
+        },
+        {
+          "name": "accumulo_master"
+        },
+        {
+          "name": "accumulo_monitor"
+        },
+        {
+          "name": "accumulo_tracer"
+        },
+        {
+          "name": "accumulo_tserver"
+        }
       ],
       "dependencies": [
       ]
-    }
-     ,
-{%   endfor %}
-{% endif %}
-{% if logfeeder_system_log_enabled %}
-     "system" : {
-       "label" : "System",
-       "components" : [
+    },
+    "atlas": {
+      "label": "Atlas",
+      "components": [
+        {
+          "name": "atlas_app"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "ambari": {
+      "label": "Ambari",
+      "components": [
+        {
+          "name": "ambari_agent"
+        },
+        {
+          "name": "ambari_server"
+        },
+        {
+          "name": "ambari_alerts"
+        },
+        {
+          "name": "ambari_audit"
+        },
+        {
+          "name": "ambari_config_changes"
+        },
+        {
+          "name": "ambari_eclipselink"
+        },
+        {
+          "name": "ambari_server_check_database"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "ams": {
+      "label": "AMS",
+      "components": [
+        {
+          "name": "ams_hbase_master"
+        },
+        {
+          "name": "ams_hbase_regionserver"
+        },
+        {
+          "name": "ams_collector"
+        },
+        {
+          "name": "ams_monitor"
+        },
+        {
+          "name": "ams_grafana"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "falcon": {
+      "label": "Falcon",
+      "components": [
+        {
+          "name": "falcon_app"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "flume": {
+      "label": "Flume",
+      "components": [
+        {
+          "name": "flume_handler"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "hbase": {
+      "label": "HBase",
+      "components": [
+        {
+          "name": "hbase_master"
+        },
+        {
+          "name": "hbase_regionserver"
+        },
+        {
+          "name": "hbase_phoenix_server"
+        }
+      ],
+      "dependencies": [
+        {
+          "service": "hdfs",
+          "components": [
+            "hdfs_namenode"
+          ]
+        }
+      ]
+    },
+    "hdfs": {
+      "label": "HDFS",
+      "components": [
+        {
+          "name": "hdfs_datanode"
+        },
+        {
+          "name": "hdfs_namenode"
+        },
+        {
+          "name": "hdfs_journalnode"
+        },
+        {
+          "name": "hdfs_secondarynamenode"
+        },
+        {
+          "name": "hdfs_zkfc"
+        },
+        {
+          "name": "hdfs_nfs3"
+        },
+        {
+          "name": "hdfs_audit",
+          "rowtype": "audit"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "hive": {
+      "label": "Hive",
+      "components": [
+        {
+          "name": "hive_hiveserver2"
+        },
+        {
+          "name": "hive_metastore"
+        },
+        {
+          "name": "webhcat_server"
+        }
+      ],
+      "dependencies": [
+        {
+          "service": "hdfs",
+          "components": [
+            "hdfs_namenode"
+          ]
+        }
+      ]
+    },
+    "infra" : {
+      "label" : "Infra",
+      "components": [
+        {
+          "name": "infra_solr"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "kafka": {
+      "label": "Kafka",
+      "components": [
+        {
+          "name": "kafka_controller"
+        },
+        {
+          "name": "kafka_request"
+        },
+        {
+          "name": "kafka_logcleaner"
+        },
+        {
+          "name": "kafka_server"
+        },
+        {
+          "name": "kafka_statechange"
+        }
+      ],
+      "dependencies": [
+        {
+          "service": "zookeeper",
+          "components": [
+            "zookeeper"
+          ]
+        }
+      ]
+    },
+    "knox": {
+      "label": "Knox",
+      "components": [
+        {
+          "name": "knox_gateway"
+        },
+        {
+          "name": "knox_cli"
+        },
+        {
+          "name": "knox_ldap"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "nifi": {
+      "label": "NiFi",
+      "components": [
+        {
+          "name": "nifi_app"
+        },
+        {
+          "name": "nifi_bootstrap"
+        },
+        {
+          "name": "nifi_setup"
+        },
+        {
+          "name": "nifi_user"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "mapred": {
+      "label": "MapReduce",
+      "components": [
+        {
+          "name": "mapred_historyserver"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "logsearch": {
+      "label": "Logsearch",
+      "components": [
+        {
+          "name": "logsearch_app"
+        },
+        {
+          "name": "logsearch_feeder"
+        },
+        {
+          "name": "logsearch_perf"
+        },
+        {
+          "name": "logsearch_solr"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "ranger": {
+      "label": "Ranger",
+      "components": [
+        {
+          "name": "ranger_admin"
+        },
+        {
+          "name": "ranger_dbpatch"
+        },
+        {
+          "name": "ranger_kms"
+        },
+        {
+          "name": "ranger_usersync"
+        }
+      ],
+      "dependencies": [
+        {
+          "service": "hdfs",
+          "required": "optional",
+          "components": [
+            "hdfs_namenode"
+          ]
+        },
+        {
+          "service": "hbase",
+          "required": "optional",
+          "components": [
+            "hbase_master",
+            "hbase_regionserver"
+          ]
+        },
+        {
+          "service": "hive",
+          "required": "optional",
+          "components": [
+            "hive_hiveserver2"
+          ]
+        },
+        {
+          "service": "kafka",
+          "required": "optional",
+          "components": [
+            "kafka_ranger"
+          ]
+        },
+        {
+          "service": "knox",
+          "required": "optional",
+          "components": [
+            "knox_gateway"
+          ]
+        },
+        {
+          "service": "storm",
+          "required": "optional",
+          "components": [
+            "storm_supervisor"
+          ]
+        },
+        {
+          "service": "yarn",
+          "required": "optional",
+          "components": [
+            "yarn_resourcemanager"
+          ]
+        }
+      ]
+    },
+    "oozie": {
+      "label": "Oozie",
+      "components": [
+        {
+          "name": "oozie_app"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "hst": {
+      "label": "SmartSense",
+      "components": [
+        {
+          "name": "hst_server"
+        },
+        {
+          "name": "hst_agent"
+        },
+        {
+          "name": "activity_analyzer"
+        },
+        {
+          "name": "activity_explorer"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "spark": {
+      "label": "Spark",
+      "components": [
+        {
+          "name": "spark_jobhistory_server"
+        },
+        {
+          "name": "spark_thriftserver"
+        },
+        {
+          "name": "livy_server"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "spark2": {
+      "label": "Spark 2",
+      "components": [
+        {
+          "name": "spark2_jobhistory_server"
+        },
+        {
+          "name": "spark2_thriftserver"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "storm": {
+      "label": "Storm",
+      "components": [
+        {
+          "name": "storm_drpc"
+        },
+        {
+          "name": "storm_logviewer"
+        },
+        {
+          "name": "storm_nimbus"
+        },
+        {
+          "name": "storm_supervisor"
+        },
+        {
+          "name": "storm_ui"
+        },
+        {
+          "name": "storm_worker"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "yarn": {
+      "label": "YARN",
+      "components": [
+        {
+          "name": "yarn_nodemanager"
+        },
+        {
+          "name": "yarn_resourcemanager"
+        },
+        {
+          "name": "yarn_timelineserver"
+        },
+        {
+          "name": "yarn_historyserver"
+        },
+        {
+          "name": "yarn_jobsummary"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "zeppelin": {
+      "label": "Zeppelin",
+      "components": [
+        {
+          "name": "zeppelin"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "zookeeper": {
+      "label": "ZooKeeper",
+      "components": [
+        {
+          "name": "zookeeper"
+        }
+      ],
+      "dependencies": [
+      ]
+    },
+    "System": {
+      "label": "System",
+      "components": [
         {
           "name": "system_message"
         },
         {
           "name": "secure_log"
         }
-       ],
-       "dependencies": [
-       ]
-     },
-{% endif %}
-     "ambari": {
-       "label": "Ambari",
-       "components": [
-         {
-           "name": "ambari_agent"
-         },
-         {
-           "name": "ambari_server"
-         },
-         {
-           "name": "ambari_alerts"
-         },
-         {
-           "name": "ambari_audit"
-         },
-         {
-           "name": "ambari_config_changes"
-         },
-         {
-           "name": "ambari_eclipselink"
-         },
-         {
-           "name": "ambari_server_check_database"
-         }
-       ],
-       "dependencies": [
-       ]
-     },
-     "logsearch": {
-       "label": "Logsearch",
-       "components": [
-       {
-         "name": "logsearch_app"
-       },
-       {
-         "name": "logsearch_feeder"
-       },
-       {
-         "name": "logsearch_perf"
-       }
-       ],
-       "dependencies": [
-       ]
-      }
+      ],
+      "dependencies": [
+      ]
+    }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-logsearch.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-logsearch.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-logsearch.json.j2
index ea91405..25bfa63 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-logsearch.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/input.config-logsearch.json.j2
@@ -20,17 +20,17 @@
     {
       "type":"logsearch_app",
       "rowtype":"service",
-      "path":"{{logsearch_log_dir}}/logsearch.json"
+      "path":"{{default('/configurations/logsearch-env/logsearch_log_dir', '/var/log/ambari-logsearch-portal')}}/logsearch.json"
     },
     {
       "type":"logsearch_feeder",
       "rowtype":"service",
-      "path":"{{logfeeder_log_dir}}/logsearch-logfeeder.json"
+      "path":"{{default('/configurations/logfeeder-env/logfeeder_log_dir', '/var/log/ambari-logsearch-logfeeder')}}/logsearch-logfeeder.json"
     },
     {
       "type":"logsearch_perf",
       "rowtype":"service",
-      "path":"{{logsearch_log_dir}}/logsearch-performance.json"
+      "path":"{{default('/configurations/logfeeder-env/logfeeder_log_dir', '/var/log/ambari-logsearch-logfeeder')}}/logsearch-performance.json"
     }
   ],
   "filter":[

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-logsearch-conf.xml
deleted file mode 100644
index 2f13d3f..0000000
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/configuration/oozie-logsearch-conf.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Oozie</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>OOZIE_SERVER:oozie_app</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"oozie_app",
-      "rowtype":"service",
-      "path":"{{default('/configurations/oozie-env/oozie_log_dir', '/var/log/oozie')}}/oozie.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "oozie_app"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} %5p %c{1}:%L - SERVER[${oozie.instance.id}] %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{DATA:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/templates/input.config-oozie.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/templates/input.config-oozie.json.j2 b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/templates/input.config-oozie.json.j2
new file mode 100644
index 0000000..4a54f74
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/templates/input.config-oozie.json.j2
@@ -0,0 +1,48 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"oozie_app",
+      "rowtype":"service",
+      "path":"{{default('/configurations/oozie-env/oozie_log_dir', '/var/log/oozie')}}/oozie.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "oozie_app"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %5p %c{1}:%L - SERVER[${oozie.instance.id}] %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{DATA:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/ranger-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/ranger-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/ranger-logsearch-conf.xml
deleted file mode 100644
index fc26336..0000000
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/ranger-logsearch-conf.xml
+++ /dev/null
@@ -1,111 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Ranger</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>RANGER_SERVER:ranger_admin,ranger_dbpatch;RANGER_USERSYNC:ranger_usersync;</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"ranger_admin",
-      "rowtype":"service",
-      "path":"{{default('/configurations/ranger-env/ranger_admin_log_dir', '/var/log/ranger/admin')}}/xa_portal.log"
-    },
-    {
-      "type":"ranger_dbpatch",
-      "is_enabled":"true",
-      "path":"{{default('/configurations/ranger-env/ranger_admin_log_dir', '/var/log/ranger/admin')}}/ranger_db_patch.log"
-    },
-    {
-      "type":"ranger_usersync",
-      "rowtype":"service",
-      "path":"{{default('/configurations/ranger-env/ranger_usersync_log_dir', '/var/log/ranger/usersync')}}/usersync.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ranger_admin",
-            "ranger_dbpatch"
-          ]
-         }
-       },
-      "log4j_format":"%d [%t] %-5p %C{6} (%F:%L) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ranger_usersync"
-          ]
-         }
-       },
-      "log4j_format":"%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n",
-      "multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
-      "message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"dd MMM yyyy HH:mm:ss"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/templates/input.config-ranger.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/templates/input.config-ranger.json.j2 b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/templates/input.config-ranger.json.j2
new file mode 100644
index 0000000..6c5bb1f
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/templates/input.config-ranger.json.j2
@@ -0,0 +1,79 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"ranger_admin",
+      "rowtype":"service",
+      "path":"{{default('/configurations/ranger-env/ranger_admin_log_dir', '/var/log/ranger/admin')}}/xa_portal.log"
+    },
+    {
+      "type":"ranger_dbpatch",
+      "is_enabled":"true",
+      "path":"{{default('/configurations/ranger-env/ranger_admin_log_dir', '/var/log/ranger/admin')}}/ranger_db_patch.log"
+    },
+    {
+      "type":"ranger_usersync",
+      "rowtype":"service",
+      "path":"{{default('/configurations/ranger-env/ranger_usersync_log_dir', '/var/log/ranger/usersync')}}/usersync.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ranger_admin",
+            "ranger_dbpatch"
+          ]
+        }
+      },
+      "log4j_format":"%d [%t] %-5p %C{6} (%F:%L) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ranger_usersync"
+          ]
+        }
+      },
+      "log4j_format":"%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n",
+      "multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
+      "message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"dd MMM yyyy HH:mm:ss"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/ranger-kms-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/ranger-kms-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/ranger-kms-logsearch-conf.xml
deleted file mode 100644
index 226d2b0..0000000
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/configuration/ranger-kms-logsearch-conf.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Ranger KMS</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>RANGER_KMS:ranger_kms</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"ranger_kms",
-      "rowtype":"service",
-      "path":"{{default('/configurations/kms-env/kms_log_dir', '/var/log/ranger/kms')}}/kms.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ranger_kms"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} %-5p %c{1} - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/templates/input.config-ranger-kms.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/templates/input.config-ranger-kms.json.j2 b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/templates/input.config-ranger-kms.json.j2
new file mode 100644
index 0000000..306fade
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/templates/input.config-ranger-kms.json.j2
@@ -0,0 +1,48 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"ranger_kms",
+      "rowtype":"service",
+      "path":"{{default('/configurations/kms-env/kms_log_dir', '/var/log/ranger/kms')}}/kms.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ranger_kms"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p %c{1} - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-logsearch-conf.xml
deleted file mode 100644
index 63201ef..0000000
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/configuration/spark-logsearch-conf.xml
+++ /dev/null
@@ -1,98 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Spark</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>SPARK_JOBHISTORYSERVER:spark_jobhistory_server;SPARK_THRIFTSERVER:spark_thriftserver;LIVY_SERVER:livy_server</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-   "input":[
-      {
-       "type":"spark_jobhistory_server",
-       "rowtype":"service",
-       "path":"{{default('/configurations/spark-env/spark_log_dir', '/var/log/spark')}}/spark-*-org.apache.spark.deploy.history.HistoryServer*.out"
-     },
-     {
-       "type":"spark_thriftserver",
-       "rowtype":"service",
-       "path":"{{default('/configurations/spark-env/spark_log_dir', '/var/log/spark')}}/spark-*-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2*.out"
-     },
-     {
-       "type":"livy_server",
-       "rowtype":"service",
-       "path":"{{default('/configurations/livy-env/livy_log_dir', '/var/log/livy')}}/livy-livy-server.out"
-     }
-   ],
-   "filter":[
-       {
-          "filter":"grok",
-          "conditions":{
-            "fields":{
-              "type":[
-                "spark_jobhistory_server",
-                "spark_thriftserver",
-                "livy_server"
-              ]
-             }
-          },
-          "log4j_format":"",
-          "multiline_pattern":"^(%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level})",
-          "message_pattern":"(?m)^%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVAFILE:file}:%{SPACE}%{GREEDYDATA:log_message}",
-          "post_map_values":{
-            "logtime":{
-              "map_date":{
-                "target_date_pattern":"yy/MM/dd HH:mm:ss"
-              }
-             },
-            "level":{
-              "map_fieldvalue":{
-                "pre_value":"WARNING",
-                "post_value":"WARN"
-              }
-             }
-           }
-      }
-   ]
-}
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/templates/input.config-spark.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/templates/input.config-spark.json.j2 b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/templates/input.config-spark.json.j2
new file mode 100644
index 0000000..7eea751
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/templates/input.config-spark.json.j2
@@ -0,0 +1,66 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"spark_jobhistory_server",
+      "rowtype":"service",
+      "path":"{{default('/configurations/spark-env/spark_log_dir', '/var/log/spark')}}/spark-*-org.apache.spark.deploy.history.HistoryServer*.out"
+    },
+    {
+      "type":"spark_thriftserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/spark-env/spark_log_dir', '/var/log/spark')}}/spark-*-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2*.out"
+    },
+    {
+      "type":"livy_server",
+      "rowtype":"service",
+      "path":"{{default('/configurations/livy-env/livy_log_dir', '/var/log/livy')}}/livy-livy-server.out"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "spark_jobhistory_server",
+            "spark_thriftserver",
+            "livy_server"
+          ]
+        }
+      },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level})",
+      "message_pattern":"(?m)^%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVAFILE:file}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yy/MM/dd HH:mm:ss"
+          }
+        },
+        "level":{
+          "map_fieldvalue":{
+            "pre_value":"WARNING",
+            "post_value":"WARN"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-logsearch-conf.xml
deleted file mode 100644
index cb71c6b..0000000
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/configuration/spark2-logsearch-conf.xml
+++ /dev/null
@@ -1,98 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Spark2</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>SPARK2_JOBHISTORYSERVER:spark2_jobhistory_server;SPARK2_THRIFTSERVER:spark2_thriftserver;LIVY2_SERVER:livy2_server</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-   "input":[
-      {
-       "type":"spark2_jobhistory_server",
-       "rowtype":"service",
-       "path":"{{default('/configurations/spark2-env/spark_log_dir', '/var/log/spark2')}}/spark-*-org.apache.spark.deploy.history.HistoryServer*.out"
-     },
-     {
-       "type":"spark2_thriftserver",
-       "rowtype":"service",
-       "path":"{{default('/configurations/spark2-env/spark_log_dir', '/var/log/spark2')}}/spark-*-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2*.out"
-     },
-     {
-       "type":"livy2_server",
-       "rowtype":"service",
-       "path":"{{default('/configurations/livy2-env/livy2_log_dir', '/var/log/livy2')}}/livy-livy-server.out"
-     }
-   ],
-   "filter":[
-      {
-          "filter":"grok",
-          "conditions":{
-            "fields":{
-              "type":[
-                "spark2_jobhistory_server",
-                "spark2_thriftserver",
-                "livy2_server"
-              ]
-             }
-          },
-          "log4j_format":"",
-          "multiline_pattern":"^(%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level})",
-          "message_pattern":"(?m)^%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVAFILE:file}:%{SPACE}%{GREEDYDATA:log_message}",
-          "post_map_values":{
-            "logtime":{
-              "map_date":{
-                "target_date_pattern":"yy/MM/dd HH:mm:ss"
-              }
-             },
-            "level":{
-              "map_fieldvalue":{
-                "pre_value":"WARNING",
-                "post_value":"WARN"
-              }
-             }
-           }
-      }
-   ]
-}
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/templates/input.config-spark2.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/templates/input.config-spark2.json.j2 b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/templates/input.config-spark2.json.j2
new file mode 100644
index 0000000..e90aa65
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/templates/input.config-spark2.json.j2
@@ -0,0 +1,66 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"spark2_jobhistory_server",
+      "rowtype":"service",
+      "path":"{{default('/configurations/spark2-env/spark_log_dir', '/var/log/spark2')}}/spark-*-org.apache.spark.deploy.history.HistoryServer*.out"
+    },
+    {
+      "type":"spark2_thriftserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/spark2-env/spark_log_dir', '/var/log/spark2')}}/spark-*-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2*.out"
+    },
+    {
+      "type":"livy2_server",
+      "rowtype":"service",
+      "path":"{{default('/configurations/livy2-env/livy2_log_dir', '/var/log/livy2')}}/livy-livy-server.out"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "spark2_jobhistory_server",
+            "spark2_thriftserver",
+            "livy2_server"
+          ]
+        }
+      },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level})",
+      "message_pattern":"(?m)^%{SPARK_DATESTAMP:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVAFILE:file}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yy/MM/dd HH:mm:ss"
+          }
+        },
+        "level":{
+          "map_fieldvalue":{
+            "pre_value":"WARNING",
+            "post_value":"WARN"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/STORM/0.9.1/configuration/storm-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/configuration/storm-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/STORM/0.9.1/configuration/storm-logsearch-conf.xml
deleted file mode 100644
index d485017..0000000
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/configuration/storm-logsearch-conf.xml
+++ /dev/null
@@ -1,110 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Storm</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>NIMBUS:storm_nimbus;SUPERVISOR:storm_supervisor,storm_worker,storm_logviewer;STORM_UI_SERVER:storm_ui;DRPC_SERVER:storm_drpc</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"storm_drpc",
-      "rowtype":"service",
-      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/drpc.log"
-    },
-    {
-      "type":"storm_logviewer",
-      "rowtype":"service",
-      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/logviewer.log"
-    },
-    {
-      "type":"storm_nimbus",
-      "rowtype":"service",
-      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/nimbus.log"
-    },
-    {
-      "type":"storm_supervisor",
-      "rowtype":"service",
-      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/supervisor.log"
-    },
-    {
-      "type":"storm_ui",
-      "rowtype":"service",
-      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/ui.log"
-    },
-    {
-      "type":"storm_worker",
-      "rowtype":"service",
-      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/*worker*.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "storm_drpc",
-            "storm_logviewer",
-            "storm_nimbus",
-            "storm_supervisor",
-            "storm_ui",
-            "storm_worker"
-          ]
-         }
-       },
-      "log4j_format":"",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss.SSS"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/templates/input.config-storm.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/templates/input.config-storm.json.j2 b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/templates/input.config-storm.json.j2
new file mode 100644
index 0000000..a2a4841
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/templates/input.config-storm.json.j2
@@ -0,0 +1,78 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"storm_drpc",
+      "rowtype":"service",
+      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/drpc.log"
+    },
+    {
+      "type":"storm_logviewer",
+      "rowtype":"service",
+      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/logviewer.log"
+    },
+    {
+      "type":"storm_nimbus",
+      "rowtype":"service",
+      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/nimbus.log"
+    },
+    {
+      "type":"storm_supervisor",
+      "rowtype":"service",
+      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/supervisor.log"
+    },
+    {
+      "type":"storm_ui",
+      "rowtype":"service",
+      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/ui.log"
+    },
+    {
+      "type":"storm_worker",
+      "rowtype":"service",
+      "path":"{{default('/configurations/storm-env/storm_log_dir', '/var/log/storm')}}/*worker*.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "storm_drpc",
+            "storm_logviewer",
+            "storm_nimbus",
+            "storm_supervisor",
+            "storm_ui",
+            "storm_worker"
+          ]
+        }
+      },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss.SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-logsearch-conf.xml
deleted file mode 100644
index 3c0abbf..0000000
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration-mapred/mapred-logsearch-conf.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>MapReduce</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>HISTORYSERVER:mapred_historyserver</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"mapred_historyserver",
-      "rowtype":"service",
-      "path":"{{default('/configurations/mapred-env/mapred_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/mapred-env/mapred_user', 'mapred')}}/mapred-{{default('configurations/mapred-env/mapred_user', 'mapred')}}-historyserver*.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "mapred_historyserver"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-logsearch-conf.xml
deleted file mode 100644
index 95cf0c9..0000000
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/configuration/yarn-logsearch-conf.xml
+++ /dev/null
@@ -1,104 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>YARN</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>RESOURCEMANAGER:yarn_resourcemanager,yarn_historyserver,yarn_jobsummary;NODEMANAGER:yarn_nodemanager;APP_TIMELINE_SERVER:yarn_timelineserver</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"yarn_nodemanager",
-      "rowtype":"service",
-      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/yarn-{{default('configurations/yarn-env/yarn_user', 'yarn')}}-nodemanager-*.log"
-    },
-    {
-      "type":"yarn_resourcemanager",
-      "rowtype":"service",
-      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/yarn-{{default('configurations/yarn-env/yarn_user', 'yarn')}}-resourcemanager-*.log"
-    },
-    {
-      "type":"yarn_timelineserver",
-      "rowtype":"service",
-      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/yarn-{{default('configurations/yarn-env/yarn_user', 'yarn')}}-timelineserver-*.log"
-    },
-    {
-      "type":"yarn_historyserver",
-      "rowtype":"service",
-      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/yarn-{{default('configurations/yarn-env/yarn_user', 'yarn')}}-historyserver-*.log"
-    },
-    {
-      "type":"yarn_jobsummary",
-      "rowtype":"service",
-      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/hadoop-mapreduce.jobsummary.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "yarn_historyserver",
-            "yarn_jobsummary",
-            "yarn_nodemanager",
-            "yarn_resourcemanager",
-            "yarn_timelineserver"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     }
-   ]
-}
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/input.config-mapreduce2.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/input.config-mapreduce2.json.j2 b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/input.config-mapreduce2.json.j2
new file mode 100644
index 0000000..8034843
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/input.config-mapreduce2.json.j2
@@ -0,0 +1,48 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"mapred_historyserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/mapred-env/mapred_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/mapred-env/mapred_user', 'mapred')}}/mapred-{{default('configurations/mapred-env/mapred_user', 'mapred')}}-historyserver*.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "mapred_historyserver"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/input.config-yarn.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/input.config-yarn.json.j2 b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/input.config-yarn.json.j2
new file mode 100644
index 0000000..bf1dd37
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/input.config-yarn.json.j2
@@ -0,0 +1,72 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"yarn_nodemanager",
+      "rowtype":"service",
+      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/yarn-{{default('configurations/yarn-env/yarn_user', 'yarn')}}-nodemanager-*.log"
+    },
+    {
+      "type":"yarn_resourcemanager",
+      "rowtype":"service",
+      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/yarn-{{default('configurations/yarn-env/yarn_user', 'yarn')}}-resourcemanager-*.log"
+    },
+    {
+      "type":"yarn_timelineserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/yarn-{{default('configurations/yarn-env/yarn_user', 'yarn')}}-timelineserver-*.log"
+    },
+    {
+      "type":"yarn_historyserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/yarn-{{default('configurations/yarn-env/yarn_user', 'yarn')}}-historyserver-*.log"
+    },
+    {
+      "type":"yarn_jobsummary",
+      "rowtype":"service",
+      "path":"{{default('/configurations/yarn-env/yarn_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/yarn-env/yarn_user', 'yarn')}}/hadoop-mapreduce.jobsummary.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "yarn_historyserver",
+            "yarn_jobsummary",
+            "yarn_nodemanager",
+            "yarn_resourcemanager",
+            "yarn_timelineserver"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-logsearch-conf.xml
deleted file mode 100644
index 0b8ab7a..0000000
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-logsearch-conf.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Zeppelin</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>ZEPPELIN_MASTER:zeppelin</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"zeppelin",
-      "rowtype":"service",
-      "path":"{{default('/configurations/zeppelin-env/zeppelin_log_dir', '/var/log/zeppelin')}}/zeppelin-zeppelin-*.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "zeppelin"
-          ]
-         }
-       },
-      "log4j_format":"",
-      "multiline_pattern":"^(%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\])",
-      "message_pattern":"(?m)^%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}\\(\\{{"{"}}%{DATA:thread_name}\\{{"}"}}%{SPACE}%{JAVAFILE:file}\\[%{JAVAMETHOD:method}\\]:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/templates/input.config-zeppelin.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/templates/input.config-zeppelin.json.j2 b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/templates/input.config-zeppelin.json.j2
new file mode 100644
index 0000000..2b373d5
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/templates/input.config-zeppelin.json.j2
@@ -0,0 +1,48 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"zeppelin",
+      "rowtype":"service",
+      "path":"{{default('/configurations/zeppelin-env/zeppelin_log_dir', '/var/log/zeppelin')}}/zeppelin-zeppelin-*.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "zeppelin"
+          ]
+        }
+      },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\])",
+      "message_pattern":"(?m)^%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}\\(\\{{"{"}}%{DATA:thread_name}\\{{"}"}}%{SPACE}%{JAVAFILE:file}\\[%{JAVAMETHOD:method}\\]:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-logsearch-conf.xml
deleted file mode 100644
index 325af14..0000000
--- a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/configuration/zookeeper-logsearch-conf.xml
+++ /dev/null
@@ -1,76 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Zookeeper</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>ZOOKEEPER_SERVER:zookeeper</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-     "type":"zookeeper",
-     "rowtype":"service",
-     "path":"{{default('/configurations/zookeeper-env/zk_log_dir', '/var/log/zookeeper')}}/zookeeper*.log"
-    }
-  ],
-  "filter":[
-   {
-      "filter":"grok",
-      "conditions":{
-        "fields":{"type":["zookeeper"]}
-      },
-     "log4j_format":"%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n",
-     "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-     "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}-%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\@%{INT:line_number}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-     "post_map_values": {
-       "logtime": {
-         "map_date":{
-           "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-         }
-       }
-     }
-    }
-   ]
-}
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/templates/input.config-zookeeper.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/templates/input.config-zookeeper.json.j2 b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/templates/input.config-zookeeper.json.j2
new file mode 100644
index 0000000..cc20847
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZOOKEEPER/3.4.5/package/templates/input.config-zookeeper.json.j2
@@ -0,0 +1,46 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"zookeeper",
+      "rowtype":"service",
+      "path":"{{default('/configurations/zookeeper-env/zk_log_dir', '/var/log/zookeeper')}}/zookeeper*.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":["zookeeper"]
+        }
+      },
+      "log4j_format":"%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}-%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\@%{INT:line_number}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values": {
+        "logtime": {
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index 1782298..fbea258 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -27,6 +27,7 @@ from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format_jvm_option
 from resource_management.libraries.functions.version import format_stack_version
+from string import lower
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
@@ -44,6 +45,19 @@ stack_version_formatted = format_stack_version(stack_version_unformatted)
 # current host stack version
 current_version = default("/hostLevelParams/current_version", None)
 
+# service name
+service_name = config['serviceName']
+
+# logsearch configuration
+logsearch_logfeeder_conf = "/etc/ambari-logsearch-logfeeder/conf"
+
+agent_cache_dir = config['hostLevelParams']['agentCacheDir']
+service_package_folder = config['commandParams']['service_package_folder']
+logsearch_service_name = service_name.lower().replace("_", "-")
+logsearch_config_file_name = 'input.config-' + logsearch_service_name + ".json"
+logsearch_config_file_path = agent_cache_dir + "/" + service_package_folder + "/templates/" + logsearch_config_file_name + ".j2"
+logsearch_config_file_exists = os.path.exists(logsearch_config_file_path)
+
 # default hadoop params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
index e9f2283..36a202f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -19,9 +19,13 @@ limitations under the License.
 import os
 
 import ambari_simplejson as json
+from ambari_jinja2 import Environment as JinjaEnvironment
 from resource_management.core.logger import Logger
+from resource_management.core.resources.system import Directory, File
+from resource_management.core.source import InlineTemplate, Template
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.functions.fcntl_based_process_lock import FcntlBasedProcessLock
@@ -72,6 +76,19 @@ def setup_config():
               group=params.user_group,
               only_if=format("ls {hadoop_conf_dir}"))
 
+  Directory(params.logsearch_logfeeder_conf,
+            mode=0755,
+            cd_access='a',
+            create_parents=True
+            )
+
+  if params.logsearch_config_file_exists:
+    File(format("{logsearch_logfeeder_conf}/" + params.logsearch_config_file_name),
+         content=Template(params.logsearch_config_file_path,extra_imports=[default])
+         )
+  else:
+    Logger.warning('No logsearch configuration exists at ' + params.logsearch_config_file_path)
+
 
 def load_version(struct_out_file):
   """


[5/5] ambari git commit: AMBARI-20578 Log Search Configuration API (mgergely)

Posted by mg...@apache.org.
AMBARI-20578 Log Search Configuration API (mgergely)

Change-Id: I2415e9402fa002dedb566cfebada4cf34ef1d4a6


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0ac0ba42
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0ac0ba42
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0ac0ba42

Branch: refs/heads/trunk
Commit: 0ac0ba424e31db38704d8e7a59ac60b853094cda
Parents: 754d6c8
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Wed Apr 12 15:02:14 2017 +0200
Committer: Miklos Gergely <mg...@hortonworks.com>
Committed: Wed Apr 12 15:02:14 2017 +0200

----------------------------------------------------------------------
 .../ambari-logsearch-config-api/.gitignore      |   1 +
 .../ambari-logsearch-config-api/pom.xml         |  57 ++
 .../config/api/InputConfigMonitor.java          |  41 ++
 .../logsearch/config/api/LogSearchConfig.java   |  90 +++
 .../config/api/LogSearchConfigFactory.java      |  68 +++
 .../config/api/LogSearchConfigClass1.java       |  55 ++
 .../config/api/LogSearchConfigClass2.java       |  55 ++
 .../config/api/LogSearchConfigFactoryTest.java  |  58 ++
 .../config/api/NonLogSearchConfigClass.java     |  23 +
 .../src/test/resources/log4j.xml                |  34 ++
 .../.gitignore                                  |   1 +
 .../ambari-logsearch-config-zookeeper/pom.xml   |  74 +++
 .../config/zookeeper/LogSearchConfigZK.java     | 213 ++++++++
 .../ambari-logsearch-logfeeder/pom.xml          |  66 ++-
 .../org/apache/ambari/logfeeder/LogFeeder.java  | 384 +------------
 .../ambari/logfeeder/common/ConfigHandler.java  | 428 +++++++++++++++
 .../logfeeder/input/InputConfigUploader.java    |  94 ++++
 .../ambari/logfeeder/input/InputManager.java    | 304 ++++++-----
 .../ambari/logfeeder/input/InputSimulate.java   |  22 +-
 .../ambari/logfeeder/output/OutputManager.java  |   7 -
 .../ambari/logfeeder/output/OutputS3File.java   |   4 +-
 .../ambari/logfeeder/util/LogFeederUtil.java    |   3 +
 .../apache/ambari/logfeeder/util/SSLUtil.java   |   5 +-
 .../src/main/resources/log4j.xml                |   6 +-
 .../ambari/logfeeder/input/InputFileTest.java   |   3 +-
 .../logfeeder/input/InputManagerTest.java       |  70 +--
 .../logfeeder/output/OutputManagerTest.java     |  11 +-
 .../ambari-logsearch-server/pom.xml             |  18 +-
 .../org/apache/ambari/logsearch/LogSearch.java  |   1 +
 .../logsearch/common/PropertiesHelper.java      |   3 +
 .../conf/global/LogSearchConfigState.java       |  35 ++
 .../ambari/logsearch/configurer/Configurer.java |  23 +
 .../configurer/LogSearchConfigConfigurer.java   |  69 +++
 .../configurer/LogfeederFilterConfigurer.java   |   2 +-
 .../configurer/SolrAuditAliasConfigurer.java    |   2 +-
 .../configurer/SolrCollectionConfigurer.java    |   2 +-
 .../logsearch/configurer/SolrConfigurer.java    |  23 -
 .../ambari/logsearch/doc/DocConstants.java      |   6 +
 .../logsearch/manager/ShipperConfigManager.java |  81 +++
 .../logsearch/rest/ShipperConfigResource.java   |  73 +++
 .../ambari-logsearch-web/.gitignore             |   1 +
 .../test-config/logfeeder/logfeeder.properties  |  12 +-
 .../test-config/logsearch/logsearch.properties  |   2 +
 ambari-logsearch/pom.xml                        |   2 +
 .../server/upgrade/UpgradeCatalog300.java       |  35 ++
 .../configuration/accumulo-logsearch-conf.xml   | 124 -----
 .../templates/input.config-accumulo.json.j2     |  92 ++++
 .../configuration/infra-logsearch-conf.xml      |  80 ---
 .../templates/input.config-ambari-infra.json.j2 |  48 ++
 .../0.1.0/configuration/ams-logsearch-conf.xml  | 201 -------
 .../input.config-ambari-metrics.json.j2         | 169 ++++++
 .../configuration/atlas-logsearch-conf.xml      |  80 ---
 .../templates/input.config-atlas.json.j2        |  48 ++
 .../configuration/falcon-logsearch-conf.xml     |  80 ---
 .../templates/input.config-falcon.json.j2       |  48 ++
 .../configuration/flume-logsearch-conf.xml      |  80 ---
 .../templates/input.config-flume.json.j2        |  53 ++
 .../configuration/hbase-logsearch-conf.xml      | 111 ----
 .../templates/input.config-hbase.json.j2        |  79 +++
 .../configuration/hdfs-logsearch-conf.xml       | 248 ---------
 .../package/templates/input.config-hdfs.json.j2 | 216 ++++++++
 .../configuration/hive-logsearch-conf.xml       | 117 ----
 .../package/templates/input.config-hive.json.j2 |  85 +++
 .../configuration/kafka-logsearch-conf.xml      | 124 -----
 .../templates/input.config-kafka.json.j2        |  92 ++++
 .../configuration/knox-logsearch-conf.xml       |  93 ----
 .../package/templates/input.config-knox.json.j2 |  60 ++
 .../configuration/logfeeder-ambari-config.xml   |   1 +
 .../logfeeder-custom-logsearch-conf.xml         |  46 --
 .../LOGSEARCH/0.5.0/package/scripts/params.py   |  44 +-
 .../0.5.0/package/scripts/setup_logfeeder.py    |  15 +-
 .../templates/HadoopServiceConfig.json.j2       | 545 ++++++++++++++++---
 .../templates/input.config-logsearch.json.j2    |   6 +-
 .../configuration/oozie-logsearch-conf.xml      |  80 ---
 .../templates/input.config-oozie.json.j2        |  48 ++
 .../configuration/ranger-logsearch-conf.xml     | 111 ----
 .../templates/input.config-ranger.json.j2       |  79 +++
 .../configuration/ranger-kms-logsearch-conf.xml |  80 ---
 .../templates/input.config-ranger-kms.json.j2   |  48 ++
 .../configuration/spark-logsearch-conf.xml      |  98 ----
 .../templates/input.config-spark.json.j2        |  66 +++
 .../configuration/spark2-logsearch-conf.xml     |  98 ----
 .../templates/input.config-spark2.json.j2       |  66 +++
 .../configuration/storm-logsearch-conf.xml      | 110 ----
 .../templates/input.config-storm.json.j2        |  78 +++
 .../mapred-logsearch-conf.xml                   |  80 ---
 .../configuration/yarn-logsearch-conf.xml       | 104 ----
 .../templates/input.config-mapreduce2.json.j2   |  48 ++
 .../package/templates/input.config-yarn.json.j2 |  72 +++
 .../configuration/zeppelin-logsearch-conf.xml   |  80 ---
 .../templates/input.config-zeppelin.json.j2     |  48 ++
 .../configuration/zookeeper-logsearch-conf.xml  |  76 ---
 .../templates/input.config-zookeeper.json.j2    |  46 ++
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |  14 +
 .../scripts/shared_initialization.py            |  17 +
 .../configuration/hbase-logsearch-conf.xml      | 111 ----
 .../templates/input.config-hbase.json.j2        |  79 +++
 .../configuration/hdfs-logsearch-conf.xml       | 248 ---------
 .../package/templates/input.config-hdfs.json.j2 | 216 ++++++++
 .../server/upgrade/UpgradeCatalog300Test.java   |  87 +++
 .../stacks/2.4/LOGSEARCH/test_logfeeder.py      |  28 +-
 .../stacks/2.4/LOGSEARCH/test_logsearch.py      |   1 +
 102 files changed, 4451 insertions(+), 3418 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-config-api/.gitignore
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-config-api/.gitignore b/ambari-logsearch/ambari-logsearch-config-api/.gitignore
new file mode 100644
index 0000000..ae3c172
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/.gitignore
@@ -0,0 +1 @@
+/bin/

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-config-api/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-config-api/pom.xml b/ambari-logsearch/ambari-logsearch-config-api/pom.xml
new file mode 100644
index 0000000..e9abed0
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/pom.xml
@@ -0,0 +1,57 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  <parent>
+    <artifactId>ambari-logsearch</artifactId>
+    <groupId>org.apache.ambari</groupId>
+    <version>2.0.0.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+
+  <artifactId>ambari-logsearch-config-api</artifactId>
+  <packaging>jar</packaging>
+  <name>Ambari Logsearch Config Api</name>
+  <url>http://maven.apache.org</url>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+  
+  <dependencies>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.4</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+    </dependency>
+  </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/InputConfigMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/InputConfigMonitor.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/InputConfigMonitor.java
new file mode 100644
index 0000000..df26920
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/InputConfigMonitor.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api;
+
+/**
+ * Monitors input configuration changes. 
+ */
+public interface InputConfigMonitor {
+  /**
+   * Notification of a new input configuration.
+   * 
+   * @param serviceName The name of the service for which the input configuration was created.
+   * @param inputConfig The input configuration.
+   * @throws Exception
+   */
+  void loadInputConfigs(String serviceName, String inputConfig) throws Exception;
+  
+  /**
+   * Notification of the removal of an input configuration.
+   * 
+   * @param serviceName The name of the service whose input configuration was removed.
+   */
+  void removeInputs(String serviceName);
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfig.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfig.java
new file mode 100644
index 0000000..0bb0b78
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfig.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api;
+
+import java.io.Closeable;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Log Search Configuration, which uploads and retrieves configurations, and monitors their changes.
+ */
+public interface LogSearchConfig extends Closeable {
+  /**
+   * Enumeration of the components of the Log Search service.
+   */
+  public enum Component {
+    SERVER, LOGFEEDER;
+  }
+  
+  /**
+   * Initialization of the configuration.
+   * 
+   * @param component The component which will use the configuration.
+   * @param properties The properties of that component.
+   * @throws Exception
+   */
+  void init(Component component, Map<String, String> properties) throws Exception;
+  
+  /**
+   * Returns all the service names with input configurations of a cluster. Will be used only in SERVER mode.
+   * 
+   * @param clusterName The name of the cluster whose services are required.
+   * @return List of the service names.
+   */
+  List<String> getServices(String clusterName);
+  
+  /**
+   * Checks if input configuration exists.
+   * 
+   * @param clusterName The name of the cluster where the service is looked for.
+   * @param serviceName The name of the service looked for.
+   * @return If input configuration exists for the service.
+   * @throws Exception
+   */
+  boolean inputConfigExists(String clusterName, String serviceName) throws Exception;
+  
+  /**
+   * Returns the input configuration of a service in a cluster. Will be used only in SERVER mode.
+   * 
+   * @param clusterName The name of the cluster where the service is looked for.
+   * @param serviceName The name of the service looked for.
+   * @return The input configuration for the service if it exists, null otherwise.
+   */
+  String getInputConfig(String clusterName, String serviceName);
+  
+  /**
+   * Uploads the input configuration for a service in a cluster.
+   * 
+   * @param clusterName The name of the cluster where the service is.
+   * @param serviceName The name of the service whose input configuration is uploaded.
+   * @param inputConfig The input configuration of the service.
+   * @throws Exception
+   */
+  void setInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception;
+  
+  /**
+   * Starts the monitoring of the input configurations, asynchronously. Will be used only in LOGFEEDER mode.
+   * 
+   * @param configMonitor The input config monitor to call in case of a config change.
+   * @throws Exception
+   */
+  void monitorInputConfigChanges(InputConfigMonitor configMonitor) throws Exception;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactory.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactory.java b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactory.java
new file mode 100644
index 0000000..6ef4b90
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/main/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactory.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api;
+
+import java.util.Map;
+
+import org.apache.ambari.logsearch.config.api.LogSearchConfig.Component;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.log4j.Logger;
+
+/**
+ * Factory class for LogSearchConfig.
+ */
+public class LogSearchConfigFactory {
+  private static final Logger LOG = Logger.getLogger(LogSearchConfigFactory.class);
+
+  /**
+   * Creates a Log Search Configuration instance that implements {@link org.apache.ambari.logsearch.config.api.LogSearchConfig}.
+   * 
+   * @param component The component of the Log Search Service to create the configuration for (SERVER/LOGFEEDER).
+   * @param properties The properties of the component for which the configuration is created. If the properties contain the
+   *                  "logsearch.config.class" entry then the class defined there will be used instead of the default class.
+   * @param defaultClass The default configuration class to use if not specified otherwise.
+   * @return The Log Search Configuration instance.
+   * @throws Exception Throws exception if the defined class does not implement LogSearchConfig, or doesn't have an empty
+   *                   constructor, or throws an exception in its init method.
+   */
+  public static LogSearchConfig createLogSearchConfig(Component component, Map<String, String> properties,
+      Class<? extends LogSearchConfig> defaultClass) throws Exception {
+    try {
+      LogSearchConfig logSearchConfig = null;
+      String configClassName = properties.get("logsearch.config.class");
+      if (!StringUtils.isBlank(configClassName)) {
+        Class<?> clazz = Class.forName(configClassName);
+        if (LogSearchConfig.class.isAssignableFrom(clazz)) {
+          logSearchConfig = (LogSearchConfig) clazz.newInstance();
+        } else {
+          throw new IllegalArgumentException("Class " + configClassName + " does not implement the interface " + LogSearchConfig.class.getName());
+        }
+      } else {
+        logSearchConfig = defaultClass.newInstance();
+      }
+      
+      logSearchConfig.init(component, properties);
+      return logSearchConfig;
+    } catch (Exception e) {
+      LOG.fatal("Could not initialize logsearch config.", e);
+      throw e;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigClass1.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigClass1.java b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigClass1.java
new file mode 100644
index 0000000..969eb30
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigClass1.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logsearch.config.api.InputConfigMonitor;
+import org.apache.ambari.logsearch.config.api.LogSearchConfig;
+
+public class LogSearchConfigClass1 implements LogSearchConfig {
+  @Override
+  public void init(Component component, Map<String, String> properties) {}
+
+  @Override
+  public boolean inputConfigExists(String clusterName, String serviceName) throws Exception {
+    return false;
+  }
+
+  @Override
+  public void setInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {}
+
+  @Override
+  public void monitorInputConfigChanges(InputConfigMonitor configMonitor) throws Exception {}
+
+  @Override
+  public List<String> getServices(String clusterName) {
+    return null;
+  }
+
+  @Override
+  public String getInputConfig(String clusterName, String serviceName) {
+    return null;
+  }
+  
+  @Override
+  public void close() {}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigClass2.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigClass2.java b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigClass2.java
new file mode 100644
index 0000000..664ecc9
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigClass2.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logsearch.config.api.InputConfigMonitor;
+import org.apache.ambari.logsearch.config.api.LogSearchConfig;
+
+public class LogSearchConfigClass2 implements LogSearchConfig {
+  @Override
+  public void init(Component component, Map<String, String> properties) {}
+
+  @Override
+  public boolean inputConfigExists(String clusterName, String serviceName) throws Exception {
+    return false;
+  }
+
+  @Override
+  public void setInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {}
+
+  @Override
+  public void monitorInputConfigChanges(InputConfigMonitor configMonitor) throws Exception {}
+
+  @Override
+  public List<String> getServices(String clusterName) {
+    return null;
+  }
+
+  @Override
+  public String getInputConfig(String clusterName, String serviceName) {
+    return null;
+  }
+  
+  @Override
+  public void close() {}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactoryTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactoryTest.java b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactoryTest.java
new file mode 100644
index 0000000..8e7154e
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/LogSearchConfigFactoryTest.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api;
+
+import java.util.Collections;
+
+import org.apache.ambari.logsearch.config.api.LogSearchConfig;
+import org.apache.ambari.logsearch.config.api.LogSearchConfigFactory;
+import org.apache.ambari.logsearch.config.api.LogSearchConfig.Component;
+import org.junit.Test;
+
+import com.google.common.collect.ImmutableMap;
+
+import junit.framework.Assert;
+
+public class LogSearchConfigFactoryTest {
+
+  @Test
+  public void testDefaultConfig() throws Exception {
+    LogSearchConfig config = LogSearchConfigFactory.createLogSearchConfig(Component.SERVER,
+        Collections.<String, String> emptyMap(), LogSearchConfigClass1.class);
+    
+    Assert.assertSame(config.getClass(), LogSearchConfigClass1.class);
+  }
+
+  @Test
+  public void testCustomConfig() throws Exception {
+    LogSearchConfig config = LogSearchConfigFactory.createLogSearchConfig(Component.SERVER,
+        ImmutableMap.of("logsearch.config.class", "org.apache.ambari.logsearch.config.api.LogSearchConfigClass2"),
+        LogSearchConfigClass1.class);
+    
+    Assert.assertSame(config.getClass(), LogSearchConfigClass2.class);
+  }
+  
+  @Test(expected = IllegalArgumentException.class)
+  public void testNonConfigClass() throws Exception {
+    LogSearchConfigFactory.createLogSearchConfig(Component.SERVER,
+        ImmutableMap.of("logsearch.config.class", "org.apache.ambari.logsearch.config.api.NonLogSearchConfigClass"),
+        LogSearchConfigClass1.class);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/NonLogSearchConfigClass.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/NonLogSearchConfigClass.java b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/NonLogSearchConfigClass.java
new file mode 100644
index 0000000..9564f33
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/test/java/org/apache/ambari/logsearch/config/api/NonLogSearchConfigClass.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.api;
+
+public class NonLogSearchConfigClass {
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-config-api/src/test/resources/log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-config-api/src/test/resources/log4j.xml b/ambari-logsearch/ambari-logsearch-config-api/src/test/resources/log4j.xml
new file mode 100644
index 0000000..6d968f9
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-api/src/test/resources/log4j.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
+
+<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
+  <appender name="console" class="org.apache.log4j.ConsoleAppender">
+    <param name="Target" value="System.out" />
+    <layout class="org.apache.log4j.PatternLayout">
+      <param name="ConversionPattern" value="%d [%t] %-5p %C{6} (%F:%L) - %m%n" />
+      <!-- <param name="ConversionPattern" value="%d [%t] %-5p %c %x - %m%n"/> -->
+    </layout>
+  </appender>
+
+  <root>
+    <priority value="warn" />
+    <appender-ref ref="console" />
+  </root>
+
+</log4j:configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-config-zookeeper/.gitignore
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/.gitignore b/ambari-logsearch/ambari-logsearch-config-zookeeper/.gitignore
new file mode 100644
index 0000000..ae3c172
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/.gitignore
@@ -0,0 +1 @@
+/bin/

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-config-zookeeper/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/pom.xml b/ambari-logsearch/ambari-logsearch-config-zookeeper/pom.xml
new file mode 100644
index 0000000..4ed8eba
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/pom.xml
@@ -0,0 +1,74 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  <parent>
+    <artifactId>ambari-logsearch</artifactId>
+    <groupId>org.apache.ambari</groupId>
+    <version>2.0.0.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+
+  <artifactId>ambari-logsearch-config-zookeeper</artifactId>
+  <packaging>jar</packaging>
+  <name>Ambari Logsearch Config Zookeeper</name>
+  <url>http://maven.apache.org</url>
+
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-logsearch-config-api</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.4</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-collections</groupId>
+      <artifactId>commons-collections</artifactId>
+      <version>3.2.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+      <version>3.4.9</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-framework</artifactId>
+      <version>2.12.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-client</artifactId>
+      <version>2.12.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-recipes</artifactId>
+      <version>2.12.0</version>
+    </dependency>
+  </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigZK.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigZK.java b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigZK.java
new file mode 100644
index 0000000..738fde2
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-config-zookeeper/src/main/java/org/apache/ambari/logsearch/config/zookeeper/LogSearchConfigZK.java
@@ -0,0 +1,213 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.config.zookeeper;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logsearch.config.api.LogSearchConfig;
+import org.apache.ambari.logsearch.config.api.InputConfigMonitor;
+import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.framework.recipes.cache.ChildData;
+import org.apache.curator.framework.recipes.cache.TreeCache;
+import org.apache.curator.framework.recipes.cache.TreeCacheEvent;
+import org.apache.curator.framework.recipes.cache.TreeCacheListener;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.curator.utils.ZKPaths;
+import org.apache.log4j.Logger;
+import org.apache.zookeeper.ZooDefs;
+import org.apache.zookeeper.data.ACL;
+import org.apache.zookeeper.data.Id;
+
+import com.google.common.base.Splitter;
+
+public class LogSearchConfigZK implements LogSearchConfig {
+  private static final Logger LOG = Logger.getLogger(LogSearchConfigZK.class);
+
+  private static final int SESSION_TIMEOUT = 15000;
+  private static final int CONNECTION_TIMEOUT = 30000;
+  private static final String DEFAULT_ZK_ROOT = "/logsearch";
+  private static final long WAIT_FOR_ROOT_SLEEP_SECONDS = 10;
+
+  private static final String CLUSTER_NAME_PROPERTY = "cluster.name";
+  private static final String ZK_CONNECT_STRING_PROPERTY = "logsearch.config.zk_connect_string";
+  private static final String ZK_ACLS_PROPERTY = "logsearch.config.zk_acls";
+  private static final String ZK_ROOT_NODE_PROPERTY = "logsearch.config.zk_root";
+
+  private Map<String, String> properties;
+  private String root;
+  private CuratorFramework client;
+  private TreeCache cache;
+
+  @Override
+  public void init(Component component, Map<String, String> properties) throws Exception {
+    this.properties = properties;
+    
+    LOG.info("Connecting to ZooKeeper at " + properties.get(ZK_CONNECT_STRING_PROPERTY));
+    client = CuratorFrameworkFactory.builder()
+        .connectString(properties.get(ZK_CONNECT_STRING_PROPERTY))
+        .retryPolicy(new ExponentialBackoffRetry(1000, 3))
+        .connectionTimeoutMs(CONNECTION_TIMEOUT)
+        .sessionTimeoutMs(SESSION_TIMEOUT)
+        .build();
+    client.start();
+
+    root = MapUtils.getString(properties, ZK_ROOT_NODE_PROPERTY, DEFAULT_ZK_ROOT);
+
+    if (component == Component.SERVER) {
+      if (client.checkExists().forPath(root) == null) {
+        client.create().creatingParentContainersIfNeeded().forPath(root);
+      }
+      cache = new TreeCache(client, root);
+      cache.start();
+    } else {
+      while (client.checkExists().forPath(root) == null) {
+        LOG.info("Root node is not present yet, going to sleep for " + WAIT_FOR_ROOT_SLEEP_SECONDS + " seconds");
+        Thread.sleep(WAIT_FOR_ROOT_SLEEP_SECONDS * 1000);
+      }
+
+      cache = new TreeCache(client, String.format("%s/%s", root, properties.get(CLUSTER_NAME_PROPERTY)));
+    }
+  }
+
+  @Override
+  public boolean inputConfigExists(String clusterName, String serviceName) throws Exception {
+    String nodePath = root + "/" + clusterName + "/input/" + serviceName;
+    return cache.getCurrentData(nodePath) != null;
+  }
+
+  @Override
+  public void setInputConfig(String clusterName, String serviceName, String inputConfig) throws Exception {
+    String nodePath = String.format("%s/%s/input/%s", root, clusterName, serviceName);
+    client.create().creatingParentContainersIfNeeded().withACL(getAcls()).forPath(nodePath, inputConfig.getBytes());
+    LOG.info("Set input config for the service " + serviceName + " for cluster " + clusterName);
+  }
+
+  private List<ACL> getAcls() {
+    String aclStr = properties.get(ZK_ACLS_PROPERTY);
+    if (StringUtils.isBlank(aclStr)) {
+      return ZooDefs.Ids.OPEN_ACL_UNSAFE;
+    }
+
+    List<ACL> acls = new ArrayList<>();
+    List<String> aclStrList = Splitter.on(",").omitEmptyStrings().trimResults().splitToList(aclStr);
+    for (String unparcedAcl : aclStrList) {
+      String[] parts = unparcedAcl.split(":");
+      if (parts.length == 3) {
+        acls.add(new ACL(parsePermission(parts[2]), new Id(parts[0], parts[1])));
+      }
+    }
+    return acls;
+  }
+
+  private Integer parsePermission(String permission) {
+    int permissionCode = 0;
+    for (char each : permission.toLowerCase().toCharArray()) {
+      switch (each) {
+        case 'r':
+          permissionCode |= ZooDefs.Perms.READ;
+          break;
+        case 'w':
+          permissionCode |= ZooDefs.Perms.WRITE;
+          break;
+        case 'c':
+          permissionCode |= ZooDefs.Perms.CREATE;
+          break;
+        case 'd':
+          permissionCode |= ZooDefs.Perms.DELETE;
+          break;
+        case 'a':
+          permissionCode |= ZooDefs.Perms.ADMIN;
+          break;
+        default:
+          throw new IllegalArgumentException("Unsupported permission: " + permission);
+      }
+    }
+    return permissionCode;
+  }
+
+  @Override
+  public void monitorInputConfigChanges(final InputConfigMonitor configMonitor) throws Exception {
+    TreeCacheListener listener = new TreeCacheListener() {
+      public void childEvent(CuratorFramework client, TreeCacheEvent event) throws Exception {
+        if (!event.getData().getPath().startsWith(String.format("%s/%s/input/", root, properties.get(CLUSTER_NAME_PROPERTY)))) {
+          return;
+        }
+        
+        String nodeName = ZKPaths.getNodeFromPath(event.getData().getPath());
+        String nodeData = new String(event.getData().getData());
+        switch (event.getType()) {
+          case NODE_ADDED:
+            LOG.info("Node added under input ZK node: " + nodeName);
+            addInputs(nodeName, nodeData);
+            break;
+          case NODE_UPDATED:
+            LOG.info("Node updated under input ZK node: " + nodeName);
+            removeInputs(nodeName);
+            addInputs(nodeName, nodeData);
+            break;
+          case NODE_REMOVED:
+            LOG.info("Node removed from input ZK node: " + nodeName);
+            removeInputs(nodeName);
+            break;
+          default:
+            break;
+        }
+      }
+
+      private void removeInputs(String serviceName) {
+        configMonitor.removeInputs(serviceName);
+      }
+
+      private void addInputs(String serviceName, String inputConfig) {
+        try {
+          configMonitor.loadInputConfigs(serviceName, inputConfig);
+        } catch (Exception e) {
+          LOG.error("Could not load input configuration for service " + serviceName + ":\n" + inputConfig, e);
+        }
+      }
+    };
+    cache.getListenable().addListener(listener);
+    cache.start();
+  }
+
+  @Override
+  public List<String> getServices(String clusterName) {
+    String parentPath = String.format("%s/%s/input", root, clusterName);
+    Map<String, ChildData> serviceNodes = cache.getCurrentChildren(parentPath);
+    return new ArrayList<String>(serviceNodes.keySet());
+  }
+
+  @Override
+  public String getInputConfig(String clusterName, String serviceName) {
+    ChildData childData = cache.getCurrentData(String.format("%s/%s/input/%s", root, clusterName, serviceName));
+    return childData == null ? null : new String(childData.getData());
+  }
+
+  @Override
+  public void close() {
+    LOG.info("Closing ZooKeeper Connection");
+    client.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml b/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
index 25e4306..5d6f8b6 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/pom.xml
@@ -44,6 +44,11 @@
       <version>${project.version}</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-logsearch-config-zookeeper</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
       <groupId>commons-codec</groupId>
       <artifactId>commons-codec</artifactId>
     </dependency>
@@ -88,7 +93,6 @@
       <artifactId>commons-logging</artifactId>
       <version>1.1.1</version>
     </dependency>
-
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
@@ -125,9 +129,9 @@
       <version>${project.version}</version>
     </dependency>
     <dependency>
-    <groupId>com.amazonaws</groupId>
-    <artifactId>aws-java-sdk-s3</artifactId>
-    <version>1.11.5</version>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-s3</artifactId>
+      <version>1.11.5</version>
     </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>
@@ -135,26 +139,40 @@
       <version>1.11</version>
     </dependency>
     <dependency>
-    <groupId>com.amazonaws</groupId>
-    <artifactId>aws-java-sdk-iam</artifactId>
-    <version>1.11.5</version>
-  </dependency>
-   <dependency>
-    <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-common</artifactId>
-    <version>${hadoop.version}</version>
-  </dependency>
-  <dependency>
-    <groupId>org.apache.hadoop</groupId>
-    <artifactId>hadoop-hdfs</artifactId>
-    <version>${hadoop.version}</version>
-  </dependency>
-  <dependency>
-    <groupId>commons-io</groupId>
-    <artifactId>commons-io</artifactId>
-    <version>${common.io.version}</version>
-  </dependency>
- </dependencies>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-iam</artifactId>
+      <version>1.11.5</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.curator</groupId>
+          <artifactId>curator-framework</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.curator</groupId>
+          <artifactId>curator-client</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.curator</groupId>
+          <artifactId>curator-recipes</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>${common.io.version}</version>
+    </dependency>
+  </dependencies>
   <build>
     <finalName>LogFeeder</finalName>
     <pluginManagement>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
index a47c71f..074fedb 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/LogFeeder.java
@@ -19,60 +19,37 @@
 
 package org.apache.ambari.logfeeder;
 
-import java.io.BufferedInputStream;
-import java.io.File;
-import java.lang.reflect.Type;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
-import java.util.Set;
 
-import org.apache.ambari.logfeeder.filter.Filter;
-import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.common.ConfigHandler;
+import org.apache.ambari.logsearch.config.api.LogSearchConfig;
+import org.apache.ambari.logsearch.config.api.LogSearchConfigFactory;
+import org.apache.ambari.logsearch.config.api.LogSearchConfig.Component;
+import org.apache.ambari.logsearch.config.zookeeper.LogSearchConfigZK;
+import org.apache.ambari.logfeeder.input.InputConfigUploader;
 import org.apache.ambari.logfeeder.input.InputManager;
-import org.apache.ambari.logfeeder.input.InputSimulate;
 import org.apache.ambari.logfeeder.logconfig.LogConfigHandler;
 import org.apache.ambari.logfeeder.metrics.MetricData;
 import org.apache.ambari.logfeeder.metrics.MetricsManager;
-import org.apache.ambari.logfeeder.output.Output;
-import org.apache.ambari.logfeeder.output.OutputManager;
-import org.apache.ambari.logfeeder.util.AliasUtil;
-import org.apache.ambari.logfeeder.util.FileUtil;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.ambari.logfeeder.util.SSLUtil;
-import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.ambari.logfeeder.util.AliasUtil.AliasType;
+import org.apache.curator.shaded.com.google.common.collect.Maps;
 import org.apache.hadoop.util.ShutdownHookManager;
-import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 
-import com.google.gson.reflect.TypeToken;
-
 public class LogFeeder {
   private static final Logger LOG = Logger.getLogger(LogFeeder.class);
 
   private static final int LOGFEEDER_SHUTDOWN_HOOK_PRIORITY = 30;
   private static final int CHECKPOINT_CLEAN_INTERVAL_MS = 24 * 60 * 60 * 60 * 1000; // 24 hours
 
-  private OutputManager outputManager = new OutputManager();
+  private ConfigHandler configHandler = new ConfigHandler();
+  private LogSearchConfig config;
+  
   private InputManager inputManager = new InputManager();
   private MetricsManager metricsManager = new MetricsManager();
 
-  public static Map<String, Object> globalConfigs = new HashMap<>();
-
-  private List<Map<String, Object>> inputConfigList = new ArrayList<>();
-  private List<Map<String, Object>> filterConfigList = new ArrayList<>();
-  private List<Map<String, Object>> outputConfigList = new ArrayList<>();
-  
   private long lastCheckPointCleanedMS = 0;
   private boolean isLogfeederCompleted = false;
   private Thread statLoggerThread = null;
@@ -91,329 +68,23 @@ public class LogFeeder {
   }
 
   private void init() throws Throwable {
-    Date startTime = new Date();
+    long startTime = System.currentTimeMillis();
 
-    loadConfigFiles();
-    addSimulatedInputs();
-    mergeAllConfigs();
-    
+    configHandler.init();
+    LogConfigHandler.handleConfig();
     SSLUtil.ensureStorePasswords();
     
-    outputManager.init();
-    inputManager.init();
-    metricsManager.init();
+    config = LogSearchConfigFactory.createLogSearchConfig(Component.LOGFEEDER,
+        Maps.fromProperties(LogFeederUtil.getProperties()), LogSearchConfigZK.class);
+    InputConfigUploader.load(config);
+    config.monitorInputConfigChanges(configHandler);
     
-    LogConfigHandler.handleConfig();
+    metricsManager.init();
     
     LOG.debug("==============");
     
-    Date endTime = new Date();
-    LOG.info("Took " + (endTime.getTime() - startTime.getTime()) + " ms to initialize");
-  }
-
-  private void loadConfigFiles() throws Exception {
-    List<String> configFiles = getConfigFiles();
-    for (String configFileName : configFiles) {
-      LOG.info("Going to load config file:" + configFileName);
-      configFileName = configFileName.replace("\\ ", "%20");
-      File configFile = new File(configFileName);
-      if (configFile.exists() && configFile.isFile()) {
-        LOG.info("Config file exists in path." + configFile.getAbsolutePath());
-        loadConfigsUsingFile(configFile);
-      } else {
-        LOG.info("Trying to load config file from classloader: " + configFileName);
-        loadConfigsUsingClassLoader(configFileName);
-        LOG.info("Loaded config file from classloader: " + configFileName);
-      }
-    }
-  }
-
-  private List<String> getConfigFiles() {
-    List<String> configFiles = new ArrayList<>();
-    
-    String logfeederConfigFilesProperty = LogFeederUtil.getStringProperty("logfeeder.config.files");
-    LOG.info("logfeeder.config.files=" + logfeederConfigFilesProperty);
-    if (logfeederConfigFilesProperty != null) {
-      configFiles.addAll(Arrays.asList(logfeederConfigFilesProperty.split(",")));
-    }
-
-    String inputConfigDir = LogFeederUtil.getStringProperty("input_config_dir");
-    if (StringUtils.isNotEmpty(inputConfigDir)) {
-      File configDirFile = new File(inputConfigDir);
-      List<File> inputConfigFiles = FileUtil.getAllFileFromDir(configDirFile, "json", false);
-      for (File inputConfigFile : inputConfigFiles) {
-        configFiles.add(inputConfigFile.getAbsolutePath());
-      }
-    }
-    
-    if (CollectionUtils.isEmpty(configFiles)) {
-      String configFileProperty = LogFeederUtil.getStringProperty("config.file", "config.json");
-      configFiles.addAll(Arrays.asList(configFileProperty.split(",")));
-    }
-    
-    return configFiles;
-  }
-
-  private void loadConfigsUsingFile(File configFile) throws Exception {
-    try {
-      String configData = FileUtils.readFileToString(configFile);
-      loadConfigs(configData);
-    } catch (Exception t) {
-      LOG.error("Error opening config file. configFilePath=" + configFile.getAbsolutePath());
-      throw t;
-    }
-  }
-
-  private void loadConfigsUsingClassLoader(String configFileName) throws Exception {
-    try (BufferedInputStream fis = (BufferedInputStream) this.getClass().getClassLoader().getResourceAsStream(configFileName)) {
-      String configData = IOUtils.toString(fis);
-      loadConfigs(configData);
-    }
-  }
-
-  @SuppressWarnings("unchecked")
-  private void loadConfigs(String configData) throws Exception {
-    Type type = new TypeToken<Map<String, Object>>() {}.getType();
-    Map<String, Object> configMap = LogFeederUtil.getGson().fromJson(configData, type);
-
-    // Get the globals
-    for (String key : configMap.keySet()) {
-      switch (key) {
-        case "global" :
-          globalConfigs.putAll((Map<String, Object>) configMap.get(key));
-          break;
-        case "input" :
-          List<Map<String, Object>> inputConfig = (List<Map<String, Object>>) configMap.get(key);
-          inputConfigList.addAll(inputConfig);
-          break;
-        case "filter" :
-          List<Map<String, Object>> filterConfig = (List<Map<String, Object>>) configMap.get(key);
-          filterConfigList.addAll(filterConfig);
-          break;
-        case "output" :
-          List<Map<String, Object>> outputConfig = (List<Map<String, Object>>) configMap.get(key);
-          outputConfigList.addAll(outputConfig);
-          break;
-        default :
-          LOG.warn("Unknown config key: " + key);
-      }
-    }
-  }
-  
-  private void addSimulatedInputs() {
-    int simulatedInputNumber = LogFeederUtil.getIntProperty("logfeeder.simulate.input_number", 0);
-    if (simulatedInputNumber == 0)
-      return;
-    
-    InputSimulate.loadTypeToFilePath(inputConfigList);
-    inputConfigList.clear();
-    
-    for (int i = 0; i < simulatedInputNumber; i++) {
-      HashMap<String, Object> mapList = new HashMap<String, Object>();
-      mapList.put("source", "simulate");
-      mapList.put("rowtype", "service");
-      inputConfigList.add(mapList);
-    }
-  }
-
-  private void mergeAllConfigs() {
-    loadOutputs();
-    loadInputs();
-    loadFilters();
-    
-    assignOutputsToInputs();
-  }
-
-  private void loadOutputs() {
-    for (Map<String, Object> map : outputConfigList) {
-      if (map == null) {
-        continue;
-      }
-      mergeBlocks(globalConfigs, map);
-
-      String value = (String) map.get("destination");
-      if (StringUtils.isEmpty(value)) {
-        LOG.error("Output block doesn't have destination element");
-        continue;
-      }
-      Output output = (Output) AliasUtil.getClassInstance(value, AliasType.OUTPUT);
-      if (output == null) {
-        LOG.error("Output object could not be found");
-        continue;
-      }
-      output.setDestination(value);
-      output.loadConfig(map);
-
-      // We will only check for is_enabled out here. Down below we will check whether this output is enabled for the input
-      if (output.getBooleanValue("is_enabled", true)) {
-        output.logConfigs(Level.INFO);
-        outputManager.add(output);
-      } else {
-        LOG.info("Output is disabled. So ignoring it. " + output.getShortDescription());
-      }
-    }
-  }
-
-  private void loadInputs() {
-    for (Map<String, Object> map : inputConfigList) {
-      if (map == null) {
-        continue;
-      }
-      mergeBlocks(globalConfigs, map);
-
-      String value = (String) map.get("source");
-      if (StringUtils.isEmpty(value)) {
-        LOG.error("Input block doesn't have source element");
-        continue;
-      }
-      Input input = (Input) AliasUtil.getClassInstance(value, AliasType.INPUT);
-      if (input == null) {
-        LOG.error("Input object could not be found");
-        continue;
-      }
-      input.setType(value);
-      input.loadConfig(map);
-
-      if (input.isEnabled()) {
-        input.setOutputManager(outputManager);
-        input.setInputManager(inputManager);
-        inputManager.add(input);
-        input.logConfigs(Level.INFO);
-      } else {
-        LOG.info("Input is disabled. So ignoring it. " + input.getShortDescription());
-      }
-    }
-  }
-
-  private void loadFilters() {
-    sortFilters();
-
-    List<Input> toRemoveInputList = new ArrayList<Input>();
-    for (Input input : inputManager.getInputList()) {
-      for (Map<String, Object> map : filterConfigList) {
-        if (map == null) {
-          continue;
-        }
-        mergeBlocks(globalConfigs, map);
-
-        String value = (String) map.get("filter");
-        if (StringUtils.isEmpty(value)) {
-          LOG.error("Filter block doesn't have filter element");
-          continue;
-        }
-        Filter filter = (Filter) AliasUtil.getClassInstance(value, AliasType.FILTER);
-        if (filter == null) {
-          LOG.error("Filter object could not be found");
-          continue;
-        }
-        filter.loadConfig(map);
-        filter.setInput(input);
-
-        if (filter.isEnabled()) {
-          filter.setOutputManager(outputManager);
-          input.addFilter(filter);
-          filter.logConfigs(Level.INFO);
-        } else {
-          LOG.debug("Ignoring filter " + filter.getShortDescription() + " for input " + input.getShortDescription());
-        }
-      }
-      
-      if (input.getFirstFilter() == null) {
-        toRemoveInputList.add(input);
-      }
-    }
-
-    for (Input toRemoveInput : toRemoveInputList) {
-      LOG.warn("There are no filters, we will ignore this input. " + toRemoveInput.getShortDescription());
-      inputManager.removeInput(toRemoveInput);
-    }
-  }
-
-  private void sortFilters() {
-    Collections.sort(filterConfigList, new Comparator<Map<String, Object>>() {
-
-      @Override
-      public int compare(Map<String, Object> o1, Map<String, Object> o2) {
-        Object o1Sort = o1.get("sort_order");
-        Object o2Sort = o2.get("sort_order");
-        if (o1Sort == null || o2Sort == null) {
-          return 0;
-        }
-        
-        int o1Value = parseSort(o1, o1Sort);
-        int o2Value = parseSort(o2, o2Sort);
-        
-        return o1Value - o2Value;
-      }
-
-      private int parseSort(Map<String, Object> map, Object o) {
-        if (!(o instanceof Number)) {
-          try {
-            return (new Double(Double.parseDouble(o.toString()))).intValue();
-          } catch (Throwable t) {
-            LOG.error("Value is not of type Number. class=" + o.getClass().getName() + ", value=" + o.toString()
-              + ", map=" + map.toString());
-            return 0;
-          }
-        } else {
-          return ((Number) o).intValue();
-        }
-      }
-    });
-  }
-
-  private void assignOutputsToInputs() {
-    Set<Output> usedOutputSet = new HashSet<Output>();
-    for (Input input : inputManager.getInputList()) {
-      for (Output output : outputManager.getOutputs()) {
-        if (LogFeederUtil.isEnabled(output.getConfigs(), input.getConfigs())) {
-          usedOutputSet.add(output);
-          input.addOutput(output);
-        }
-      }
-    }
-    
-    // In case of simulation copies of the output are added for each simulation instance, these must be added to the manager
-    for (Output output : InputSimulate.getSimulateOutputs()) {
-      outputManager.add(output);
-      usedOutputSet.add(output);
-    }
-    
-    outputManager.retainUsedOutputs(usedOutputSet);
-  }
-
-  @SuppressWarnings("unchecked")
-  private void mergeBlocks(Map<String, Object> fromMap, Map<String, Object> toMap) {
-    for (String key : fromMap.keySet()) {
-      Object objValue = fromMap.get(key);
-      if (objValue == null) {
-        continue;
-      }
-      if (objValue instanceof Map) {
-        Map<String, Object> globalFields = LogFeederUtil.cloneObject((Map<String, Object>) objValue);
-
-        Map<String, Object> localFields = (Map<String, Object>) toMap.get(key);
-        if (localFields == null) {
-          localFields = new HashMap<String, Object>();
-          toMap.put(key, localFields);
-        }
-
-        if (globalFields != null) {
-          for (String fieldKey : globalFields.keySet()) {
-            if (!localFields.containsKey(fieldKey)) {
-              localFields.put(fieldKey, globalFields.get(fieldKey));
-            }
-          }
-        }
-      }
-    }
-
-    // Let's add the rest of the top level fields if missing
-    for (String key : fromMap.keySet()) {
-      if (!toMap.containsKey(key)) {
-        toMap.put(key, fromMap.get(key));
-      }
-    }
+    long endTime = System.currentTimeMillis();
+    LOG.info("Took " + (endTime - startTime) + " ms to initialize");
   }
 
   private class JVMShutdownHook extends Thread {
@@ -422,10 +93,8 @@ public class LogFeeder {
       try {
         LOG.info("Processing is shutting down.");
 
-        inputManager.close();
-        outputManager.close();
-        inputManager.checkInAll();
-
+        configHandler.close();
+        config.close();
         logStats();
 
         LOG.info("LogSearch is exiting.");
@@ -436,7 +105,6 @@ public class LogFeeder {
   }
 
   private void monitor() throws Exception {
-    inputManager.monitor();
     JVMShutdownHook logfeederJVMHook = new JVMShutdownHook();
     ShutdownHookManager.get().addShutdownHook(logfeederJVMHook, LOGFEEDER_SHUTDOWN_HOOK_PRIORITY);
     
@@ -458,7 +126,7 @@ public class LogFeeder {
 
           if (System.currentTimeMillis() > (lastCheckPointCleanedMS + CHECKPOINT_CLEAN_INTERVAL_MS)) {
             lastCheckPointCleanedMS = System.currentTimeMillis();
-            inputManager.cleanCheckPointFiles();
+            configHandler.cleanCheckPointFiles();
           }
 
           if (isLogfeederCompleted) {
@@ -474,13 +142,11 @@ public class LogFeeder {
   }
 
   private void logStats() {
-    inputManager.logStats();
-    outputManager.logStats();
+    configHandler.logStats();
 
     if (metricsManager.isMetricsEnabled()) {
       List<MetricData> metricsList = new ArrayList<MetricData>();
-      inputManager.addMetricsContainers(metricsList);
-      outputManager.addMetricsContainers(metricsList);
+      configHandler.addMetrics(metricsList);
       metricsManager.useMetrics(metricsList);
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigHandler.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigHandler.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigHandler.java
new file mode 100644
index 0000000..effe980
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/common/ConfigHandler.java
@@ -0,0 +1,428 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.common;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.lang.reflect.Type;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.ambari.logfeeder.filter.Filter;
+import org.apache.ambari.logfeeder.input.Input;
+import org.apache.ambari.logfeeder.input.InputManager;
+import org.apache.ambari.logfeeder.input.InputSimulate;
+import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.apache.ambari.logfeeder.output.Output;
+import org.apache.ambari.logfeeder.output.OutputManager;
+import org.apache.ambari.logfeeder.util.AliasUtil;
+import org.apache.ambari.logfeeder.util.FileUtil;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.ambari.logfeeder.util.AliasUtil.AliasType;
+import org.apache.ambari.logsearch.config.api.InputConfigMonitor;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.gson.reflect.TypeToken;
+
+public class ConfigHandler implements InputConfigMonitor {
+  private static final Logger LOG = Logger.getLogger(ConfigHandler.class);
+
+  private final OutputManager outputManager = new OutputManager();
+  private final InputManager inputManager = new InputManager();
+
+  public static Map<String, Object> globalConfigs = new HashMap<>();
+
+  private final List<Map<String, Object>> inputConfigList = new ArrayList<>();
+  private final List<Map<String, Object>> filterConfigList = new ArrayList<>();
+  private final List<Map<String, Object>> outputConfigList = new ArrayList<>();
+  
+  private boolean simulateMode = false;
+  
+  public ConfigHandler() {}
+  
+  public void init() throws Exception {
+    loadConfigFiles();
+    loadOutputs();
+    simulateIfNeeded();
+    
+    inputManager.init();
+    outputManager.init();
+  }
+  
+  private void loadConfigFiles() throws Exception {
+    List<String> configFiles = getConfigFiles();
+    for (String configFileName : configFiles) {
+      LOG.info("Going to load config file:" + configFileName);
+      configFileName = configFileName.replace("\\ ", "%20");
+      File configFile = new File(configFileName);
+      if (configFile.exists() && configFile.isFile()) {
+        LOG.info("Config file exists in path." + configFile.getAbsolutePath());
+        loadConfigsUsingFile(configFile);
+      } else {
+        LOG.info("Trying to load config file from classloader: " + configFileName);
+        loadConfigsUsingClassLoader(configFileName);
+        LOG.info("Loaded config file from classloader: " + configFileName);
+      }
+    }
+  }
+
+  private List<String> getConfigFiles() {
+    List<String> configFiles = new ArrayList<>();
+    
+    String logfeederConfigFilesProperty = LogFeederUtil.getStringProperty("logfeeder.config.files");
+    LOG.info("logfeeder.config.files=" + logfeederConfigFilesProperty);
+    if (logfeederConfigFilesProperty != null) {
+      configFiles.addAll(Arrays.asList(logfeederConfigFilesProperty.split(",")));
+    }
+
+    String inputConfigDir = LogFeederUtil.getStringProperty("input_config_dir");
+    if (StringUtils.isNotEmpty(inputConfigDir)) {
+      File configDirFile = new File(inputConfigDir);
+      List<File> inputConfigFiles = FileUtil.getAllFileFromDir(configDirFile, "json", false);
+      for (File inputConfigFile : inputConfigFiles) {
+        configFiles.add(inputConfigFile.getAbsolutePath());
+      }
+    }
+    
+    if (CollectionUtils.isEmpty(configFiles)) {
+      String configFileProperty = LogFeederUtil.getStringProperty("config.file", "config.json");
+      configFiles.addAll(Arrays.asList(configFileProperty.split(",")));
+    }
+    
+    return configFiles;
+  }
+
+  private void loadConfigsUsingFile(File configFile) throws Exception {
+    try {
+      String configData = FileUtils.readFileToString(configFile, Charset.defaultCharset());
+      loadConfigs(configData);
+    } catch (Exception t) {
+      LOG.error("Error opening config file. configFilePath=" + configFile.getAbsolutePath());
+      throw t;
+    }
+  }
+
+  private void loadConfigsUsingClassLoader(String configFileName) throws Exception {
+    try (BufferedInputStream fis = (BufferedInputStream) this.getClass().getClassLoader().getResourceAsStream(configFileName)) {
+      String configData = IOUtils.toString(fis, Charset.defaultCharset());
+      loadConfigs(configData);
+    }
+  }
+  
+  @Override
+  public void loadInputConfigs(String serviceName, String inputConfigData) throws Exception {
+    inputConfigList.clear();
+    filterConfigList.clear();
+    
+    loadConfigs(inputConfigData);
+    
+    if (simulateMode) {
+      InputSimulate.loadTypeToFilePath(inputConfigList);
+    } else {
+      loadInputs(serviceName);
+      loadFilters(serviceName);
+      assignOutputsToInputs(serviceName);
+      
+      inputManager.startInputs(serviceName);
+    }
+  }
+
+  @Override
+  public void removeInputs(String serviceName) {
+    inputManager.removeInputsForService(serviceName);
+  }
+
+  @SuppressWarnings("unchecked")
+  public void loadConfigs(String configData) throws Exception {
+    Type type = new TypeToken<Map<String, Object>>() {}.getType();
+    Map<String, Object> configMap = LogFeederUtil.getGson().fromJson(configData, type);
+
+    // Get the globals
+    for (String key : configMap.keySet()) {
+      switch (key) {
+        case "global" :
+          globalConfigs.putAll((Map<String, Object>) configMap.get(key));
+          break;
+        case "input" :
+          List<Map<String, Object>> inputConfig = (List<Map<String, Object>>) configMap.get(key);
+          inputConfigList.addAll(inputConfig);
+          break;
+        case "filter" :
+          List<Map<String, Object>> filterConfig = (List<Map<String, Object>>) configMap.get(key);
+          filterConfigList.addAll(filterConfig);
+          break;
+        case "output" :
+          List<Map<String, Object>> outputConfig = (List<Map<String, Object>>) configMap.get(key);
+          outputConfigList.addAll(outputConfig);
+          break;
+        default :
+          LOG.warn("Unknown config key: " + key);
+      }
+    }
+  }
+  
+  private void simulateIfNeeded() throws Exception {
+    int simulatedInputNumber = LogFeederUtil.getIntProperty("logfeeder.simulate.input_number", 0);
+    if (simulatedInputNumber == 0)
+      return;
+    
+    List<Map<String, Object>> simulateInputConfigList = new ArrayList<>();
+    for (int i = 0; i < simulatedInputNumber; i++) {
+      HashMap<String, Object> mapList = new HashMap<String, Object>();
+      mapList.put("source", "simulate");
+      mapList.put("rowtype", "service");
+      simulateInputConfigList.add(mapList);
+    }
+    
+    Map<String, List<Map<String, Object>>> simulateInputConfigMap = ImmutableMap.of("input", simulateInputConfigList);
+    String simulateInputConfig = LogFeederUtil.getGson().toJson(simulateInputConfigMap);
+    loadInputConfigs("Simulation", simulateInputConfig);
+    
+    simulateMode = true;
+  }
+
+  private void loadOutputs() {
+    for (Map<String, Object> map : outputConfigList) {
+      if (map == null) {
+        continue;
+      }
+      mergeBlocks(globalConfigs, map);
+
+      String value = (String) map.get("destination");
+      if (StringUtils.isEmpty(value)) {
+        LOG.error("Output block doesn't have destination element");
+        continue;
+      }
+      Output output = (Output) AliasUtil.getClassInstance(value, AliasType.OUTPUT);
+      if (output == null) {
+        LOG.error("Output object could not be found");
+        continue;
+      }
+      output.setDestination(value);
+      output.loadConfig(map);
+
+      // We will only check for is_enabled out here. Down below we will check whether this output is enabled for the input
+      if (output.getBooleanValue("is_enabled", true)) {
+        output.logConfigs(Level.INFO);
+        outputManager.add(output);
+      } else {
+        LOG.info("Output is disabled. So ignoring it. " + output.getShortDescription());
+      }
+    }
+  }
+
+  private void loadInputs(String serviceName) {
+    for (Map<String, Object> map : inputConfigList) {
+      if (map == null) {
+        continue;
+      }
+      mergeBlocks(globalConfigs, map);
+
+      String value = (String) map.get("source");
+      if (StringUtils.isEmpty(value)) {
+        LOG.error("Input block doesn't have source element");
+        continue;
+      }
+      Input input = (Input) AliasUtil.getClassInstance(value, AliasType.INPUT);
+      if (input == null) {
+        LOG.error("Input object could not be found");
+        continue;
+      }
+      input.setType(value);
+      input.loadConfig(map);
+
+      if (input.isEnabled()) {
+        input.setOutputManager(outputManager);
+        input.setInputManager(inputManager);
+        inputManager.add(serviceName, input);
+        input.logConfigs(Level.INFO);
+      } else {
+        LOG.info("Input is disabled. So ignoring it. " + input.getShortDescription());
+      }
+    }
+  }
+
+  private void loadFilters(String serviceName) {
+    sortFilters();
+
+    List<Input> toRemoveInputList = new ArrayList<Input>();
+    for (Input input : inputManager.getInputList(serviceName)) {
+      for (Map<String, Object> map : filterConfigList) {
+        if (map == null) {
+          continue;
+        }
+        mergeBlocks(globalConfigs, map);
+
+        String value = (String) map.get("filter");
+        if (StringUtils.isEmpty(value)) {
+          LOG.error("Filter block doesn't have filter element");
+          continue;
+        }
+        Filter filter = (Filter) AliasUtil.getClassInstance(value, AliasType.FILTER);
+        if (filter == null) {
+          LOG.error("Filter object could not be found");
+          continue;
+        }
+        filter.loadConfig(map);
+        filter.setInput(input);
+
+        if (filter.isEnabled()) {
+          filter.setOutputManager(outputManager);
+          input.addFilter(filter);
+          filter.logConfigs(Level.INFO);
+        } else {
+          LOG.debug("Ignoring filter " + filter.getShortDescription() + " for input " + input.getShortDescription());
+        }
+      }
+      
+      if (input.getFirstFilter() == null) {
+        toRemoveInputList.add(input);
+      }
+    }
+
+    for (Input toRemoveInput : toRemoveInputList) {
+      LOG.warn("There are no filters, we will ignore this input. " + toRemoveInput.getShortDescription());
+      inputManager.removeInput(toRemoveInput);
+    }
+  }
+
+  private void sortFilters() {
+    Collections.sort(filterConfigList, new Comparator<Map<String, Object>>() {
+
+      @Override
+      public int compare(Map<String, Object> o1, Map<String, Object> o2) {
+        Object o1Sort = o1.get("sort_order");
+        Object o2Sort = o2.get("sort_order");
+        if (o1Sort == null || o2Sort == null) {
+          return 0;
+        }
+        
+        int o1Value = parseSort(o1, o1Sort);
+        int o2Value = parseSort(o2, o2Sort);
+        
+        return o1Value - o2Value;
+      }
+
+      private int parseSort(Map<String, Object> map, Object o) {
+        if (!(o instanceof Number)) {
+          try {
+            return (new Double(Double.parseDouble(o.toString()))).intValue();
+          } catch (Throwable t) {
+            LOG.error("Value is not of type Number. class=" + o.getClass().getName() + ", value=" + o.toString()
+              + ", map=" + map.toString());
+            return 0;
+          }
+        } else {
+          return ((Number) o).intValue();
+        }
+      }
+    });
+  }
+
+  private void assignOutputsToInputs(String serviceName) {
+    Set<Output> usedOutputSet = new HashSet<Output>();
+    for (Input input : inputManager.getInputList(serviceName)) {
+      for (Output output : outputManager.getOutputs()) {
+        if (LogFeederUtil.isEnabled(output.getConfigs(), input.getConfigs())) {
+          usedOutputSet.add(output);
+          input.addOutput(output);
+        }
+      }
+    }
+    
+    // In case of simulation copies of the output are added for each simulation instance, these must be added to the manager
+    for (Output output : InputSimulate.getSimulateOutputs()) {
+      outputManager.add(output);
+      usedOutputSet.add(output);
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  private void mergeBlocks(Map<String, Object> fromMap, Map<String, Object> toMap) {
+    for (String key : fromMap.keySet()) {
+      Object objValue = fromMap.get(key);
+      if (objValue == null) {
+        continue;
+      }
+      if (objValue instanceof Map) {
+        Map<String, Object> globalFields = LogFeederUtil.cloneObject((Map<String, Object>) objValue);
+
+        Map<String, Object> localFields = (Map<String, Object>) toMap.get(key);
+        if (localFields == null) {
+          localFields = new HashMap<String, Object>();
+          toMap.put(key, localFields);
+        }
+
+        if (globalFields != null) {
+          for (String fieldKey : globalFields.keySet()) {
+            if (!localFields.containsKey(fieldKey)) {
+              localFields.put(fieldKey, globalFields.get(fieldKey));
+            }
+          }
+        }
+      }
+    }
+
+    // Let's add the rest of the top level fields if missing
+    for (String key : fromMap.keySet()) {
+      if (!toMap.containsKey(key)) {
+        toMap.put(key, fromMap.get(key));
+      }
+    }
+  }
+
+  public void cleanCheckPointFiles() {
+    inputManager.cleanCheckPointFiles();
+  }
+
+  public void logStats() {
+    inputManager.logStats();
+    outputManager.logStats();
+  }
+  
+  public void addMetrics(List<MetricData> metricsList) {
+    inputManager.addMetricsContainers(metricsList);
+    outputManager.addMetricsContainers(metricsList);
+  }
+
+  public void waitOnAllInputs() {
+    inputManager.waitOnAllInputs();
+  }
+
+  public void close() {
+    inputManager.close();
+    outputManager.close();
+    inputManager.checkInAll();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputConfigUploader.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputConfigUploader.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputConfigUploader.java
new file mode 100644
index 0000000..b70fbd1
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputConfigUploader.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logfeeder.input;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.nio.charset.Charset;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.ambari.logsearch.config.api.LogSearchConfig;
+import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.log4j.Logger;
+
+import com.google.common.io.Files;
+
+public class InputConfigUploader extends Thread {
+  protected static final Logger LOG = Logger.getLogger(InputConfigUploader.class);
+
+  private static final long SLEEP_BETWEEN_CHECK = 2000;
+
+  private final File configDir;
+  private final FilenameFilter inputConfigFileFilter = new FilenameFilter() {
+    @Override
+    public boolean accept(File dir, String name) {
+      return name.startsWith("input.config-") && name.endsWith(".json");
+    }
+  };
+  private final Set<String> filesHandled = new HashSet<>();
+  private final Pattern serviceNamePattern = Pattern.compile("input.config-(.+).json");
+  private final LogSearchConfig config;
+  private final String clusterName = LogFeederUtil.getStringProperty("cluster.name");
+  
+  public static void load(LogSearchConfig config) {
+    new InputConfigUploader(config).start();
+  }
+  
+  private InputConfigUploader(LogSearchConfig config) {
+    super("Input Config Loader");
+    setDaemon(true);
+    
+    this.configDir = new File(LogFeederUtil.getStringProperty("logfeeder.config.dir"));
+    this.config = config;
+  }
+  
+  @Override
+  public void run() {
+    while (true) {
+      File[] inputConfigFiles = configDir.listFiles(inputConfigFileFilter);
+      for (File inputConfigFile : inputConfigFiles) {
+        if (!filesHandled.contains(inputConfigFile.getAbsolutePath())) {
+          try {
+            Matcher m = serviceNamePattern.matcher(inputConfigFile.getName());
+            m.find();
+            String serviceName = m.group(1);
+            String inputConfig = Files.toString(inputConfigFile, Charset.defaultCharset());
+            
+            if (!config.inputConfigExists(clusterName, serviceName)) {
+              config.setInputConfig(clusterName, serviceName, inputConfig);
+            }
+            filesHandled.add(inputConfigFile.getAbsolutePath());
+          } catch (Exception e) {
+            LOG.warn("Error handling file " + inputConfigFile.getAbsolutePath(), e);
+          }
+        }
+      }
+      
+      try {
+        Thread.sleep(SLEEP_BETWEEN_CHECK);
+      } catch (InterruptedException e) {
+        LOG.debug("Interrupted during sleep", e);
+      }
+    }
+  }
+}


[4/5] ambari git commit: AMBARI-20578 Log Search Configuration API (mgergely)

Posted by mg...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java
index 8e70850..8c76785 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputManager.java
@@ -25,6 +25,7 @@ import java.io.FileFilter;
 import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
@@ -46,101 +47,163 @@ public class InputManager {
   private static final String CHECKPOINT_SUBFOLDER_NAME = "logfeeder_checkpoints";
   public static final String DEFAULT_CHECKPOINT_EXTENSION = ".cp";
   
-  private List<Input> inputList = new ArrayList<Input>();
+  private Map<String, List<Input>> inputs = new HashMap<>();
   private Set<Input> notReadyList = new HashSet<Input>();
 
   private boolean isDrain = false;
-  private boolean isAnyInputTail = false;
 
-  private File checkPointFolderFile = null;
+  private String checkPointExtension;
+  private File checkPointFolderFile;
 
   private MetricData filesCountMetric = new MetricData("input.files.count", true);
-
-  private String checkPointExtension;
   
-  private Thread inputIsReadyMonitor = null;
+  private Thread inputIsReadyMonitor;
 
-  public List<Input> getInputList() {
-    return inputList;
+  public List<Input> getInputList(String serviceName) {
+    return inputs.get(serviceName);
   }
 
-  public void add(Input input) {
+  public void add(String serviceName, Input input) {
+    List<Input> inputList = inputs.get(serviceName);
+    if (inputList == null) {
+      inputList = new ArrayList<>();
+      inputs.put(serviceName, inputList);
+    }
     inputList.add(input);
   }
 
+  public void removeInputsForService(String serviceName) {
+    List<Input> inputList = inputs.get(serviceName);
+    for (Input input : inputList) {
+      input.setDrain(true);
+    }
+    inputList.clear();
+    inputs.remove(serviceName);
+  }
+
   public void removeInput(Input input) {
     LOG.info("Trying to remove from inputList. " + input.getShortDescription());
-    Iterator<Input> iter = inputList.iterator();
-    while (iter.hasNext()) {
-      Input iterInput = iter.next();
-      if (iterInput.equals(input)) {
-        LOG.info("Removing Input from inputList. " + input.getShortDescription());
-        iter.remove();
+    for (List<Input> inputList : inputs.values()) {
+      Iterator<Input> iter = inputList.iterator();
+      while (iter.hasNext()) {
+        Input iterInput = iter.next();
+        if (iterInput.equals(input)) {
+          LOG.info("Removing Input from inputList. " + input.getShortDescription());
+          iter.remove();
+        }
       }
     }
   }
 
   private int getActiveFilesCount() {
     int count = 0;
-    for (Input input : inputList) {
-      if (input.isReady()) {
-        count++;
+    for (List<Input> inputList : inputs.values()) {
+      for (Input input : inputList) {
+        if (input.isReady()) {
+          count++;
+        }
       }
     }
     return count;
   }
 
   public void init() {
+    initCheckPointSettings();
+    startMonitorThread();
+  }
+  
+  private void initCheckPointSettings() {
     checkPointExtension = LogFeederUtil.getStringProperty("logfeeder.checkpoint.extension", DEFAULT_CHECKPOINT_EXTENSION);
-    for (Input input : inputList) {
-      try {
-        input.init();
-        if (input.isTail()) {
-          isAnyInputTail = true;
-        }
-      } catch (Exception e) {
-        LOG.error("Error initializing input. " + input.getShortDescription(), e);
-      }
+    LOG.info("Determining valid checkpoint folder");
+    boolean isCheckPointFolderValid = false;
+    // We need to keep track of the files we are reading.
+    String checkPointFolder = LogFeederUtil.getStringProperty("logfeeder.checkpoint.folder");
+    if (!StringUtils.isEmpty(checkPointFolder)) {
+      checkPointFolderFile = new File(checkPointFolder);
+      isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
     }
-
-    if (isAnyInputTail) {
-      LOG.info("Determining valid checkpoint folder");
-      boolean isCheckPointFolderValid = false;
-      // We need to keep track of the files we are reading.
-      String checkPointFolder = LogFeederUtil.getStringProperty("logfeeder.checkpoint.folder");
-      if (!StringUtils.isEmpty(checkPointFolder)) {
-        checkPointFolderFile = new File(checkPointFolder);
+    if (!isCheckPointFolderValid) {
+      // Let's try home folder
+      String userHome = LogFeederUtil.getStringProperty("user.home");
+      if (userHome != null) {
+        checkPointFolderFile = new File(userHome, CHECKPOINT_SUBFOLDER_NAME);
+        LOG.info("Checking if home folder can be used for checkpoints. Folder=" + checkPointFolderFile);
         isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
       }
-      if (!isCheckPointFolderValid) {
-        // Let's try home folder
-        String userHome = LogFeederUtil.getStringProperty("user.home");
-        if (userHome != null) {
-          checkPointFolderFile = new File(userHome, CHECKPOINT_SUBFOLDER_NAME);
-          LOG.info("Checking if home folder can be used for checkpoints. Folder=" + checkPointFolderFile);
-          isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
-        }
-      }
-      if (!isCheckPointFolderValid) {
-        // Let's use tmp folder
-        String tmpFolder = LogFeederUtil.getStringProperty("java.io.tmpdir");
-        if (tmpFolder == null) {
-          tmpFolder = "/tmp";
-        }
-        checkPointFolderFile = new File(tmpFolder, CHECKPOINT_SUBFOLDER_NAME);
-        LOG.info("Checking if tmps folder can be used for checkpoints. Folder=" + checkPointFolderFile);
-        isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
-        if (isCheckPointFolderValid) {
-          LOG.warn("Using tmp folder " + checkPointFolderFile + " to store check points. This is not recommended." +
-              "Please set logfeeder.checkpoint.folder property");
-        }
+    }
+    if (!isCheckPointFolderValid) {
+      // Let's use tmp folder
+      String tmpFolder = LogFeederUtil.getStringProperty("java.io.tmpdir");
+      if (tmpFolder == null) {
+        tmpFolder = "/tmp";
       }
-
+      checkPointFolderFile = new File(tmpFolder, CHECKPOINT_SUBFOLDER_NAME);
+      LOG.info("Checking if tmps folder can be used for checkpoints. Folder=" + checkPointFolderFile);
+      isCheckPointFolderValid = verifyCheckPointFolder(checkPointFolderFile);
       if (isCheckPointFolderValid) {
-        LOG.info("Using folder " + checkPointFolderFile + " for storing checkpoints");
+        LOG.warn("Using tmp folder " + checkPointFolderFile + " to store check points. This is not recommended." +
+            "Please set logfeeder.checkpoint.folder property");
       }
     }
+    
+    if (isCheckPointFolderValid) {
+      LOG.info("Using folder " + checkPointFolderFile + " for storing checkpoints");
+    }
+  }
 
+  private void startMonitorThread() {
+    inputIsReadyMonitor = new Thread("InputIsReadyMonitor") {
+      @Override
+      public void run() {
+        LOG.info("Going to monitor for these missing files: " + notReadyList.toString());
+        while (true) {
+          if (isDrain) {
+            LOG.info("Exiting missing file monitor.");
+            break;
+          }
+          try {
+            Iterator<Input> iter = notReadyList.iterator();
+            while (iter.hasNext()) {
+              Input input = iter.next();
+              try {
+                if (input.isReady()) {
+                  input.monitor();
+                  iter.remove();
+                }
+              } catch (Throwable t) {
+                LOG.error("Error while enabling monitoring for input. " + input.getShortDescription());
+              }
+            }
+            Thread.sleep(30 * 1000);
+          } catch (Throwable t) {
+            // Ignore
+          }
+        }
+      }
+    };
+    
+    inputIsReadyMonitor.start();
+  }
+  
+  public void startInputs(String serviceName) {
+    for (Input input : inputs.get(serviceName)) {
+      try {
+        input.init();
+        if (input.isReady()) {
+          input.monitor();
+        } else {
+          if (input.isTail()) {
+            LOG.info("Adding input to not ready list. Note, it is possible this component is not run on this host. " +
+                "So it might not be an issue. " + input.getShortDescription());
+            notReadyList.add(input);
+          } else {
+            LOG.info("Input is not ready, so going to ignore it " + input.getShortDescription());
+          }
+        }
+      } catch (Exception e) {
+        LOG.error("Error initializing input. " + input.getShortDescription(), e);
+      }
+    }
   }
 
   private boolean verifyCheckPointFolder(File folderPathFile) {
@@ -171,70 +234,25 @@ public class InputManager {
     return checkPointFolderFile;
   }
 
-  public void monitor() {
-    for (Input input : inputList) {
-      if (input.isReady()) {
-        input.monitor();
-      } else {
-        if (input.isTail()) {
-          LOG.info("Adding input to not ready list. Note, it is possible this component is not run on this host. " +
-              "So it might not be an issue. " + input.getShortDescription());
-          notReadyList.add(input);
-        } else {
-          LOG.info("Input is not ready, so going to ignore it " + input.getShortDescription());
-        }
-      }
-    }
-    // Start the monitoring thread if any file is in tail mode
-    if (isAnyInputTail) {
-       inputIsReadyMonitor = new Thread("InputIsReadyMonitor") {
-        @Override
-        public void run() {
-          LOG.info("Going to monitor for these missing files: " + notReadyList.toString());
-          while (true) {
-            if (isDrain) {
-              LOG.info("Exiting missing file monitor.");
-              break;
-            }
-            try {
-              Iterator<Input> iter = notReadyList.iterator();
-              while (iter.hasNext()) {
-                Input input = iter.next();
-                try {
-                  if (input.isReady()) {
-                    input.monitor();
-                    iter.remove();
-                  }
-                } catch (Throwable t) {
-                  LOG.error("Error while enabling monitoring for input. " + input.getShortDescription());
-                }
-              }
-              Thread.sleep(30 * 1000);
-            } catch (Throwable t) {
-              // Ignore
-            }
-          }
-        }
-      };
-      inputIsReadyMonitor.start();
-    }
-  }
-
   void addToNotReady(Input notReadyInput) {
     notReadyList.add(notReadyInput);
   }
 
   public void addMetricsContainers(List<MetricData> metricsList) {
-    for (Input input : inputList) {
-      input.addMetricsContainers(metricsList);
+    for (List<Input> inputList : inputs.values()) {
+      for (Input input : inputList) {
+        input.addMetricsContainers(metricsList);
+      }
     }
     filesCountMetric.value = getActiveFilesCount();
     metricsList.add(filesCountMetric);
   }
 
   public void logStats() {
-    for (Input input : inputList) {
-      input.logStat();
+    for (List<Input> inputList : inputs.values()) {
+      for (Input input : inputList) {
+        input.logStat();
+      }
     }
 
     filesCountMetric.value = getActiveFilesCount();
@@ -308,14 +326,16 @@ public class InputManager {
 
   public void waitOnAllInputs() {
     //wait on inputs
-    for (Input input : inputList) {
-      if (input != null) {
-        Thread inputThread = input.getThread();
-        if (inputThread != null) {
-          try {
-            inputThread.join();
-          } catch (InterruptedException e) {
-            // ignore
+    for (List<Input> inputList : inputs.values()) {
+      for (Input input : inputList) {
+        if (input != null) {
+          Thread inputThread = input.getThread();
+          if (inputThread != null) {
+            try {
+              inputThread.join();
+            } catch (InterruptedException e) {
+              // ignore
+            }
           }
         }
       }
@@ -332,17 +352,21 @@ public class InputManager {
   }
 
   public void checkInAll() {
-    for (Input input : inputList) {
-      input.lastCheckIn();
+    for (List<Input> inputList : inputs.values()) {
+      for (Input input : inputList) {
+        input.lastCheckIn();
+      }
     }
   }
 
   public void close() {
-    for (Input input : inputList) {
-      try {
-        input.setDrain(true);
-      } catch (Throwable t) {
-        LOG.error("Error while draining. input=" + input.getShortDescription(), t);
+    for (List<Input> inputList : inputs.values()) {
+      for (Input input : inputList) {
+        try {
+          input.setDrain(true);
+        } catch (Throwable t) {
+          LOG.error("Error while draining. input=" + input.getShortDescription(), t);
+        }
       }
     }
     isDrain = true;
@@ -352,14 +376,16 @@ public class InputManager {
     int waitTimeMS = 1000;
     for (int i = 0; i < iterations; i++) {
       boolean allClosed = true;
-      for (Input input : inputList) {
-        if (!input.isClosed()) {
-          try {
-            allClosed = false;
-            LOG.warn("Waiting for input to close. " + input.getShortDescription() + ", " + (iterations - i) + " more seconds");
-            Thread.sleep(waitTimeMS);
-          } catch (Throwable t) {
-            // Ignore
+      for (List<Input> inputList : inputs.values()) {
+        for (Input input : inputList) {
+          if (!input.isClosed()) {
+            try {
+              allClosed = false;
+              LOG.warn("Waiting for input to close. " + input.getShortDescription() + ", " + (iterations - i) + " more seconds");
+              Thread.sleep(waitTimeMS);
+            } catch (Throwable t) {
+              // Ignore
+            }
           }
         }
       }
@@ -370,9 +396,11 @@ public class InputManager {
     }
     
     LOG.warn("Some inputs were not closed after " + iterations + " iterations");
-    for (Input input : inputList) {
-      if (!input.isClosed()) {
-        LOG.warn("Input not closed. Will ignore it." + input.getShortDescription());
+    for (List<Input> inputList : inputs.values()) {
+      for (Input input : inputList) {
+        if (!input.isClosed()) {
+          LOG.warn("Input not closed. Will ignore it." + input.getShortDescription());
+        }
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
index 2222f93..d193cdb 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/input/InputSimulate.java
@@ -35,6 +35,7 @@ import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.filter.FilterJSON;
 import org.apache.ambari.logfeeder.output.Output;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
+import org.apache.commons.collections.MapUtils;
 import org.apache.log4j.Logger;
 import org.apache.solr.common.util.Base64;
 
@@ -46,10 +47,12 @@ public class InputSimulate extends Input {
   private static final String LOG_TEXT_PATTERN = "{ logtime=\"%d\", level=\"%s\", log_message=\"%s\", host=\"%s\"}";
   
   private static final Map<String, String> typeToFilePath = new HashMap<>();
+  private static List<String> inputTypes = new ArrayList<>();
   public static void loadTypeToFilePath(List<Map<String, Object>> inputList) {
     for (Map<String, Object> input : inputList) {
       if (input.containsKey("type") && input.containsKey("path")) {
         typeToFilePath.put((String)input.get("type"), (String)input.get("path"));
+        inputTypes.add((String)input.get("type"));
       }
     }
   }
@@ -90,13 +93,9 @@ public class InputSimulate extends Input {
   
   private List<String> getSimulatedLogTypes() {
     String logsToSimulate = LogFeederUtil.getStringProperty("logfeeder.simulate.log_ids");
-    if (logsToSimulate == null) {
-      return new ArrayList<>(typeToFilePath.keySet());
-    } else {
-      List<String> simulatedLogTypes = Arrays.asList(logsToSimulate.split(","));
-      simulatedLogTypes.retainAll(typeToFilePath.keySet());
-      return simulatedLogTypes;
-    }
+    return (logsToSimulate == null) ?
+      inputTypes :
+      Arrays.asList(logsToSimulate.split(","));
   }
 
   @Override
@@ -120,11 +119,12 @@ public class InputSimulate extends Input {
 
   @Override
   void start() throws Exception {
-    if (types.isEmpty())
-      return;
-    
     getFirstFilter().setOutputManager(outputManager);
     while (true) {
+      if (types.isEmpty()) {
+        try { Thread.sleep(sleepMillis); } catch(Exception e) { /* Ignore */ }
+        continue;
+      }
       String type = imitateRandomLogFile();
       
       String line = getLine();
@@ -139,7 +139,7 @@ public class InputSimulate extends Input {
   private String imitateRandomLogFile() {
     int typePos = random.nextInt(types.size());
     String type = types.get(typePos);
-    String filePath = typeToFilePath.get(type);
+    String filePath = MapUtils.getString(typeToFilePath, type, "path of " + type);
     
     configs.put("type", type);
     setFilePath(filePath);

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java
index 3c80e50..9a9f973 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java
@@ -21,7 +21,6 @@ package org.apache.ambari.logfeeder.output;
 
 import java.io.File;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.Date;
 import java.util.List;
 import java.util.Map;
@@ -30,10 +29,8 @@ import java.util.UUID;
 import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.InputMarker;
-import org.apache.ambari.logfeeder.input.cache.LRUCache;
 import org.apache.ambari.logfeeder.logconfig.FilterLogData;
 import org.apache.ambari.logfeeder.metrics.MetricData;
-import org.apache.ambari.logfeeder.util.DateUtil;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.ambari.logfeeder.util.MurmurHash;
 import org.apache.commons.lang3.StringUtils;
@@ -63,10 +60,6 @@ public class OutputManager {
     this.outputs.add(output);
   }
 
-  public void retainUsedOutputs(Collection<Output> usedOutputs) {
-    outputs.retainAll(usedOutputs);
-  }
-
   public void init() throws Exception {
     for (Output output : outputs) {
       output.init();

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
index 26f1ddb..d0f51b2 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
@@ -21,7 +21,7 @@ package org.apache.ambari.logfeeder.output;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
-import org.apache.ambari.logfeeder.LogFeeder;
+import org.apache.ambari.logfeeder.common.ConfigHandler;
 import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.input.InputMarker;
@@ -133,7 +133,7 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
 
 
   private Map<String, Object> getGlobalConfig() {
-    Map<String, Object> globalConfig = LogFeeder.globalConfigs;
+    Map<String, Object> globalConfig = ConfigHandler.globalConfigs;
     if (globalConfig == null) {
       globalConfig = new HashMap<>();
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
index 73cf449..bb2f0a9 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
@@ -80,6 +80,9 @@ public class LogFeederUtil {
   }
   
   private static Properties props;
+  public static Properties getProperties() {
+    return props;
+  }
 
   /**
    * This method will read the properties from System, followed by propFile and finally from the map

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SSLUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SSLUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SSLUtil.java
index 80b34e0..d963de3 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SSLUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SSLUtil.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
 
 import java.io.File;
+import java.nio.charset.Charset;
 
 public class SSLUtil {
   private static final Logger LOG = Logger.getLogger(SSLUtil.class);
@@ -120,10 +121,10 @@ public class SSLUtil {
     try {
       File pwdFile = new File(LOGFEEDER_CERT_DEFAULT_FOLDER, fileName);
       if (!pwdFile.exists()) {
-        FileUtils.writeStringToFile(pwdFile, LOGFEEDER_STORE_DEFAULT_PASSWORD);
+        FileUtils.writeStringToFile(pwdFile, LOGFEEDER_STORE_DEFAULT_PASSWORD, Charset.defaultCharset());
         return LOGFEEDER_STORE_DEFAULT_PASSWORD;
       } else {
-        return FileUtils.readFileToString(pwdFile);
+        return FileUtils.readFileToString(pwdFile, Charset.defaultCharset());
       }
     } catch (Exception e) {
       LOG.warn("Exception occurred during read/write password file for keystore/truststore.", e);

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml
index 7ef967c..8a3d26d 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml
@@ -38,9 +38,9 @@
   <appender name="rolling_file_json"
     class="org.apache.ambari.logsearch.appender.LogsearchRollingFileAppender">
     <param name="file" value="logs/logsearch-logfeeder.json" />
-		<param name="append" value="true" />
-		<param name="maxFileSize" value="10MB" />
-		<param name="maxBackupIndex" value="10" />
+    <param name="append" value="true" />
+    <param name="maxFileSize" value="10MB" />
+    <param name="maxBackupIndex" value="10" />
     <layout class="org.apache.ambari.logsearch.appender.LogsearchConversion" />
   </appender> 
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
index 08aa564..522f6bb 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
@@ -20,6 +20,7 @@ package org.apache.ambari.logfeeder.input;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -180,7 +181,7 @@ public class InputFileTest {
 
   private File createFile(String filename) throws IOException {
     File newFile = new File(FileUtils.getTempDirectoryPath() + TEST_DIR_NAME + filename);
-    FileUtils.writeStringToFile(newFile, TEST_LOG_FILE_CONTENT);
+    FileUtils.writeStringToFile(newFile, TEST_LOG_FILE_CONTENT, Charset.defaultCharset());
     return newFile;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputManagerTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputManagerTest.java
index 368a930..e9bbe7e 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputManagerTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputManagerTest.java
@@ -42,23 +42,23 @@ public class InputManagerTest {
     replay(input1, input2, input3, input4);
     
     InputManager manager = new InputManager();
-    manager.add(input1);
-    manager.add(input2);
-    manager.add(input3);
+    manager.add("serviceName", input1);
+    manager.add("serviceName", input2);
+    manager.add("serviceName", input3);
     
     manager.removeInput(input3);
     manager.removeInput(input4);
     
     verify(input1, input2, input3, input4);
     
-    List<Input> inputList = manager.getInputList();
+    List<Input> inputList = manager.getInputList("serviceName");
     assertEquals(inputList.size(), 2);
     assertEquals(inputList.get(0), input1);
     assertEquals(inputList.get(1), input2);
   }
 
   @Test
-  public void testInputManager_init() throws Exception {
+  public void testInputManager_monitor() throws Exception {
     Input input1 = strictMock(Input.class);
     Input input2 = strictMock(Input.class);
     Input input3 = strictMock(Input.class);
@@ -67,28 +67,6 @@ public class InputManagerTest {
     input2.init(); expectLastCall();
     input3.init(); expectLastCall();
     
-    expect(input1.isTail()).andReturn(false);
-    expect(input2.isTail()).andReturn(false);
-    expect(input3.isTail()).andReturn(false);
-    
-    replay(input1, input2, input3);
-    
-    InputManager manager = new InputManager();
-    manager.add(input1);
-    manager.add(input2);
-    manager.add(input3);
-    
-    manager.init();
-    
-    verify(input1, input2, input3);
-  }
-
-  @Test
-  public void testInputManager_monitor() throws Exception {
-    Input input1 = strictMock(Input.class);
-    Input input2 = strictMock(Input.class);
-    Input input3 = strictMock(Input.class);
-    
     expect(input1.isReady()).andReturn(true);
     expect(input2.isReady()).andReturn(true);
     expect(input3.isReady()).andReturn(false);
@@ -101,11 +79,11 @@ public class InputManagerTest {
     replay(input1, input2, input3);
     
     InputManager manager = new InputManager();
-    manager.add(input1);
-    manager.add(input2);
-    manager.add(input3);
+    manager.add("serviceName", input1);
+    manager.add("serviceName", input2);
+    manager.add("serviceName", input3);
     
-    manager.monitor();
+    manager.startInputs("serviceName");
     
     verify(input1, input2, input3);
   }
@@ -130,9 +108,9 @@ public class InputManagerTest {
     replay(input1, input2, input3);
     
     InputManager manager = new InputManager();
-    manager.add(input1);
-    manager.add(input2);
-    manager.add(input3);
+    manager.add("serviceName", input1);
+    manager.add("serviceName", input2);
+    manager.add("serviceName", input3);
     
     manager.addMetricsContainers(metrics);
     
@@ -156,9 +134,9 @@ public class InputManagerTest {
     replay(input1, input2, input3);
     
     InputManager manager = new InputManager();
-    manager.add(input1);
-    manager.add(input2);
-    manager.add(input3);
+    manager.add("serviceName", input1);
+    manager.add("serviceName", input2);
+    manager.add("serviceName", input3);
     
     manager.logStats();
     
@@ -182,9 +160,9 @@ public class InputManagerTest {
     replay(input1, input2, input3);
     
     InputManager manager = new InputManager();
-    manager.add(input1);
-    manager.add(input2);
-    manager.add(input3);
+    manager.add("serviceName", input1);
+    manager.add("serviceName", input2);
+    manager.add("serviceName", input3);
     
     manager.waitOnAllInputs();
     
@@ -204,9 +182,9 @@ public class InputManagerTest {
     replay(input1, input2, input3);
     
     InputManager manager = new InputManager();
-    manager.add(input1);
-    manager.add(input2);
-    manager.add(input3);
+    manager.add("serviceName", input1);
+    manager.add("serviceName", input2);
+    manager.add("serviceName", input3);
     
     manager.checkInAll();
     
@@ -230,9 +208,9 @@ public class InputManagerTest {
     replay(input1, input2, input3);
     
     InputManager manager = new InputManager();
-    manager.add(input1);
-    manager.add(input2);
-    manager.add(input3);
+    manager.add("serviceName", input1);
+    manager.add("serviceName", input2);
+    manager.add("serviceName", input3);
     
     manager.close();
     

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java
index 0a0a195..cf1d25a 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java
@@ -37,25 +37,22 @@ import org.junit.Test;
 public class OutputManagerTest {
 
   @Test
-  public void testOutputManager_addAndRemoveOutputs() {
+  public void testOutputManager_addOutputs() {
     Output output1 = strictMock(Output.class);
     Output output2 = strictMock(Output.class);
     Output output3 = strictMock(Output.class);
-    Output output4 = strictMock(Output.class);
     
-    replay(output1, output2, output3, output4);
+    replay(output1, output2, output3);
     
     OutputManager manager = new OutputManager();
     manager.add(output1);
     manager.add(output2);
     manager.add(output3);
     
-    manager.retainUsedOutputs(Arrays.asList(output1, output2, output4));
-    
-    verify(output1, output2, output3, output4);
+    verify(output1, output2, output3);
     
     List<Output> outputs = manager.getOutputs();
-    assertEquals(outputs.size(), 2);
+    assertEquals(outputs.size(), 3);
     assertEquals(outputs.get(0), output1);
     assertEquals(outputs.get(1), output2);
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/pom.xml b/ambari-logsearch/ambari-logsearch-server/pom.xml
index 52bda8d..b505c12 100755
--- a/ambari-logsearch/ambari-logsearch-server/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-server/pom.xml
@@ -1,4 +1,3 @@
-<?xml version="1.0"?>
 <!--
    Licensed to the Apache Software Foundation (ASF) under one or more
    contributor license agreements.  See the NOTICE file distributed with
@@ -573,6 +572,18 @@
           <groupId>com.sun.jersey</groupId>
           <artifactId>jersey-server</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.curator</groupId>
+          <artifactId>curator-framework</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.curator</groupId>
+          <artifactId>curator-client</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.curator</groupId>
+          <artifactId>curator-recipes</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -587,6 +598,11 @@
     </dependency>
     <dependency>
       <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-logsearch-config-zookeeper</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
       <artifactId>ambari-metrics-common</artifactId>
       <version>${project.version}</version>
     </dependency>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/LogSearch.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/LogSearch.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/LogSearch.java
index b75da0e..45281b6 100644
--- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/LogSearch.java
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/LogSearch.java
@@ -82,6 +82,7 @@ public class LogSearch {
   public void run(String[] argv) throws Exception {
     SSLUtil.ensureStorePasswords();
     SSLUtil.loadKeystore();
+    
     Server server = buildSever(argv);
     HandlerList handlers = new HandlerList();
     handlers.addHandler(createSwaggerContext());

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/PropertiesHelper.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/PropertiesHelper.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/PropertiesHelper.java
index 73a43ad..a2a7f5e 100644
--- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/PropertiesHelper.java
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/common/PropertiesHelper.java
@@ -40,6 +40,9 @@ public class PropertiesHelper extends PropertyPlaceholderConfigurer {
   private static final String LOGSEARCH_PROP_FILE="logsearch.properties";
   
   private static Map<String, String> propertiesMap;
+  public static Map<String, String> getProperties() {
+    return propertiesMap;
+  }
 
   private PropertiesHelper() {
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/LogSearchConfigState.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/LogSearchConfigState.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/LogSearchConfigState.java
new file mode 100644
index 0000000..7ca701d
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/conf/global/LogSearchConfigState.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.conf.global;
+
+import javax.inject.Named;
+
+@Named
+public class LogSearchConfigState {
+  private volatile boolean logSearchConfigAvailable;
+
+  public boolean isLogSearchConfigAvailable() {
+    return logSearchConfigAvailable;
+  }
+
+  public void setLogSearchConfigAvailable(boolean logSearchConfigAvailable) {
+    this.logSearchConfigAvailable = logSearchConfigAvailable;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/Configurer.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/Configurer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/Configurer.java
new file mode 100644
index 0000000..141299c
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/Configurer.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ambari.logsearch.configurer;
+
+interface Configurer {
+  void start();
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogSearchConfigConfigurer.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogSearchConfigConfigurer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogSearchConfigConfigurer.java
new file mode 100644
index 0000000..978e91a
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogSearchConfigConfigurer.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.configurer;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+
+import org.apache.ambari.logsearch.common.PropertiesHelper;
+import org.apache.ambari.logsearch.conf.global.LogSearchConfigState;
+import org.apache.ambari.logsearch.config.api.LogSearchConfig;
+import org.apache.ambari.logsearch.config.api.LogSearchConfigFactory;
+import org.apache.ambari.logsearch.config.api.LogSearchConfig.Component;
+import org.apache.ambari.logsearch.config.zookeeper.LogSearchConfigZK;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Named
+public class LogSearchConfigConfigurer implements Configurer {
+  private static final Logger logger = LoggerFactory.getLogger(LogSearchConfigConfigurer.class);
+  
+  private static final int RETRY_INTERVAL_SECONDS = 10;
+  
+  private static LogSearchConfig logSearchConfig;
+  public static LogSearchConfig getConfig() {
+    return logSearchConfig;
+  }
+  
+  @Inject
+  private LogSearchConfigState logSearchConfigState;
+  
+  public void start() {
+    Thread setupThread = new Thread("setup_logsearch_config") {
+      @Override
+      public void run() {
+        logger.info("Started thread to set up log search config");
+        while (true) {
+          try {
+            logSearchConfig = LogSearchConfigFactory.createLogSearchConfig(Component.SERVER, PropertiesHelper.getProperties(),
+                LogSearchConfigZK.class);
+            logSearchConfigState.setLogSearchConfigAvailable(true);
+            break;
+          } catch (Exception e) {
+            logger.warn("Could not initialize Log Search config, going to sleep for " + RETRY_INTERVAL_SECONDS + " seconds ", e);
+            try { Thread.sleep(RETRY_INTERVAL_SECONDS * 1000); } catch (Exception e2) {/* ignore */}
+          }
+        }
+      }
+    };
+    setupThread.setDaemon(true);
+    setupThread.start();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogfeederFilterConfigurer.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogfeederFilterConfigurer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogfeederFilterConfigurer.java
index 34e1bec..c2d27f9 100644
--- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogfeederFilterConfigurer.java
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/LogfeederFilterConfigurer.java
@@ -24,7 +24,7 @@ import org.apache.ambari.logsearch.dao.UserConfigSolrDao;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class LogfeederFilterConfigurer implements SolrConfigurer {
+public class LogfeederFilterConfigurer implements Configurer {
 
   private static final Logger LOG = LoggerFactory.getLogger(LogfeederFilterConfigurer.class);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrAuditAliasConfigurer.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrAuditAliasConfigurer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrAuditAliasConfigurer.java
index c80a10d..1eca94b 100644
--- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrAuditAliasConfigurer.java
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrAuditAliasConfigurer.java
@@ -36,7 +36,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 
-public class SolrAuditAliasConfigurer implements SolrConfigurer {
+public class SolrAuditAliasConfigurer implements Configurer {
 
   private static final Logger LOG = LoggerFactory.getLogger(SolrAuditAliasConfigurer.class);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrCollectionConfigurer.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrCollectionConfigurer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrCollectionConfigurer.java
index 7edc6aa..2a4c790 100644
--- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrCollectionConfigurer.java
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrCollectionConfigurer.java
@@ -44,7 +44,7 @@ import java.util.List;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
-public class SolrCollectionConfigurer implements SolrConfigurer {
+public class SolrCollectionConfigurer implements Configurer {
 
   private Logger LOG = LoggerFactory.getLogger(SolrCollectionConfigurer.class);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrConfigurer.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrConfigurer.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrConfigurer.java
deleted file mode 100644
index 67cb9d1..0000000
--- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/configurer/SolrConfigurer.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ambari.logsearch.configurer;
-
-interface SolrConfigurer {
-  void start();
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
index 984e834..2afa3dd 100644
--- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/doc/DocConstants.java
@@ -116,6 +116,12 @@ public class DocConstants {
     public static final String GET_ALL_USER_NAMES_OD = "Get all user names";
   }
 
+  public class ShipperConfigOperationDescriptions {
+    public static final String GET_SERVICE_NAMES = "Get service names";
+    public static final String GET_SHIPPER_CONFIG = "Get shipper config";
+    public static final String SET_SHIPPER_CONFIG = "Set shipper config";
+  }
+
   public class StatusOperationDescriptions {
     public static final String STATUS_OD = "Get statuses for collections (not health state - show true if something already done)";
     public static final String SERVICE_LOGS_STATUS_OD = "Get statuses for service log collection (not health state - show true if something already done)";

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ShipperConfigManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ShipperConfigManager.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ShipperConfigManager.java
new file mode 100644
index 0000000..251619b
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ShipperConfigManager.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.manager;
+
+import java.util.List;
+
+import org.apache.ambari.logsearch.conf.global.LogSearchConfigState;
+import org.apache.ambari.logsearch.configurer.LogSearchConfigConfigurer;
+import org.apache.log4j.Logger;
+
+import javax.annotation.PostConstruct;
+import javax.inject.Inject;
+import javax.inject.Named;
+import javax.ws.rs.core.Response;
+
+@Named
+public class ShipperConfigManager extends JsonManagerBase {
+
+  private static final Logger logger = Logger.getLogger(ShipperConfigManager.class);
+  
+  @Inject
+  private LogSearchConfigState logSearchConfigState;
+
+  @Inject
+  private LogSearchConfigConfigurer logSearchConfigConfigurer;
+
+  @PostConstruct
+  private void postConstructor() {
+    logSearchConfigConfigurer.start();
+  }
+  
+  public List<String> getServices(String clusterName) {
+    if (!logSearchConfigState.isLogSearchConfigAvailable()) {
+      logger.warn("Log Search Config not available yet");
+      return null;
+    }
+    
+    return LogSearchConfigConfigurer.getConfig().getServices(clusterName);
+  }
+
+  public String getInputConfig(String clusterName, String serviceName) {
+    if (!logSearchConfigState.isLogSearchConfigAvailable()) {
+      logger.warn("Log Search Config not available yet");
+      return null;
+    }
+    
+    return LogSearchConfigConfigurer.getConfig().getInputConfig(clusterName, serviceName);
+  }
+
+  public Response setInputConfig(String clusterName, String serviceName, String inputConfig) {
+    if (!logSearchConfigState.isLogSearchConfigAvailable()) {
+      logger.warn("Log Search Config not available yet");
+      return Response.serverError().build();
+    }
+    
+    try {
+      LogSearchConfigConfigurer.getConfig().setInputConfig(clusterName, serviceName, inputConfig);
+      return Response.ok().build();
+    } catch (Exception e) {
+      logger.warn("Could not write input config", e);
+      return Response.serverError().build();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/ShipperConfigResource.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/ShipperConfigResource.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/ShipperConfigResource.java
new file mode 100644
index 0000000..38821bc
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/rest/ShipperConfigResource.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.rest;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+import javax.ws.rs.GET;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Response;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
+import org.apache.ambari.logsearch.manager.ShipperConfigManager;
+import org.springframework.context.annotation.Scope;
+
+import java.util.List;
+
+import static org.apache.ambari.logsearch.doc.DocConstants.ShipperConfigOperationDescriptions.*;
+
+@Api(value = "shipper", description = "Shipper config operations")
+@Path("shipper")
+@Named
+@Scope("request")
+public class ShipperConfigResource {
+
+  @Inject
+  private ShipperConfigManager shipperConfigManager;
+
+  @GET
+  @Path("/input/{clusterName}/services")
+  @Produces({"application/json"})
+  @ApiOperation(GET_SERVICE_NAMES)
+  public List<String> getServices(@PathParam("clusterName") String clusterName) {
+    return shipperConfigManager.getServices(clusterName);
+  }
+
+  @GET
+  @Path("/input/{clusterName}/{serviceName}")
+  @Produces({"application/json"})
+  @ApiOperation(GET_SHIPPER_CONFIG)
+  public String getShipperConfig(@PathParam("clusterName") String clusterName, @PathParam("serviceName") String serviceName) {
+    return shipperConfigManager.getInputConfig(clusterName, serviceName);
+  }
+
+  @PUT
+  @Path("/input/{clusterName}/{serviceName}")
+  @Produces("text/plain")
+  @ApiOperation(SET_SHIPPER_CONFIG)
+  public Response setShipperConfig(String body, @PathParam("clusterName") String clusterName, @PathParam("serviceName")
+    String serviceName) {
+    return shipperConfigManager.setInputConfig(clusterName, serviceName, body);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/ambari-logsearch-web/.gitignore
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/.gitignore b/ambari-logsearch/ambari-logsearch-web/.gitignore
new file mode 100644
index 0000000..ae3c172
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/.gitignore
@@ -0,0 +1 @@
+/bin/

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties b/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
index 068bc3a..d171803 100644
--- a/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
+++ b/ambari-logsearch/docker/test-config/logfeeder/logfeeder.properties
@@ -13,17 +13,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+cluster.name=cl1
 logfeeder.checkpoint.folder=/root/checkpoints
 logfeeder.metrics.collector.hosts=
+logfeeder.config.dir=/root/test-config/logfeeder/shipper-conf/
 logfeeder.config.files=shipper-conf/global.config.json,\
-  shipper-conf/output.config.json,\
-  shipper-conf/input.config-zookeeper.json,\
-  shipper-conf/input.config-logsearch.json,\
-  shipper-conf/input.config-hst.json,\
-  shipper-conf/input.config-system_message.json,\
-  shipper-conf/input.config-secure_log.json,\
-  shipper-conf/input.config-hdfs.json,\
-  shipper-conf/input.config-ambari.json
+  shipper-conf/output.config.json
 logfeeder.log.filter.enable=true
 logfeeder.solr.config.interval=5
 logfeeder.solr.core.config.name=history
@@ -33,3 +28,4 @@ logfeeder.cache.size=100
 logfeeder.cache.key.field=log_message
 logfeeder.cache.dedup.interval=1000
 logfeeder.cache.last.dedup.enabled=true
+logsearch.config.zk_connect_string=localhost:9983

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/docker/test-config/logsearch/logsearch.properties
----------------------------------------------------------------------
diff --git a/ambari-logsearch/docker/test-config/logsearch/logsearch.properties b/ambari-logsearch/docker/test-config/logsearch/logsearch.properties
index cfa985d..684d1dc 100644
--- a/ambari-logsearch/docker/test-config/logsearch/logsearch.properties
+++ b/ambari-logsearch/docker/test-config/logsearch/logsearch.properties
@@ -56,3 +56,5 @@ logsearch.auth.simple.enable=false
 logsearch.auth.external_auth.enable=false
 
 logsearch.protocol=http
+
+logsearch.config.zk_connect_string=localhost:9983

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-logsearch/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/pom.xml b/ambari-logsearch/pom.xml
index 1e63ced..6f78abe 100644
--- a/ambari-logsearch/pom.xml
+++ b/ambari-logsearch/pom.xml
@@ -34,6 +34,8 @@
     <module>ambari-logsearch-server</module>
     <module>ambari-logsearch-web</module>
     <module>ambari-logsearch-logfeeder</module>
+    <module>ambari-logsearch-config-api</module>
+    <module>ambari-logsearch-config-zookeeper</module>
     <module>ambari-logsearch-it</module>
   </modules>
   <properties>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
index d9b9b57..8a4afe9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog300.java
@@ -23,8 +23,10 @@ import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import javax.persistence.EntityManager;
 
@@ -139,6 +141,7 @@ public class UpgradeCatalog300 extends AbstractUpgradeCatalog {
     addNewConfigurationsFromXml();
     showHcatDeletedUserMessage();
     setStatusOfStagesAndRequests();
+    updateLogSearchConfigs();
   }
 
   protected void showHcatDeletedUserMessage() {
@@ -274,4 +277,36 @@ public class UpgradeCatalog300 extends AbstractUpgradeCatalog {
     // the cluster configuration mapping table
     dbAccessor.dropTable(CLUSTER_CONFIG_MAPPING_TABLE);
   }
+  
+  /**
+   * Updates Log Search configs.
+   *
+   * @throws AmbariException
+   */
+  protected void updateLogSearchConfigs() throws AmbariException {
+    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+    Clusters clusters = ambariManagementController.getClusters();
+    if (clusters != null) {
+      Map<String, Cluster> clusterMap = clusters.getClusters();
+
+      if (clusterMap != null && !clusterMap.isEmpty()) {
+        for (final Cluster cluster : clusterMap.values()) {
+          Collection<Config> configs = cluster.getAllConfigs();
+          for (Config config : configs) {
+            String configType = config.getType();
+            if (!configType.endsWith("-logsearch-conf")) {
+              continue;
+            }
+            
+            Set<String> removeProperties = new HashSet<>();
+            removeProperties.add("service_name");
+            removeProperties.add("component_mappings");
+            removeProperties.add("content");
+            
+            removeConfigurationPropertiesFromCluster(cluster, configType, removeProperties);
+          }
+        }
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/configuration/accumulo-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/configuration/accumulo-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/configuration/accumulo-logsearch-conf.xml
deleted file mode 100644
index ff4f695..0000000
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/configuration/accumulo-logsearch-conf.xml
+++ /dev/null
@@ -1,124 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Accumulo</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>ACCUMULO_MASTER:accumulo_master;ACCUMULO_MONITOR:accumulo_monitor;ACCUMULO_GC:accumulo_gc;ACCUMULO_TRACER:accumulo_tracer;ACCUMULO_TSERVER:accumulo_tserver</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"accumulo_gc",
-      "rowtype":"service",
-      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/gc_*.log"
-    },
-    {
-      "type":"accumulo_master",
-      "rowtype":"service",
-      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/master_*.log"
-    },
-    {
-      "type":"accumulo_monitor",
-      "rowtype":"service",
-      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/monitor_*.log"
-    },
-    {
-      "type":"accumulo_tracer",
-      "rowtype":"service",
-      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/tracer_*.log"
-    },
-    {
-      "type":"accumulo_tserver",
-      "rowtype":"service",
-      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/tserver_*.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "accumulo_master"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     },
-    {
-      "filter":"grok",
-      "comment":"This one has one extra space after LEVEL",
-      "conditions":{
-        "fields":{
-          "type":[
-            "accumulo_gc",
-            "accumulo_monitor",
-            "accumulo_tracer",
-            "accumulo_tserver"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %X{application} %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-        }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/templates/input.config-accumulo.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/templates/input.config-accumulo.json.j2 b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/templates/input.config-accumulo.json.j2
new file mode 100644
index 0000000..d093732
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/templates/input.config-accumulo.json.j2
@@ -0,0 +1,92 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"accumulo_gc",
+      "rowtype":"service",
+      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/gc_*.log"
+    },
+    {
+      "type":"accumulo_master",
+      "rowtype":"service",
+      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/master_*.log"
+    },
+    {
+      "type":"accumulo_monitor",
+      "rowtype":"service",
+      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/monitor_*.log"
+    },
+    {
+      "type":"accumulo_tracer",
+      "rowtype":"service",
+      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/tracer_*.log"
+    },
+    {
+      "type":"accumulo_tserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/accumulo-env/accumulo_log_dir', '/var/log/accumulo')}}/tserver_*.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "accumulo_master"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    },
+    {
+      "filter":"grok",
+      "comment":"This one has one extra space after LEVEL",
+      "conditions":{
+        "fields":{
+          "type":[
+            "accumulo_gc",
+            "accumulo_monitor",
+            "accumulo_tracer",
+            "accumulo_tserver"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %X{application} %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-logsearch-conf.xml
deleted file mode 100644
index 1de8c46..0000000
--- a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/configuration/infra-logsearch-conf.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Infra</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>INFRA_SOLR:infra_solr</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"infra_solr",
-      "rowtype":"service",
-      "path":"{{default('/configurations/infra-solr-env/infra_solr_log_dir', '/var/log/ambari-infra-solr')}}/solr.log"
-    }
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "infra_solr"
-          ]
-        }
-      },
-      "log4j_format":"",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-        }
-      }
-    }
-  ]
-}
-  </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/templates/input.config-ambari-infra.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/templates/input.config-ambari-infra.json.j2 b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/templates/input.config-ambari-infra.json.j2
new file mode 100644
index 0000000..af530e7
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA/0.1.0/package/templates/input.config-ambari-infra.json.j2
@@ -0,0 +1,48 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"infra_solr",
+      "rowtype":"service",
+      "path":"{{default('/configurations/infra-solr-env/infra_solr_log_dir', '/var/log/ambari-infra-solr')}}/solr.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "infra_solr"
+          ]
+        }
+      },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-logsearch-conf.xml
deleted file mode 100644
index 72d44db..0000000
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-logsearch-conf.xml
+++ /dev/null
@@ -1,201 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>AMS</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>METRICS_COLLECTOR:ams_collector,ams_hbase_master,ams_hbase_regionserver;METRICS_MONITOR:ams_monitor;METRICS_GRAFANA:ams_grafana</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"ams_hbase_master",
-      "rowtype":"service",
-      "path":"{{default('/configurations/ams-env/metrics_collector_log_dir', '/var/log/ambari-metrics-collector')}}/hbase-ams-master-*.log"
-    },
-    {
-      "type":"ams_hbase_regionserver",
-      "rowtype":"service",
-      "path":"{{default('/configurations/ams-env/metrics_collector_log_dir', '/var/log/ambari-metrics-collector')}}/hbase-ams-regionserver-*.log"
-    },
-    {
-      "type":"ams_collector",
-      "rowtype":"service",
-      "path":"{{default('/configurations/ams-env/metrics_collector_log_dir', '/var/log/ambari-metrics-collector')}}/ambari-metrics-collector.log"
-    },
-    {
-      "type":"ams_monitor",
-      "rowtype":"service",
-      "path":"{{default('/configurations/ams-env/metrics_monitor_log_dir', '/var/log/ambari-metrics-monitor')}}/ambari-metrics-monitor.out"
-    },
-    {
-      "type":"ams_grafana",
-      "rowtype":"service",
-      "path":"{{default('/configurations/ams-grafana-env/metrics_grafana_log_dir', '/var/log/ambari-metrics-grafana')}}/grafana.log"
-    }
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ams_collector"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} %p %c: %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ams_hbase_master",
-            "ams_hbase_regionserver"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ams_grafana"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
-      "multiline_pattern":"^(%{DATESTAMP:logtime})",
-      "message_pattern":"(?m)^%{DATESTAMP:logtime}%{SPACE}\\[%{WORD:level}\\]%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy/MM/dd HH:mm:ss"
-          }
-         },
-        "level":[
-          {
-            "map_fieldvalue":{
-              "pre_value":"I",
-              "post_value":"INFO"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"W",
-              "post_value":"WARN"
-            }
-          },
-          {
-            "map_fieldvalue":{
-              "pre_value":"D",
-              "post_value":"DEBUG"
-             }
-           },
-           {
-             "map_fieldvalue":{
-               "pre_value":"E",
-               "post_value":"ERROR"
-             }
-           },
-           {
-             "map_fieldvalue":{
-               "pre_value":"F",
-               "post_value":"FATAL"
-             }
-           }
-         ]
-       }
-     },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "ams_monitor"
-          ]
-        }
-      },
-      "log4j_format":"",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{JAVAFILE:file}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       },
-      "level":[
-        {
-          "map_fieldvalue":{
-            "pre_value":"WARNING",
-            "post_value":"WARN"
-          }
-        }
-      ]
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>


[3/5] ambari git commit: AMBARI-20578 Log Search Configuration API (mgergely)

Posted by mg...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/input.config-ambari-metrics.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/input.config-ambari-metrics.json.j2 b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/input.config-ambari-metrics.json.j2
new file mode 100644
index 0000000..ef823b2
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/input.config-ambari-metrics.json.j2
@@ -0,0 +1,169 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"ams_hbase_master",
+      "rowtype":"service",
+      "path":"{{default('/configurations/ams-env/metrics_collector_log_dir', '/var/log/ambari-metrics-collector')}}/hbase-ams-master-*.log"
+    },
+    {
+      "type":"ams_hbase_regionserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/ams-env/metrics_collector_log_dir', '/var/log/ambari-metrics-collector')}}/hbase-ams-regionserver-*.log"
+    },
+    {
+      "type":"ams_collector",
+      "rowtype":"service",
+      "path":"{{default('/configurations/ams-env/metrics_collector_log_dir', '/var/log/ambari-metrics-collector')}}/ambari-metrics-collector.log"
+    },
+    {
+      "type":"ams_monitor",
+      "rowtype":"service",
+      "path":"{{default('/configurations/ams-env/metrics_monitor_log_dir', '/var/log/ambari-metrics-monitor')}}/ambari-metrics-monitor.out"
+    },
+    {
+      "type":"ams_grafana",
+      "rowtype":"service",
+      "path":"{{default('/configurations/ams-grafana-env/metrics_grafana_log_dir', '/var/log/ambari-metrics-grafana')}}/grafana.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ams_collector"
+          ]
+         }
+      },
+      "log4j_format":"%d{ISO8601} %p %c: %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ams_hbase_master",
+            "ams_hbase_regionserver"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ams_grafana"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
+      "multiline_pattern":"^(%{DATESTAMP:logtime})",
+      "message_pattern":"(?m)^%{DATESTAMP:logtime}%{SPACE}\\[%{WORD:level}\\]%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy/MM/dd HH:mm:ss"
+          }
+        },
+        "level":[
+          {
+            "map_fieldvalue":{
+              "pre_value":"I",
+              "post_value":"INFO"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"W",
+              "post_value":"WARN"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"D",
+              "post_value":"DEBUG"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"E",
+              "post_value":"ERROR"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"F",
+              "post_value":"FATAL"
+            }
+          }
+        ]
+      }
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "ams_monitor"
+          ]
+        }
+      },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{JAVAFILE:file}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      },
+      "level":[
+        {
+          "map_fieldvalue":{
+            "pre_value":"WARNING",
+            "post_value":"WARN"
+          }
+        }
+      ]
+    }
+  ]
+ }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/atlas-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/atlas-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/atlas-logsearch-conf.xml
deleted file mode 100644
index 71a08fb..0000000
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/configuration/atlas-logsearch-conf.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Atlas</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>ATLAS_SERVER:atlas_app</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"atlas_app",
-      "rowtype":"service",
-      "path":"{{default('/configurations/atlas-env/metadata_log_dir', '/var/log/atlas')}}/application.log"
-    }
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "atlas_app"
-          ]
-         }
-       },
-      "log4j_format":"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/templates/input.config-atlas.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/templates/input.config-atlas.json.j2 b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/templates/input.config-atlas.json.j2
new file mode 100644
index 0000000..2d977b9
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/templates/input.config-atlas.json.j2
@@ -0,0 +1,48 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"atlas_app",
+      "rowtype":"service",
+      "path":"{{default('/configurations/atlas-env/metadata_log_dir', '/var/log/atlas')}}/application.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "atlas_app"
+          ]
+        }
+      },
+      "log4j_format":"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-logsearch-conf.xml
deleted file mode 100644
index 6b43ba6..0000000
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-logsearch-conf.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Falcon</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>FALCON_SERVER:falcon_app</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"falcon_app",
-      "rowtype":"service",
-      "path":"{{default('/configurations/falcon-env/falcon_log_dir', '/var/log/falcon')}}/falcon.application.log"
-    }
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "falcon_app"
-          ]
-         }
-       },
-      "log4j_format":"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/templates/input.config-falcon.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/templates/input.config-falcon.json.j2 b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/templates/input.config-falcon.json.j2
new file mode 100644
index 0000000..7c5aede
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/templates/input.config-falcon.json.j2
@@ -0,0 +1,48 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"falcon_app",
+      "rowtype":"service",
+      "path":"{{default('/configurations/falcon-env/falcon_log_dir', '/var/log/falcon')}}/falcon.application.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "falcon_app"
+          ]
+        }
+      },
+      "log4j_format":"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/configuration/flume-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/configuration/flume-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/configuration/flume-logsearch-conf.xml
deleted file mode 100644
index 98e6db8..0000000
--- a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/configuration/flume-logsearch-conf.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Flume</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>FLUME:flume_handler</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"flume_handler",
-      "rowtype":"service",
-      "path":"{{default('/configurations/flume-env/flume_log_dir', '/var/log/flume')}}/flume.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "flume_handler"
-          ]
-         }
-       },
-      "log4j_format":"",
-      "multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
-      "message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}\\(%{JAVACLASS:class}\\.%{JAVAMETHOD:method}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"dd MMM yyyy HH:mm:ss,SSS"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/templates/input.config-flume.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/templates/input.config-flume.json.j2 b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/templates/input.config-flume.json.j2
new file mode 100644
index 0000000..12d7aad
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/templates/input.config-flume.json.j2
@@ -0,0 +1,53 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{# flume.conf: Add your flume configuration here and start flume
+#             Note if you are using the Windows srvice or Unix service
+#             provided by the HDP distribution, they will assume the
+#             agent's name in this file to be 'a1'
+#}
+{
+  "input":[
+    {
+      "type":"flume_handler",
+      "rowtype":"service",
+      "path":"{{default('/configurations/flume-env/flume_log_dir', '/var/log/flume')}}/flume.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "flume_handler"
+          ]
+        }
+      },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
+      "message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}\\(%{JAVACLASS:class}\\.%{JAVAMETHOD:method}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"dd MMM yyyy HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-logsearch-conf.xml
deleted file mode 100644
index 321ea4e..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/configuration/hbase-logsearch-conf.xml
+++ /dev/null
@@ -1,111 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>HBase</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>HBASE_MASTER:hbase_master;HBASE_REGIONSERVER:hbase_regionserver;PHOENIX_QUERY_SERVER:hbase_phoenix_server</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"hbase_master",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/hbase-*-master-*.log"
-    },
-    {
-      "type":"hbase_regionserver",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/hbase-*-regionserver-*.log"
-    },
-    {
-      "type":"hbase_phoenix_server",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/phoenix-*-server.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hbase_master",
-            "hbase_regionserver"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hbase_phoenix_server"
-          ]
-         }
-      },
-      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/input.config-hbase.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/input.config-hbase.json.j2 b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/input.config-hbase.json.j2
new file mode 100644
index 0000000..94fbc64
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/input.config-hbase.json.j2
@@ -0,0 +1,79 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"hbase_master",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/hbase-*-master-*.log"
+    },
+    {
+      "type":"hbase_regionserver",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/hbase-*-regionserver-*.log"
+    },
+    {
+      "type":"hbase_phoenix_server",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hbase-env/hbase_log_dir', '/var/log/hbase')}}/phoenix-*-server.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hbase_master",
+            "hbase_regionserver"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hbase_phoenix_server"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-logsearch-conf.xml
deleted file mode 100644
index d85a028..0000000
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-logsearch-conf.xml
+++ /dev/null
@@ -1,248 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>HDFS</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>NAMENODE:hdfs_namenode;DATANODE:hdfs_datanode;SECONDARY_NAMENODE:hdfs_secondarynamenode;JOURNALNODE:hdfs_journalnode;ZKFC:hdfs_zkfc;NFS_GATEWAY:hdfs_nfs3</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"hdfs_datanode",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-datanode-*.log"
-    },
-    {
-      "type":"hdfs_namenode",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-namenode-*.log"
-    },
-    {
-      "type":"hdfs_journalnode",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-journalnode-*.log"
-    },
-    {
-      "type":"hdfs_secondarynamenode",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-secondarynamenode-*.log"
-    },
-    {
-      "type":"hdfs_zkfc",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-zkfc-*.log"
-    },
-    {
-      "type":"hdfs_nfs3",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-nfs3-*.log"
-    },
-    {
-      "type":"hdfs_audit",
-      "rowtype":"audit",
-      "is_enabled":"true",
-      "add_fields":{
-        "logType":"HDFSAudit",
-        "enforcer":"hadoop-acl",
-        "repoType":"1",
-        "repo":"hdfs"
-      },
-      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hdfs-audit.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hdfs_datanode",
-            "hdfs_journalnode",
-            "hdfs_secondarynamenode",
-            "hdfs_namenode",
-            "hdfs_zkfc",
-            "hdfs_nfs3"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-        }
-       }
-     },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hdfs_audit"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "evtTime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     },
-    {
-      "filter":"keyvalue",
-      "sort_order":1,
-      "conditions":{
-        "fields":{
-          "type":[
-            "hdfs_audit"
-          ]
-         }
-       },
-      "source_field":"log_message",
-      "value_split":"=",
-      "field_split":"\t",
-      "post_map_values":{
-        "src":{
-          "map_fieldname":{
-            "new_fieldname":"resource"
-          }
-         },
-        "ip":{
-          "map_fieldname":{
-            "new_fieldname":"cliIP"
-          }
-         },
-        "allowed":[
-          {
-            "map_fieldvalue":{
-              "pre_value":"true",
-              "post_value":"1"
-            }
-           },
-          {
-            "map_fieldvalue":{
-              "pre_value":"false",
-              "post_value":"0"
-            }
-           },
-          {
-            "map_fieldname":{
-              "new_fieldname":"result"
-            }
-           }
-         ],
-        "cmd":{
-          "map_fieldname":{
-            "new_fieldname":"action"
-          }
-         },
-        "proto":{
-          "map_fieldname":{
-            "new_fieldname":"cliType"
-          }
-         },
-        "callerContext":{
-          "map_fieldname":{
-            "new_fieldname":"req_caller_id"
-          }
-         }
-       }
-     },
-    {
-      "filter":"grok",
-      "sort_order":2,
-      "source_field":"ugi",
-      "remove_source_field":"false",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hdfs_audit"
-          ]
-         }
-       },
-      "message_pattern":"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}",
-      "post_map_values":{
-        "user":{
-          "map_fieldname":{
-            "new_fieldname":"reqUser"
-          }
-         },
-        "x_user":{
-          "map_fieldname":{
-            "new_fieldname":"reqUser"
-          }
-         },
-        "p_user":{
-          "map_fieldname":{
-            "new_fieldname":"reqUser"
-          }
-         },
-        "k_user":{
-          "map_fieldname":{
-            "new_fieldname":"proxyUsers"
-          }
-         },
-        "p_authType":{
-          "map_fieldname":{
-            "new_fieldname":"authType"
-          }
-         },
-        "k_authType":{
-          "map_fieldname":{
-            "new_fieldname":"proxyAuthType"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/input.config-hdfs.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/input.config-hdfs.json.j2 b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/input.config-hdfs.json.j2
new file mode 100644
index 0000000..ed7abf6
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/input.config-hdfs.json.j2
@@ -0,0 +1,216 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"hdfs_datanode",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-datanode-*.log"
+    },
+    {
+      "type":"hdfs_namenode",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-namenode-*.log"
+    },
+    {
+      "type":"hdfs_journalnode",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-journalnode-*.log"
+    },
+    {
+      "type":"hdfs_secondarynamenode",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-secondarynamenode-*.log"
+    },
+    {
+      "type":"hdfs_zkfc",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-zkfc-*.log"
+    },
+    {
+      "type":"hdfs_nfs3",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hadoop-{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}-nfs3-*.log"
+    },
+    {
+      "type":"hdfs_audit",
+      "rowtype":"audit",
+      "is_enabled":"true",
+      "add_fields":{
+        "logType":"HDFSAudit",
+        "enforcer":"hadoop-acl",
+        "repoType":"1",
+        "repo":"hdfs"
+      },
+      "path":"{{default('/configurations/hadoop-env/hdfs_log_dir_prefix', '/var/log/hadoop')}}/{{default('configurations/hadoop-env/hdfs_user', 'hdfs')}}/hdfs-audit.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_datanode",
+            "hdfs_journalnode",
+            "hdfs_secondarynamenode",
+            "hdfs_namenode",
+            "hdfs_zkfc",
+            "hdfs_nfs3"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_audit"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "evtTime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    },
+    {
+      "filter":"keyvalue",
+      "sort_order":1,
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_audit"
+          ]
+        }
+      },
+      "source_field":"log_message",
+      "value_split":"=",
+      "field_split":"\t",
+      "post_map_values":{
+        "src":{
+          "map_fieldname":{
+            "new_fieldname":"resource"
+          }
+        },
+        "ip":{
+          "map_fieldname":{
+            "new_fieldname":"cliIP"
+          }
+        },
+        "allowed":[
+          {
+            "map_fieldvalue":{
+              "pre_value":"true",
+              "post_value":"1"
+            }
+          },
+          {
+            "map_fieldvalue":{
+              "pre_value":"false",
+              "post_value":"0"
+            }
+          },
+          {
+            "map_fieldname":{
+              "new_fieldname":"result"
+            }
+          }
+        ],
+        "cmd":{
+          "map_fieldname":{
+            "new_fieldname":"action"
+          }
+        },
+        "proto":{
+          "map_fieldname":{
+            "new_fieldname":"cliType"
+          }
+        },
+        "callerContext":{
+          "map_fieldname":{
+            "new_fieldname":"req_caller_id"
+          }
+        }
+      }
+    },
+    {
+      "filter":"grok",
+      "sort_order":2,
+      "source_field":"ugi",
+      "remove_source_field":"false",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hdfs_audit"
+          ]
+        }
+      },
+      "message_pattern":"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}",
+      "post_map_values":{
+        "user":{
+          "map_fieldname":{
+            "new_fieldname":"reqUser"
+          }
+        },
+        "x_user":{
+          "map_fieldname":{
+            "new_fieldname":"reqUser"
+          }
+        },
+        "p_user":{
+          "map_fieldname":{
+            "new_fieldname":"reqUser"
+          }
+        },
+        "k_user":{
+          "map_fieldname":{
+            "new_fieldname":"proxyUsers"
+          }
+        },
+        "p_authType":{
+          "map_fieldname":{
+            "new_fieldname":"authType"
+          }
+        },
+        "k_authType":{
+          "map_fieldname":{
+            "new_fieldname":"proxyAuthType"
+          }
+        }
+      }
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-logsearch-conf.xml
deleted file mode 100644
index c1b971c..0000000
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-logsearch-conf.xml
+++ /dev/null
@@ -1,117 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Hive</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>HIVE_METASTORE:hive_metastore;HIVE_SERVER:hive_hiveserver2;WEBHCAT_SERVER:webhcat_server</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"hive_hiveserver2",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hive-env/hive_log_dir', '/var/log/hive')}}/hiveserver2.log"
-    },
-    {
-      "type":"hive_metastore",
-      "rowtype":"service",
-      "path":"{{default('/configurations/hive-env/hive_log_dir', '/var/log/hive')}}/hivemetastore.log"
-    },
-    {
-      "type": "webhcat_server",
-      "rowntype":"service",
-      "path":"{{default('configurations/hive-env/hcat_log_dir', '/var/log/webhcat')}}/webhcat.log"
-    }
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "hive_hiveserver2",
-            "hive_metastore"
-          ]
-         }
-       },
-      "log4j_format":"%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]:%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     },
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "webhcat_server"
-          ]
-         }
-       },
-      "log4j_format":" %-5p | %d{DATE} | %c | %m%n",
-      "multiline_pattern":"^(%{SPACE}%{LOGLEVEL:level}%{CUSTOM_SEPARATOR}%{CUSTOM_DATESTAMP:logtime})",
-      "message_pattern":"(?m)^%{SPACE}%{LOGLEVEL:level}%{CUSTOM_SEPARATOR}%{CUSTOM_DATESTAMP:logtime}%{CUSTOM_SEPARATOR}%{JAVACLASS:file}%{CUSTOM_SEPARATOR}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"dd MMM yyyy HH:mm:ss,SSS"
-          }
-         },
-        "level":{
-           "map_fieldvalue":{
-             "pre_value":"WARNING",
-             "post_value":"WARN"
-            }
-        }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/input.config-hive.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/input.config-hive.json.j2 b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/input.config-hive.json.j2
new file mode 100644
index 0000000..8697cf5
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/input.config-hive.json.j2
@@ -0,0 +1,85 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"hive_hiveserver2",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hive-env/hive_log_dir', '/var/log/hive')}}/hiveserver2.log"
+    },
+    {
+      "type":"hive_metastore",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hive-env/hive_log_dir', '/var/log/hive')}}/hivemetastore.log"
+    },
+    {
+      "type": "webhcat_server",
+      "rowtype":"service",
+      "path":"{{default('/configurations/hive-env/hcat_log_dir', '/var/log/webhcat')}}/webhcat.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "hive_hiveserver2",
+            "hive_metastore"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]:%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    },
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "webhcat_server"
+          ]
+        }
+      },
+      "log4j_format":" %-5p | %d{DATE} | %c | %m%n",
+      "multiline_pattern":"^(%{SPACE}%{LOGLEVEL:level}%{CUSTOM_SEPARATOR}%{CUSTOM_DATESTAMP:logtime})",
+      "message_pattern":"(?m)^%{SPACE}%{LOGLEVEL:level}%{CUSTOM_SEPARATOR}%{CUSTOM_DATESTAMP:logtime}%{CUSTOM_SEPARATOR}%{JAVACLASS:file}%{CUSTOM_SEPARATOR}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"dd MMM yyyy HH:mm:ss,SSS"
+          }
+        },
+        "level":{
+          "map_fieldvalue":{
+            "pre_value":"WARNING",
+            "post_value":"WARN"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-logsearch-conf.xml
deleted file mode 100644
index 29a8b36..0000000
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-logsearch-conf.xml
+++ /dev/null
@@ -1,124 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Kafka</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>KAFKA_BROKER:kafka_server,kafka_request,kafka_logcleaner,kafka_controller,kafka_statechange</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"kafka_controller",
-      "rowtype":"service",
-      "path":"{{default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')}}/controller.log"
-    },
-    {
-      "type":"kafka_request",
-      "rowtype":"service",
-      "path":"{{default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')}}/kafka-request.log"
-    },
-    {
-      "type":"kafka_logcleaner",
-      "rowtype":"service",
-      "path":"{{default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')}}/log-cleaner.log"
-    },
-    {
-      "type":"kafka_server",
-      "rowtype":"service",
-      "path":"{{default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')}}/server.log"
-    },
-    {
-      "type":"kafka_statechange",
-      "rowtype":"service",
-      "path":"{{kafka_log_dir}}/state-change.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "kafka_controller",
-            "kafka_request",
-            "kafka_logcleaner"
-          ]
-         }
-       },
-      "log4j_format":"[%d] %p %m (%c)%n",
-      "multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])",
-      "message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     },
-    {
-      "filter":"grok",
-      "comment":"Suppose to be same log4j pattern as other kafka processes, but some reason thread is not printed",
-      "conditions":{
-        "fields":{
-          "type":[
-            "kafka_server",
-            "kafka_statechange"
-          ]
-         }
-       },
-      "log4j_format":"[%d] %p %m (%c)%n",
-      "multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])",
-      "message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/templates/input.config-kafka.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/templates/input.config-kafka.json.j2 b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/templates/input.config-kafka.json.j2
new file mode 100644
index 0000000..5b8f896
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/templates/input.config-kafka.json.j2
@@ -0,0 +1,92 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"kafka_controller",
+      "rowtype":"service",
+      "path":"{{default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')}}/controller.log"
+    },
+    {
+      "type":"kafka_request",
+      "rowtype":"service",
+      "path":"{{default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')}}/kafka-request.log"
+    },
+    {
+      "type":"kafka_logcleaner",
+      "rowtype":"service",
+      "path":"{{default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')}}/log-cleaner.log"
+    },
+    {
+      "type":"kafka_server",
+      "rowtype":"service",
+      "path":"{{default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')}}/server.log"
+    },
+    {
+      "type":"kafka_statechange",
+      "rowtype":"service",
+      "path":"{{default('/configurations/kafka-env/kafka_log_dir', '/var/log/kafka')}}/state-change.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "kafka_controller",
+            "kafka_request",
+            "kafka_logcleaner"
+          ]
+        }
+      },
+      "log4j_format":"[%d] %p %m (%c)%n",
+      "multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])",
+      "message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    },
+    {
+      "filter":"grok",
+      "comment":"Supposed to be the same log4j pattern as the other kafka processes, but for some reason the thread is not printed",
+      "conditions":{
+        "fields":{
+          "type":[
+            "kafka_server",
+            "kafka_statechange"
+          ]
+        }
+      },
+      "log4j_format":"[%d] %p %m (%c)%n",
+      "multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])",
+      "message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/knox-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/knox-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/knox-logsearch-conf.xml
deleted file mode 100644
index 528b011..0000000
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/configuration/knox-logsearch-conf.xml
+++ /dev/null
@@ -1,93 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Knox</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>KNOX_GATEWAY:knox_gateway,knox_cli,knox_ldap</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"knox_gateway",
-      "rowtype":"service",
-      "path":"/var/log/knox/gateway.log"
-    },
-    {
-      "type":"knox_cli",
-      "rowtype":"service",
-      "path":"/var/log/knox/knoxcli.log"
-    },
-    {
-      "type":"knox_ldap",
-      "rowtype":"service",
-      "path":"/var/log/knox/ldap.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "knox_gateway",
-            "knox_cli",
-            "knox_ldap"
-          ]
-
-        }
-       },
-      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
-      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/templates/input.config-knox.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/templates/input.config-knox.json.j2 b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/templates/input.config-knox.json.j2
new file mode 100644
index 0000000..6d7cf72
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/templates/input.config-knox.json.j2
@@ -0,0 +1,60 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"knox_gateway",
+      "rowtype":"service",
+      "path":"/var/log/knox/gateway.log"
+    },
+    {
+      "type":"knox_cli",
+      "rowtype":"service",
+      "path":"/var/log/knox/knoxcli.log"
+    },
+    {
+      "type":"knox_ldap",
+      "rowtype":"service",
+      "path":"/var/log/knox/ldap.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "knox_gateway",
+            "knox_cli",
+            "knox_ldap"
+          ]
+        }
+      },
+      "log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
+      "multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
+      "message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-ambari-config.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-ambari-config.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-ambari-config.xml
index 19bbd4a..fce4c10 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-ambari-config.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-ambari-config.xml
@@ -31,6 +31,7 @@
       <show-property-name>false</show-property-name>
       <property-file-name>input.config-ambari.json.j2</property-file-name>
       <property-file-type>text</property-file-type>
+      <visible>false</visible>
     </value-attributes>
     <on-ambari-upgrade add="true"/>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-custom-logsearch-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-custom-logsearch-conf.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-custom-logsearch-conf.xml
deleted file mode 100644
index 30a93a5..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-custom-logsearch-conf.xml
+++ /dev/null
@@ -1,46 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_component_mappings</name>
-    <display-name>Service Component mapping</display-name>
-    <description>Log Search service component logid mapping list (e.g.: SERVICE1=S1_COMPONENT1:logid1,logid2;S1_COMPONENT2:logid3|SERVICE2=...)</description>
-    <value></value>
-    <value-attributes>
-      <empty-value-valid>true</empty-value-valid>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Log Feeder Config</display-name>
-    <description>Metadata jinja template for Log Feeder which contains grok patterns for reading service specific logs.</description>
-    <value>
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-      <empty-value-valid>true</empty-value-valid>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
index d00be4e..84a6ad9 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
@@ -81,7 +81,6 @@ cluster_name = str(config['clusterName'])
 configurations = config['configurations'] # need reference inside logfeeder jinja templates
 logserch_meta_configs = get_logsearch_meta_configs(configurations)
 logsearch_metadata = get_logsearch_metadata(logserch_meta_configs)
-logfeeder_metadata = get_logfeeder_metadata(logserch_meta_configs)
 
 # for now just pick first collector
 if 'metrics_collector_hosts' in config['clusterHostInfo']:
@@ -256,14 +255,6 @@ logsearch_properties['logsearch.auth.simple.enabled'] = 'false'
 
 logsearch_properties['logsearch.protocol'] = logsearch_ui_protocol
 
-logsearch_acls = ''
-if 'infra-solr-env' in config['configurations'] and security_enabled and not logsearch_use_external_solr:
-  acl_infra_solr_principal = get_name_from_principal(config['configurations']['infra-solr-env']['infra_solr_kerberos_principal'])
-  acl_logsearch_principal = get_name_from_principal(config['configurations']['logsearch-env']['logsearch_kerberos_principal'])
-  logsearch_acls = format('world:anyone:r,sasl:{acl_infra_solr_principal}:cdrwa,sasl:{acl_logsearch_principal}:cdrwa')
-  logsearch_properties['logsearch.solr.zk.acls'] = logsearch_acls
-  logsearch_properties['logsearch.solr.audit.logs.zk.acls'] = logsearch_acls
-
 # load config values
 
 logsearch_properties = dict(logsearch_properties.items() + dict(config['configurations']['logsearch-properties']).items())
@@ -279,6 +270,9 @@ logsearch_properties['logsearch.solr.metrics.collector.hosts'] = format(logsearc
 logsearch_properties['logsearch.auth.external_auth.host_url'] = format(logsearch_properties['logsearch.auth.external_auth.host_url'])
 logsearch_properties['logsearch.spnego.kerberos.host'] = logsearch_spnego_host
 
+if not('logsearch.config.zk_connect_string' in logsearch_properties):
+  logsearch_properties['logsearch.config.zk_connect_string'] = logsearch_solr_zk_quorum
+
 if logsearch_solr_kerberos_enabled:
   logsearch_properties['logsearch.solr.kerberos.enable'] = 'true'
   logsearch_properties['logsearch.solr.jaas.file'] = logsearch_jaas_file
@@ -330,17 +324,7 @@ logfeeder_env_jceks_file = os.path.join(logsearch_logfeeder_conf, 'logfeeder.jce
 logfeeder_ambari_config_content = config['configurations']['logfeeder-ambari-config']['content']
 logfeeder_output_config_content = config['configurations']['logfeeder-output-config']['content']
 
-logfeeder_default_services = ['logsearch']
-logfeeder_default_config_file_names = ['global.config.json'] + ['input.config-%s.json' % (tag) for tag in logfeeder_default_services]
-logfeeder_custom_config_file_names = ['input.config-%s.json' % (tag.replace('-logsearch-conf', ''))
-                                      for tag, content in logfeeder_metadata.iteritems() if any(logfeeder_metadata)]
-
-if logfeeder_system_log_enabled:
-  default_config_files = ','.join(['output.config.json','input.config-ambari.json'] + logfeeder_default_config_file_names + logfeeder_custom_config_file_names
-                                  + ['input.config-system_messages.json', 'input.config-secure_log.json'])
-else:
-  default_config_files = ','.join(['output.config.json','input.config-ambari.json'] + logfeeder_default_config_file_names + logfeeder_custom_config_file_names)
-
+default_config_files = ','.join(['output.config.json','global.config.json'])
 
 logfeeder_grok_patterns = config['configurations']['logfeeder-grok']['default_grok_patterns']
 if config['configurations']['logfeeder-grok']['custom_grok_patterns'].strip():
@@ -367,14 +351,19 @@ logfeeder_properties = dict(logfeeder_properties.items() + dict(config['configur
 
 # load derivated values
 
-logfeeder_properties['logfeeder.metrics.collector.hosts'] = format(logfeeder_properties['logfeeder.metrics.collector.hosts'])
+logfeeder_properties['cluster.name'] = cluster_name
+logfeeder_properties['logfeeder.config.dir'] = logsearch_logfeeder_conf
 logfeeder_properties['logfeeder.config.files'] = format(logfeeder_properties['logfeeder.config.files'])
 logfeeder_properties['logfeeder.solr.zk_connect_string'] = logsearch_solr_zk_quorum + logsearch_solr_zk_znode
 
+logfeeder_properties['logfeeder.metrics.collector.hosts'] = format(logfeeder_properties['logfeeder.metrics.collector.hosts'])
 logfeeder_properties['logfeeder.metrics.collector.protocol'] = metrics_collector_protocol
 logfeeder_properties['logfeeder.metrics.collector.port'] = metrics_collector_port
 logfeeder_properties['logfeeder.metrics.collector.path'] = '/ws/v1/timeline/metrics'
 
+if not('logsearch.config.zk_connect_string' in logfeeder_properties):
+  logfeeder_properties['logsearch.config.zk_connect_string'] = logsearch_solr_zk_quorum
+
 if logsearch_solr_kerberos_enabled:
   if 'logfeeder.solr.kerberos.enable' not in logfeeder_properties:
     logfeeder_properties['logfeeder.solr.kerberos.enable'] = 'true'
@@ -387,6 +376,19 @@ logfeeder_checkpoint_folder = logfeeder_properties['logfeeder.checkpoint.folder'
 
 logfeeder_use_ssl = logsearch_solr_ssl_enabled or metrics_collector_protocol == 'https'
 
+
+logsearch_acls = ''
+if 'infra-solr-env' in config['configurations'] and security_enabled and not logsearch_use_external_solr:
+  acl_infra_solr_principal = get_name_from_principal(config['configurations']['infra-solr-env']['infra_solr_kerberos_principal'])
+  acl_logsearch_principal = get_name_from_principal(config['configurations']['logsearch-env']['logsearch_kerberos_principal'])
+  logsearch_acls = format('world:anyone:r,sasl:{acl_infra_solr_principal}:cdrwa,sasl:{acl_logsearch_principal}:cdrwa')
+  logsearch_properties['logsearch.solr.zk.acls'] = logsearch_acls
+  logsearch_properties['logsearch.solr.audit.logs.zk.acls'] = logsearch_acls
+  if not('logsearch.config.zk_acls' in logsearch_properties):
+    logsearch_properties['logsearch.config.zk_acls'] = logsearch_acls
+  if not('logsearch.config.zk_acls' in logfeeder_properties):
+    logfeeder_properties['logsearch.config.zk_acls'] = logsearch_acls
+
 #####################################
 # Smoke command
 #####################################

http://git-wip-us.apache.org/repos/asf/ambari/blob/0ac0ba42/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
index e6e55b9..653d604 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py
@@ -99,6 +99,10 @@ def setup_logfeeder():
        encoding="utf-8"
        )
 
+  File(format("{logsearch_logfeeder_conf}/global.config.json"),
+       content=Template("global.config.json.j2")
+       )
+
   File(format("{logsearch_logfeeder_conf}/input.config-ambari.json"),
        content=InlineTemplate(params.logfeeder_ambari_config_content),
        encoding="utf-8"
@@ -109,17 +113,6 @@ def setup_logfeeder():
        encoding="utf-8"
        )
 
-  for file_name in params.logfeeder_default_config_file_names:
-    File(format("{logsearch_logfeeder_conf}/" + file_name),
-         content=Template(file_name + ".j2")
-         )
-
-  File(format("{logsearch_logfeeder_conf}/input.config-logfeeder-custom.json"), action='delete')
-  for service, pattern_content in params.logfeeder_metadata.iteritems():
-    File(format("{logsearch_logfeeder_conf}/input.config-" + service.replace('-logsearch-conf', '') + ".json"),
-      content=InlineTemplate(pattern_content, extra_imports=[default])
-    )
-
   if params.logfeeder_system_log_enabled:
     File(format("{logsearch_logfeeder_conf}/input.config-system_messages.json"),
          content=params.logfeeder_system_messages_content