Posted to commits@ambari.apache.org by ol...@apache.org on 2016/06/21 17:13:25 UTC

[1/2] ambari git commit: AMBARI-17117. Fix misnamed Zookeeper connect strings in Log Search (Miklos Gergely via oleewere)

Repository: ambari
Updated Branches:
  refs/heads/trunk 6c2736acd -> a2c23b213


http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-server/src/test/python/stacks/2.4/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.4/configs/default.json b/ambari-server/src/test/python/stacks/2.4/configs/default.json
index ff548e0..848be40 100644
--- a/ambari-server/src/test/python/stacks/2.4/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.4/configs/default.json
@@ -302,7 +302,7 @@
         "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!--\n  Licensed to the Apache Software Foundation (ASF) under one or more\n  contributor license agreements.  See the NOTICE file distributed with\n  this work for additional information regarding copyright ownership.\n  The ASF licenses this file to You under the Apache License, Version 2.0\n  (the \"License\"); you may not use this file except in compliance with\n  the License.  You may obtain a copy of the License at\n\n      http://www.apache.org/licenses/LICENSE-2.0\n\n  Unless required by applicable law or agreed to in writing, software\n  distributed under the License is distributed on an \"AS IS\" BASIS,\n  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n  See the License for the specific language governing permissions and\n  limitations under the License.\n-->\n<!DOCTYPE log4j:configuration SYSTEM \"log4j.dtd\">\n<log4j:configuration xmlns:log4j=\"http://jakarta.
 apache.org/log4j/\">\n  <appender name=\"console\" class=\"org.apache.log4j.ConsoleAppender\">\n    <param name=\"Target\" value=\"System.out\" />\n    <layout class=\"org.apache.log4j.PatternLayout\">\n      <param name=\"ConversionPattern\" value=\"%d [%t] %-5p %C{6} (%F:%L) - %m%n\" />\n    </layout>\n  </appender>\n\n  <appender name=\"rolling_file\" class=\"org.apache.log4j.RollingFileAppender\"> \n    <param name=\"file\" value=\"{{logfeeder_log_dir}}/logfeeder.log\" />\n    <param name=\"append\" value=\"true\" /> \n    <param name=\"maxFileSize\" value=\"10MB\" /> \n    <param name=\"maxBackupIndex\" value=\"10\" /> \n    <layout class=\"org.apache.log4j.PatternLayout\"> \n      <param name=\"ConversionPattern\" value=\"%d [%t] %-5p %C{6} (%F:%L) - %m%n\"/> \n    </layout> \n  </appender> \n\n  <category name=\"org.apache.ambari.logfeeder\" additivity=\"false\">\n    <priori
 ty value=\"info\" />\n    <appender-ref ref=\"rolling_file\" />\n  </category>\n\n  <root>\n    <priority value=\"warn\" />\n    <appender-ref ref=\"rolling_file\" />\n  </root>\n</log4j:configuration>"
       },
       "logfeeder-input-configs": {
-        "content": "{\n  \"global\":{\n    \"add_fields\":{\n      \"cluster\":\"{{cluster_name}}\"\n    },\n    \"source\":\"file\",\n    \"tail\":\"true\",\n    \"gen_event_md5\":\"true\",\n    \"start_position\":\"beginning\"\n  },\n  \"input\":[\n    {\n      \"type\":\"accumulo_gc\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/gc_*.log\"\n    },\n    {\n      \"type\":\"accumulo_master\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/master_*.log\"\n    },\n    {\n      \"type\":\"accumulo_monitor\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/monitor_*.log\"\n    },\n    {\n      \"type\":\"accumulo_tracer\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/tracer_*.log\"\n    },\n    {\n      \"type\":\"accumulo_tserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/tserver_*.log\"\n    },\n    {\n      \"type\":\"atlas_app\",\n      \"rowtype\":\"se
 rvice\",\n      \"path\":\"{{atlas_log_dir}}/application.log\"\n    },\n    {\n      \"type\":\"ambari_agent\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ambari_agent_log_dir}}/ambari-agent.log\"\n    },\n    {\n      \"type\":\"ambari_server\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ambari_server_log_dir}}/ambari-server.log\"\n    },\n    {\n      \"type\":\"ams_hbase_master\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{metrics_collector_log_dir}}/hbase-ams-master-*.log\"\n    },\n    {\n      \"type\":\"ams_hbase_regionserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{metrics_collector_log_dir}}/hbase-ams-regionserver-*.log\"\n    },\n    {\n      \"type\":\"ams_collector\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{metrics_collector_log_dir}}/ambari-metrics-collector.log\"\n    },\n    {\n      \"type\":\"falcon_app\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{falcon_log_dir}}/falcon.application.log\"\n    },\n    {\
 n      \"type\":\"hbase_master\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hbase_log_dir}}/hbase-hbase-master-*.log\"\n    },\n    {\n      \"type\":\"hbase_regionserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hbase_log_dir}}/hbase-hbase-regionserver-*.log\"\n    },\n    {\n      \"type\":\"hdfs_datanode\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-datanode-*.log\"\n    },\n    {\n      \"type\":\"hdfs_namenode\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-namenode-*.log\"\n    },\n    {\n      \"type\":\"hdfs_journalnode\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-journalnode-*.log\"\n    },\n    {\n      \"type\":\"hdfs_secondarynamenode\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-secondarynamenode-*.log\"\n    },\n    {\n      \"type\":\"hdfs_zkfc\",\n      \"r
 owtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-zkfc-*.log\"\n    },\n    {\n      \"type\":\"hive_hiveserver2\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hive_log_dir}}/hiveserver2.log\"\n    },\n    {\n      \"type\":\"hive_metastore\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hive_log_dir}}/hivemetastore.log\"\n    },\n    {\n      \"type\":\"kafka_controller\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{kafka_log_dir}}/controller.log\"\n    },\n    {\n      \"type\":\"kafka_request\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{kafka_log_dir}}/kafka-request.log\"\n    },\n    {\n      \"type\":\"kafka_logcleaner\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{kafka_log_dir}}/log-cleaner.log\"\n    },\n    {\n      \"type\":\"kafka_server\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{kafka_log_dir}}/server.log\"\n    },\n    {\n      \"type\":\"kafka_statechange\",\n      \"rowtype\":\"service
 \",\n      \"path\":\"{{kafka_log_dir}}/state-change.log\"\n    },\n    {\n      \"type\":\"knox_gateway\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{knox_log_dir}}/gateway.log\"\n    },\n    {\n      \"type\":\"knox_cli\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{knox_log_dir}}/knoxcli.log\"\n    },\n    {\n      \"type\":\"knox_ldap\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{knox_log_dir}}/ldap.log\"\n    },\n    {\n      \"type\":\"mapred_historyserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{mapred_log_dir_prefix}}/mapred/mapred-mapred-historyserver*.log\"\n    },\n    {\n      \"type\":\"logsearch_app\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{logsearch_log_dir}}/logsearch.log\"\n    },\n    {\n      \"type\":\"logsearch_feeder\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{logfeeder_log_dir}}/logfeeder.log\"\n    },\n    {\n      \"type\":\"logsearch_perf\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{l
 ogsearch_log_dir}}/logsearch-performance.log\"\n    },\n    {\n      \"type\":\"ranger_admin\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ranger_admin_log_dir}}/xa_portal.log\"\n    },\n    {\n      \"type\":\"ranger_dbpatch\",\n      \"is_enabled\":\"true\",\n      \"path\":\"{{ranger_admin_log_dir}}/ranger_db_patch.log\"\n    },\n    {\n      \"type\":\"ranger_kms\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ranger_kms_log_dir}}/kms.log\"\n    },\n    {\n      \"type\":\"ranger_usersync\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ranger_usersync_log_dir}}/usersync.log\"\n    },\n    {\n      \"type\":\"oozie_app\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{oozie_log_dir}}/oozie.log\"\n    },\n    {\n      \"type\":\"yarn_nodemanager\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-nodemanager-*.log\"\n    },\n    {\n      \"type\":\"yarn_resourcemanager\",\n      \"rowtype\":\"service\",\n      \"p
 ath\":\"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-resourcemanager-*.log\"\n    },\n    {\n      \"type\":\"yarn_timelineserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-timelineserver-*.log\"\n    },\n    {\n      \"type\":\"yarn_historyserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-historyserver-*.log\"\n    },\n    {\n      \"type\":\"yarn_jobsummary\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{yarn_log_dir_prefix}}/yarn/hadoop-mapreduce.jobsummary.log\"\n    },\n    {\n      \"type\":\"storm_drpc\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/drpc.log\"\n    },\n    {\n      \"type\":\"storm_logviewer\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/logviewer.log\"\n    },\n    {\n      \"type\":\"storm_nimbus\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/nimbus.log\"\n    },\n    {\n      \"type\":\"st
 orm_supervisor\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/supervisor.log\"\n    },\n    {\n      \"type\":\"storm_ui\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/ui.log\"\n    },\n    {\n      \"type\":\"storm_worker\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/*worker*.log\"\n    },\n    {\n      \"type\":\"zookeeper\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{zk_log_dir}}/zookeeper/zookeeper*.out\"\n    },\n    {\n      \"type\":\"hdfs_audit\",\n      \"rowtype\":\"audit\",\n      \"is_enabled\":\"true\",\n      \"add_fields\":{\n        \"logType\":\"HDFSAudit\",\n        \"enforcer\":\"hadoop-acl\",\n        \"repoType\":\"1\",\n        \"repo\":\"hdfs\"\n      },\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hdfs-audit.log\"\n    }\n    \n  ],\n  \"filter\":[\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"accumulo_mas
 ter\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} [%-8c{2}] %-5p: %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"comment\":\"This one has one extra space after LEVEL\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"accumulo_gc\",\n            \"accumulo_monitor\",\n            \"accumulo_tracer\",\n            \"accumulo_tserver\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} [%-8c{2}] %-5p: %X{application} %m%n\",\n 
      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"atlas_app\",\n            \"falcon_app\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\"
 :{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ams_collector\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %p %c: %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ams_hbase_master\",\n    
         \"ams_hbase_regionserver\",\n            \"hbase_master\",\n            \"hbase_regionserver\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p [%t] %c{2}: %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ambari_agent\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"\",\n      \"multiline_pattern\":\"^(%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime})\",\n      \"
 message_pattern\":\"(?m)^%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime} %{JAVAFILE:file}:%{INT:line_number} - %{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        },\n        \"level\":{\n          \"map_fieldvalue\":{\n            \"pre_value\":\"WARNING\",\n            \"post_value\":\"WARN\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ambari_server\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{DATE} %5p [%t] %c{1}:%L - %m%n\",\n      \"multiline_pattern\":\"^(%{USER_SYNC_DATE:logtime})\",\n      \"message_pattern\":\"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{INT:line_number}
 %{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"dd MMM yyyy HH:mm:ss\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hdfs_datanode\",\n            \"hdfs_journalnode\",\n            \"hdfs_secondarynamenode\",\n            \"hdfs_namenode\",\n            \"hdfs_zkfc\",\n            \"knox_gateway\",\n            \"knox_cli\",\n            \"knox_ldap\",\n            \"mapred_historyserver\",\n            \"yarn_historyserver\",\n            \"yarn_jobsummary\",\n            \"yarn_nodemanager\",\n            \"yarn_resourcemanager\",\n            \"yarn_timelineserver\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTA
 MP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hive_hiveserver2\",\n            \"hive_metastore\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]:%{SPACE}%{JAVACLASS:logger_name}%{SP
 ACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"kafka_controller\",\n            \"kafka_request\",\n            \"kafka_logcleaner\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"[%d] %p %m (%c)%n\",\n      \"multiline_pattern\":\"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])\",\n      \"message_pattern\":\"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,S
 SS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"comment\":\"Suppose to be same log4j pattern as other kafka processes, but some reason thread is not printed\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"kafka_server\",\n            \"kafka_statechange\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"[%d] %p %m (%c)%n\",\n      \"multiline_pattern\":\"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])\",\n      \"message_pattern\":\"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"o
 ozie_app\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %5p %c{1}:%L - SERVER[${oozie.instance.id}] %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{DATA:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"logsearch_app\",\n            \"logsearch_feeder\",\n            \"logsearch_perf\",\n            \"ranger_admin\",\n            \"ranger_dbpatch\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d [%t] %-5p %C{6} (%F:%L) - %m%n\",\n  
     \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ranger_kms\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p %c{1} - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n  
     \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ranger_usersync\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n\",\n      \"multiline_pattern\":\"^(%{USER_SYNC_DATE:logtime})\",\n      \"message_pattern\":\"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"dd MMM yyyy HH:mm:ss\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n   
      \"fields\":{\n          \"type\":[\n            \"storm_drpc\",\n            \"storm_logviewer\",\n            \"storm_nimbus\",\n            \"storm_supervisor\",\n            \"storm_ui\",\n            \"storm_worker\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss.SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"zookeeper\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} - %-5p [%t:%C{1}@%
 L] - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}-%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\@%{INT:line_number}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hdfs_audit\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:evtTime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n    
     \"evtTime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"keyvalue\",\n      \"sort_order\":1,\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hdfs_audit\"\n          ]\n          \n        }\n        \n      },\n      \"source_field\":\"log_message\",\n      \"value_split\":\"=\",\n      \"field_split\":\"\t\",\n      \"post_map_values\":{\n        \"src\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"resource\"\n          }\n          \n        },\n        \"ip\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"cliIP\"\n          }\n          \n        },\n        \"allowed\":[\n          {\n            \"map_fieldvalue\":{\n              \"pre_value\":\"true\",\n              \"post_value\":\"1\"\n            }\n            \n          },\n          {\n            \"map
 _fieldvalue\":{\n              \"pre_value\":\"false\",\n              \"post_value\":\"0\"\n            }\n            \n          },\n          {\n            \"map_fieldname\":{\n              \"new_fieldname\":\"result\"\n            }\n            \n          }\n          \n        ],\n        \"cmd\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"action\"\n          }\n          \n        },\n        \"proto\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"cliType\"\n          }\n          \n        },\n        \"callerContext\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"req_caller_id\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"sort_order\":2,\n      \"source_field\":\"ugi\",\n      \"remove_source_field\":\"false\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hdfs_audit\"\n          ]\n          \n        
 }\n        \n      },\n      \"message_pattern\":\"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}\",\n      \"post_map_values\":{\n        \"user\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"reqUser\"\n          }\n          \n        },\n        \"x_user\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"reqUser\"\n          }\n          \n        },\n        \"p_user\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"reqUser\"\n          }\n          \n        },\n        \"k_user\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"proxyUsers\"\n          }\n          \n        },\n        \"p_authType\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"authType\"\n          }\n          \n        },\n        \"k_authType\":{\n          \"map_fieldname\":{\n            \"new_fie
 ldname\":\"proxyAuthType\"\n          }\n          \n        }\n        \n      }\n      \n    }\n    \n  ],\n  \"output\":[\n    {\n      \"is_enabled\":\"{{solr_service_logs_enable}}\",\n      \"comment\":\"Output to solr for service logs\",\n      \"destination\":\"solr\",\n      \"zk_hosts\":\"{{zookeeper_quorum}}{{solr_znode}}\",\n      \"collection\":\"{{solr_collection_service_logs}}\",\n      \"number_of_shards\": \"{{logsearch_collection_service_logs_numshards}}\",\n      \"splits_interval_mins\": \"{{logsearch_service_logs_split_interval_mins}}\",\n      \"conditions\":{\n        \"fields\":{\n          \"rowtype\":[\n            \"service\"\n          ]\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"comment\":\"Output to solr for audit records\",\n      \"is_enabled\":\"{{solr_audit_logs_enable}}\",\n      \"destination\":\"solr\",\n      \"zk_hosts\":\"{{zookeeper_quorum}}{{solr_znode}}\",\n      \"collection\":\"{{solr_collection_audit_logs}}\
 ",\n      \"number_of_shards\": \"{{logsearch_collection_audit_logs_numshards}}\",\n      \"splits_interval_mins\": \"{{logsearch_audit_logs_split_interval_mins}}\",\n      \"conditions\":{\n        \"fields\":{\n          \"rowtype\":[\n            \"audit\"\n          ]\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"is_enabled\":\"{{kafka_service_logs_enable}}\",\n      \"destination\":\"kafka\",\n      \"broker_list\":\"{{kafka_broker_list}}\",\n      \"topic\":\"{{kafka_topic_service_logs}}\",\n      \"kafka.security.protocol\":\"{{kafka_security_protocol}}\",\n      \"kafka.sasl.kerberos.service.name\":\"{{kafka_kerberos_service_name}}\",\n      \"conditions\":{\n        \"fields\":{\n          \"rowtype\":[\n            \"service\"\n          ]\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"is_enabled\":\"{{kafka_audit_logs_enable}}\",\n      \"destination\":\"kafka\",\n      \"broker_list\":\"{{kafka_broker_list}}\",\n    
   \"topic\":\"{{kafka_topic_audit_logs}}\",\n      \"kafka.security.protocol\":\"{{kafka_security_protocol}}\",\n      \"kafka.sasl.kerberos.service.name\":\"{{kafka_kerberos_service_name}}\",\n      \"conditions\":{\n        \"fields\":{\n          \"rowtype\":[\n            \"audit\"\n          ]\n          \n        }\n        \n      }\n      \n    }\n    \n  ]\n  \n}"
+        "content": "{\n  \"global\":{\n    \"add_fields\":{\n      \"cluster\":\"{{cluster_name}}\"\n    },\n    \"source\":\"file\",\n    \"tail\":\"true\",\n    \"gen_event_md5\":\"true\",\n    \"start_position\":\"beginning\"\n  },\n  \"input\":[\n    {\n      \"type\":\"accumulo_gc\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/gc_*.log\"\n    },\n    {\n      \"type\":\"accumulo_master\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/master_*.log\"\n    },\n    {\n      \"type\":\"accumulo_monitor\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/monitor_*.log\"\n    },\n    {\n      \"type\":\"accumulo_tracer\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/tracer_*.log\"\n    },\n    {\n      \"type\":\"accumulo_tserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{accumulo_log_dir}}/tserver_*.log\"\n    },\n    {\n      \"type\":\"atlas_app\",\n      \"rowtype\":\"se
 rvice\",\n      \"path\":\"{{atlas_log_dir}}/application.log\"\n    },\n    {\n      \"type\":\"ambari_agent\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ambari_agent_log_dir}}/ambari-agent.log\"\n    },\n    {\n      \"type\":\"ambari_server\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ambari_server_log_dir}}/ambari-server.log\"\n    },\n    {\n      \"type\":\"ams_hbase_master\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{metrics_collector_log_dir}}/hbase-ams-master-*.log\"\n    },\n    {\n      \"type\":\"ams_hbase_regionserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{metrics_collector_log_dir}}/hbase-ams-regionserver-*.log\"\n    },\n    {\n      \"type\":\"ams_collector\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{metrics_collector_log_dir}}/ambari-metrics-collector.log\"\n    },\n    {\n      \"type\":\"falcon_app\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{falcon_log_dir}}/falcon.application.log\"\n    },\n    {\
 n      \"type\":\"hbase_master\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hbase_log_dir}}/hbase-hbase-master-*.log\"\n    },\n    {\n      \"type\":\"hbase_regionserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hbase_log_dir}}/hbase-hbase-regionserver-*.log\"\n    },\n    {\n      \"type\":\"hdfs_datanode\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-datanode-*.log\"\n    },\n    {\n      \"type\":\"hdfs_namenode\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-namenode-*.log\"\n    },\n    {\n      \"type\":\"hdfs_journalnode\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-journalnode-*.log\"\n    },\n    {\n      \"type\":\"hdfs_secondarynamenode\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-secondarynamenode-*.log\"\n    },\n    {\n      \"type\":\"hdfs_zkfc\",\n      \"r
 owtype\":\"service\",\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-zkfc-*.log\"\n    },\n    {\n      \"type\":\"hive_hiveserver2\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hive_log_dir}}/hiveserver2.log\"\n    },\n    {\n      \"type\":\"hive_metastore\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{hive_log_dir}}/hivemetastore.log\"\n    },\n    {\n      \"type\":\"kafka_controller\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{kafka_log_dir}}/controller.log\"\n    },\n    {\n      \"type\":\"kafka_request\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{kafka_log_dir}}/kafka-request.log\"\n    },\n    {\n      \"type\":\"kafka_logcleaner\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{kafka_log_dir}}/log-cleaner.log\"\n    },\n    {\n      \"type\":\"kafka_server\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{kafka_log_dir}}/server.log\"\n    },\n    {\n      \"type\":\"kafka_statechange\",\n      \"rowtype\":\"service
 \",\n      \"path\":\"{{kafka_log_dir}}/state-change.log\"\n    },\n    {\n      \"type\":\"knox_gateway\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{knox_log_dir}}/gateway.log\"\n    },\n    {\n      \"type\":\"knox_cli\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{knox_log_dir}}/knoxcli.log\"\n    },\n    {\n      \"type\":\"knox_ldap\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{knox_log_dir}}/ldap.log\"\n    },\n    {\n      \"type\":\"mapred_historyserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{mapred_log_dir_prefix}}/mapred/mapred-mapred-historyserver*.log\"\n    },\n    {\n      \"type\":\"logsearch_app\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{logsearch_log_dir}}/logsearch.log\"\n    },\n    {\n      \"type\":\"logsearch_feeder\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{logfeeder_log_dir}}/logfeeder.log\"\n    },\n    {\n      \"type\":\"logsearch_perf\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{l
 ogsearch_log_dir}}/logsearch-performance.log\"\n    },\n    {\n      \"type\":\"ranger_admin\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ranger_admin_log_dir}}/xa_portal.log\"\n    },\n    {\n      \"type\":\"ranger_dbpatch\",\n      \"is_enabled\":\"true\",\n      \"path\":\"{{ranger_admin_log_dir}}/ranger_db_patch.log\"\n    },\n    {\n      \"type\":\"ranger_kms\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ranger_kms_log_dir}}/kms.log\"\n    },\n    {\n      \"type\":\"ranger_usersync\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{ranger_usersync_log_dir}}/usersync.log\"\n    },\n    {\n      \"type\":\"oozie_app\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{oozie_log_dir}}/oozie.log\"\n    },\n    {\n      \"type\":\"yarn_nodemanager\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-nodemanager-*.log\"\n    },\n    {\n      \"type\":\"yarn_resourcemanager\",\n      \"rowtype\":\"service\",\n      \"p
 ath\":\"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-resourcemanager-*.log\"\n    },\n    {\n      \"type\":\"yarn_timelineserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-timelineserver-*.log\"\n    },\n    {\n      \"type\":\"yarn_historyserver\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-historyserver-*.log\"\n    },\n    {\n      \"type\":\"yarn_jobsummary\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{yarn_log_dir_prefix}}/yarn/hadoop-mapreduce.jobsummary.log\"\n    },\n    {\n      \"type\":\"storm_drpc\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/drpc.log\"\n    },\n    {\n      \"type\":\"storm_logviewer\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/logviewer.log\"\n    },\n    {\n      \"type\":\"storm_nimbus\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/nimbus.log\"\n    },\n    {\n      \"type\":\"st
 orm_supervisor\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/supervisor.log\"\n    },\n    {\n      \"type\":\"storm_ui\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/ui.log\"\n    },\n    {\n      \"type\":\"storm_worker\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{storm_log_dir}}/*worker*.log\"\n    },\n    {\n      \"type\":\"zookeeper\",\n      \"rowtype\":\"service\",\n      \"path\":\"{{zk_log_dir}}/zookeeper/zookeeper*.out\"\n    },\n    {\n      \"type\":\"hdfs_audit\",\n      \"rowtype\":\"audit\",\n      \"is_enabled\":\"true\",\n      \"add_fields\":{\n        \"logType\":\"HDFSAudit\",\n        \"enforcer\":\"hadoop-acl\",\n        \"repoType\":\"1\",\n        \"repo\":\"hdfs\"\n      },\n      \"path\":\"{{hdfs_log_dir_prefix}}/hdfs/hdfs-audit.log\"\n    }\n    \n  ],\n  \"filter\":[\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"accumulo_mas
 ter\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} [%-8c{2}] %-5p: %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"comment\":\"This one has one extra space after LEVEL\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"accumulo_gc\",\n            \"accumulo_monitor\",\n            \"accumulo_tracer\",\n            \"accumulo_tserver\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} [%-8c{2}] %-5p: %X{application} %m%n\",\n 
      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"atlas_app\",\n            \"falcon_app\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\"
 :{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ams_collector\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %p %c: %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ams_hbase_master\",\n    
         \"ams_hbase_regionserver\",\n            \"hbase_master\",\n            \"hbase_regionserver\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p [%t] %c{2}: %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ambari_agent\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"\",\n      \"multiline_pattern\":\"^(%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime})\",\n      \"
 message_pattern\":\"(?m)^%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime} %{JAVAFILE:file}:%{INT:line_number} - %{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        },\n        \"level\":{\n          \"map_fieldvalue\":{\n            \"pre_value\":\"WARNING\",\n            \"post_value\":\"WARN\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ambari_server\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{DATE} %5p [%t] %c{1}:%L - %m%n\",\n      \"multiline_pattern\":\"^(%{USER_SYNC_DATE:logtime})\",\n      \"message_pattern\":\"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{INT:line_number}
 %{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"dd MMM yyyy HH:mm:ss\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hdfs_datanode\",\n            \"hdfs_journalnode\",\n            \"hdfs_secondarynamenode\",\n            \"hdfs_namenode\",\n            \"hdfs_zkfc\",\n            \"knox_gateway\",\n            \"knox_cli\",\n            \"knox_ldap\",\n            \"mapred_historyserver\",\n            \"yarn_historyserver\",\n            \"yarn_jobsummary\",\n            \"yarn_nodemanager\",\n            \"yarn_resourcemanager\",\n            \"yarn_timelineserver\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTA
 MP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hive_hiveserver2\",\n            \"hive_metastore\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]:%{SPACE}%{JAVACLASS:logger_name}%{SP
 ACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"kafka_controller\",\n            \"kafka_request\",\n            \"kafka_logcleaner\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"[%d] %p %m (%c)%n\",\n      \"multiline_pattern\":\"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])\",\n      \"message_pattern\":\"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,S
 SS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"comment\":\"Suppose to be same log4j pattern as other kafka processes, but some reason thread is not printed\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"kafka_server\",\n            \"kafka_statechange\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"[%d] %p %m (%c)%n\",\n      \"multiline_pattern\":\"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])\",\n      \"message_pattern\":\"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"o
 ozie_app\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %5p %c{1}:%L - SERVER[${oozie.instance.id}] %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{DATA:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"logsearch_app\",\n            \"logsearch_feeder\",\n            \"logsearch_perf\",\n            \"ranger_admin\",\n            \"ranger_dbpatch\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d [%t] %-5p %C{6} (%F:%L) - %m%n\",\n  
     \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ranger_kms\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p %c{1} - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n  
     \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"ranger_usersync\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n\",\n      \"multiline_pattern\":\"^(%{USER_SYNC_DATE:logtime})\",\n      \"message_pattern\":\"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"dd MMM yyyy HH:mm:ss\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n   
      \"fields\":{\n          \"type\":[\n            \"storm_drpc\",\n            \"storm_logviewer\",\n            \"storm_nimbus\",\n            \"storm_supervisor\",\n            \"storm_ui\",\n            \"storm_worker\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss.SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"zookeeper\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} - %-5p [%t:%C{1}@%
 L] - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:logtime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}-%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\@%{INT:line_number}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n        \"logtime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hdfs_audit\"\n          ]\n          \n        }\n        \n      },\n      \"log4j_format\":\"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n\",\n      \"multiline_pattern\":\"^(%{TIMESTAMP_ISO8601:evtTime})\",\n      \"message_pattern\":\"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}\",\n      \"post_map_values\":{\n    
     \"evtTime\":{\n          \"map_date\":{\n            \"date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"keyvalue\",\n      \"sort_order\":1,\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hdfs_audit\"\n          ]\n          \n        }\n        \n      },\n      \"source_field\":\"log_message\",\n      \"value_split\":\"=\",\n      \"field_split\":\"\t\",\n      \"post_map_values\":{\n        \"src\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"resource\"\n          }\n          \n        },\n        \"ip\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"cliIP\"\n          }\n          \n        },\n        \"allowed\":[\n          {\n            \"map_fieldvalue\":{\n              \"pre_value\":\"true\",\n              \"post_value\":\"1\"\n            }\n            \n          },\n          {\n            \"map
 _fieldvalue\":{\n              \"pre_value\":\"false\",\n              \"post_value\":\"0\"\n            }\n            \n          },\n          {\n            \"map_fieldname\":{\n              \"new_fieldname\":\"result\"\n            }\n            \n          }\n          \n        ],\n        \"cmd\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"action\"\n          }\n          \n        },\n        \"proto\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"cliType\"\n          }\n          \n        },\n        \"callerContext\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"req_caller_id\"\n          }\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"filter\":\"grok\",\n      \"sort_order\":2,\n      \"source_field\":\"ugi\",\n      \"remove_source_field\":\"false\",\n      \"conditions\":{\n        \"fields\":{\n          \"type\":[\n            \"hdfs_audit\"\n          ]\n          \n        
 }\n        \n      },\n      \"message_pattern\":\"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}\",\n      \"post_map_values\":{\n        \"user\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"reqUser\"\n          }\n          \n        },\n        \"x_user\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"reqUser\"\n          }\n          \n        },\n        \"p_user\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"reqUser\"\n          }\n          \n        },\n        \"k_user\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"proxyUsers\"\n          }\n          \n        },\n        \"p_authType\":{\n          \"map_fieldname\":{\n            \"new_fieldname\":\"authType\"\n          }\n          \n        },\n        \"k_authType\":{\n          \"map_fieldname\":{\n            \"new_fie
 ldname\":\"proxyAuthType\"\n          }\n          \n        }\n        \n      }\n      \n    }\n    \n  ],\n  \"output\":[\n    {\n      \"is_enabled\":\"{{solr_service_logs_enable}}\",\n      \"comment\":\"Output to solr for service logs\",\n      \"destination\":\"solr\",\n      \"zk_connect_string\":\"{{zookeeper_quorum}}{{solr_znode}}\",\n      \"collection\":\"{{solr_collection_service_logs}}\",\n      \"number_of_shards\": \"{{logsearch_collection_service_logs_numshards}}\",\n      \"splits_interval_mins\": \"{{logsearch_service_logs_split_interval_mins}}\",\n      \"conditions\":{\n        \"fields\":{\n          \"rowtype\":[\n            \"service\"\n          ]\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"comment\":\"Output to solr for audit records\",\n      \"is_enabled\":\"{{solr_audit_logs_enable}}\",\n      \"destination\":\"solr\",\n      \"zk_connect_string\":\"{{zookeeper_quorum}}{{solr_znode}}\",\n      \"collection\":\"{{solr_collec
 tion_audit_logs}}\",\n      \"number_of_shards\": \"{{logsearch_collection_audit_logs_numshards}}\",\n      \"splits_interval_mins\": \"{{logsearch_audit_logs_split_interval_mins}}\",\n      \"conditions\":{\n        \"fields\":{\n          \"rowtype\":[\n            \"audit\"\n          ]\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"is_enabled\":\"{{kafka_service_logs_enable}}\",\n      \"destination\":\"kafka\",\n      \"broker_list\":\"{{kafka_broker_list}}\",\n      \"topic\":\"{{kafka_topic_service_logs}}\",\n      \"kafka.security.protocol\":\"{{kafka_security_protocol}}\",\n      \"kafka.sasl.kerberos.service.name\":\"{{kafka_kerberos_service_name}}\",\n      \"conditions\":{\n        \"fields\":{\n          \"rowtype\":[\n            \"service\"\n          ]\n          \n        }\n        \n      }\n      \n    },\n    {\n      \"is_enabled\":\"{{kafka_audit_logs_enable}}\",\n      \"destination\":\"kafka\",\n      \"broker_list\":\"{{kafka_brok
 er_list}}\",\n      \"topic\":\"{{kafka_topic_audit_logs}}\",\n      \"kafka.security.protocol\":\"{{kafka_security_protocol}}\",\n      \"kafka.sasl.kerberos.service.name\":\"{{kafka_kerberos_service_name}}\",\n      \"conditions\":{\n        \"fields\":{\n          \"rowtype\":[\n            \"audit\"\n          ]\n          \n        }\n        \n      }\n      \n    }\n    \n  ]\n  \n}"
       }
     },
     "configuration_attributes": {


[2/2] ambari git commit: AMBARI-17117. Fix misnamed Zookeeper connect strings in Log Search (Miklos Gergely via oleewere)

Posted by ol...@apache.org.
AMBARI-17117. Fix misnamed Zookeeper connect strings in Log Search (Miklos Gergely via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a2c23b21
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a2c23b21
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a2c23b21

Branch: refs/heads/trunk
Commit: a2c23b213380cf7e8ccbf1db130a0925cc159516
Parents: 6c2736a
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Tue Jun 21 19:05:52 2016 +0200
Committer: oleewere <ol...@gmail.com>
Committed: Tue Jun 21 19:11:23 2016 +0200

----------------------------------------------------------------------
 .../logconfig/FetchConfigFromSolr.java          |  8 ++--
 .../ambari/logfeeder/output/OutputSolr.java     | 46 +++++++++++---------
 .../apache/ambari/logfeeder/util/SolrUtil.java  | 18 ++++----
 .../src/main/resources/config.json.j2           |  4 +-
 .../src/main/resources/logfeeder.properties     |  2 +-
 .../src/main/resources/output.config.json.j2    |  4 +-
 .../ambari/logfeeder/output/OutputSolrTest.java |  8 ++--
 .../ambari/logsearch/dao/AuditSolrDao.java      |  6 +--
 .../logsearch/dao/ServiceLogsSolrDao.java       |  6 +--
 .../ambari/logsearch/dao/SolrDaoBase.java       | 20 ++++-----
 .../ambari/logsearch/dao/UserConfigSolrDao.java |  6 +--
 .../solr/metrics/SolrMetricsLoader.java         |  4 +-
 .../src/main/resources/logsearch.properties.j2  |  4 +-
 .../logsearch/solr/AmbariSolrCloudCLI.java      | 24 +++++-----
 .../logsearch/solr/AmbariSolrCloudClient.java   |  8 ++--
 .../solr/AmbariSolrCloudClientBuilder.java      | 10 ++---
 .../solr/commands/GetSolrHostsCommand.java      |  2 +-
 .../solr/AmbariSolrCloudClientTest.java         |  2 +-
 .../package/templates/logfeeder.properties.j2   |  2 +-
 .../package/templates/logsearch.properties.j2   |  4 +-
 .../package/templates/output.config.json.j2     |  4 +-
 .../test/python/stacks/2.4/configs/default.json |  2 +-
 22 files changed, 99 insertions(+), 95 deletions(-)
----------------------------------------------------------------------
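
For context: the renamed value is a full ZooKeeper connect string in the SolrCloud
sense, i.e. the quorum plus an optional chroot znode (for example
host1:2181,host2:2181/ambari-solr), not merely a list of hosts, which is what the
old zk_hosts / zkhosts names suggested. A minimal sketch of how such a string is
consumed, with hypothetical host and collection names; the CloudSolrClient(String)
constructor is the same one used throughout this patch:

    import org.apache.solr.client.solrj.impl.CloudSolrClient;

    public class ZkConnectStringExample {
      public static void main(String[] args) throws Exception {
        // Quorum + chroot znode, i.e. a full ZooKeeper connect string (hosts are hypothetical).
        String zkConnectString =
            "c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181/ambari-solr";
        try (CloudSolrClient solrClient = new CloudSolrClient(zkConnectString)) {
          solrClient.setDefaultCollection("hadoop_logs");
          System.out.println("Ping status: " + solrClient.ping().getStatus());
        }
      }
    }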


http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
index 4240b86..5b9fe8f 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/logconfig/FetchConfigFromSolr.java
@@ -47,12 +47,12 @@ public class FetchConfigFromSolr extends Thread {
 
   @Override
   public void run() {
-    String zkHosts = LogFeederUtil.getStringProperty("logfeeder.solr.zkhosts");
+    String zkConnectString = LogFeederUtil.getStringProperty("logfeeder.solr.zk_connect_string");
     String solrUrl = LogFeederUtil.getStringProperty("logfeeder.solr.url");
-    if ((zkHosts == null || zkHosts.trim().length() == 0)
+    if ((zkConnectString == null || zkConnectString.trim().length() == 0)
         && (solrUrl == null || solrUrl.trim().length() == 0)) {
-      logger
-          .warn("Solr ZKHosts or solrUrl for UserConfig/History is not set. Won't look for level configuration from Solr.");
+      logger.warn("Neither Solr ZK Connect String nor solr Uril for UserConfig/History is set." +
+          "Won't look for level configuration from Solr.");
       return;
     }
     solrConfigInterval = LogFeederUtil.getIntProperty("logfeeder.solr.config.interval", solrConfigInterval);

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
index c945ed7..0480fbd 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputSolr.java
@@ -133,45 +133,49 @@ public class OutputSolr extends Output {
 
   private void createSolrWorkers() throws Exception, MalformedURLException {
     String solrUrl = getStringValue("url");
-    String zkHosts = getStringValue("zk_hosts");
-    if (StringUtils.isEmpty(solrUrl) && StringUtils.isEmpty(zkHosts)) {
-      throw new Exception("For solr output, either url or zk_hosts property need to be set");
+    String zkConnectString = getStringValue("zk_connect_string");
+    if (StringUtils.isEmpty(solrUrl) && StringUtils.isEmpty(zkConnectString)) {
+      throw new Exception("For solr output, either url or zk_connect_string property need to be set");
     }
 
     for (int count = 0; count < workers; count++) {
-      SolrClient solrClient = getSolrClient(solrUrl, zkHosts, count);
+      SolrClient solrClient = getSolrClient(solrUrl, zkConnectString, count);
       createSolrWorkerThread(count, solrClient);
     }
   }
 
-  SolrClient getSolrClient(String solrUrl, String zkHosts, int count) throws Exception, MalformedURLException {
-    SolrClient solrClient = createSolrClient(solrUrl, zkHosts, collection);
-    pingSolr(solrUrl, zkHosts, count, solrClient);
+  SolrClient getSolrClient(String solrUrl, String zkConnectString, int count) throws Exception, MalformedURLException {
+    SolrClient solrClient = createSolrClient(solrUrl, zkConnectString);
+    pingSolr(solrUrl, zkConnectString, count, solrClient);
     waitForConfig();
 
     return solrClient;
   }
 
-  private SolrClient createSolrClient(String solrUrl, String zkHosts, String collection) throws Exception, MalformedURLException {
+  private SolrClient createSolrClient(String solrUrl, String zkConnectString) throws Exception, MalformedURLException {
     SolrClient solrClient;
-    if (zkHosts != null) {
-      solrClient = createCloudSolrClient(zkHosts, collection);
+    if (zkConnectString != null) {
+      solrClient = createCloudSolrClient(zkConnectString);
     } else {
-      solrClient = createHttpSolarClient(solrUrl, collection);
+      solrClient = createHttpSolrClient(solrUrl);
     }
     return solrClient;
   }
 
-  private SolrClient createCloudSolrClient(String zkHosts, String collection) throws Exception {
-    LOG.info("Using zookeepr. zkHosts=" + zkHosts);
+  private SolrClient createCloudSolrClient(String zkConnectString) throws Exception {
+    LOG.info("Using zookeepr. zkConnectString=" + zkConnectString);
+    collection = getStringValue("collection");
+    if (StringUtils.isEmpty(collection)) {
+      throw new Exception("For solr cloud property collection is mandatory");
+    }
     LOG.info("Using collection=" + collection);
 
-    CloudSolrClient solrClient = new CloudSolrClient(zkHosts);
+    CloudSolrClient solrClient = new CloudSolrClient(zkConnectString);
     solrClient.setDefaultCollection(collection);
     return solrClient;
   }
 
-  private SolrClient createHttpSolarClient(String solrUrl, String collection) throws MalformedURLException {
+  private SolrClient createHttpSolrClient(String solrUrl) throws MalformedURLException {
     String[] solrUrls = StringUtils.split(solrUrl, ",");
     if (solrUrls.length == 1) {
       LOG.info("Using SolrURL=" + solrUrl);
@@ -188,9 +192,9 @@ public class OutputSolr extends Output {
     }
   }
 
-  private void pingSolr(String solrUrl, String zkHosts, int count, SolrClient solrClient) {
+  private void pingSolr(String solrUrl, String zkConnectString, int count, SolrClient solrClient) {
     try {
-      LOG.info("Pinging Solr server. zkHosts=" + zkHosts + ", urls=" + solrUrl);
+      LOG.info("Pinging Solr server. zkConnectString=" + zkConnectString + ", urls=" + solrUrl);
       SolrPingResponse response = solrClient.ping();
       if (response.getStatus() == 0) {
         LOG.info("Ping to Solr server is successful for worker=" + count);
@@ -198,13 +202,13 @@ public class OutputSolr extends Output {
         LOG.warn(
             String.format(
                 "Ping to Solr server failed. It would check again. worker=%d, "
-                    + "solrUrl=%s, zkHosts=%s, collection=%s, response=%s",
-                count, solrUrl, zkHosts, collection, response));
+                    + "solrUrl=%s, zkConnectString=%s, collection=%s, response=%s",
+                count, solrUrl, zkConnectString, collection, response));
       }
     } catch (Throwable t) {
       LOG.warn(String.format(
-          "Ping to Solr server failed. It would check again. worker=%d, solrUrl=%s, zkHosts=%s, collection=%s",
-          count, solrUrl, zkHosts, collection), t);
+          "Ping to Solr server failed. It would check again. worker=%d, " + "solrUrl=%s, zkConnectString=%s, collection=%s",
+          count, solrUrl, zkConnectString, collection), t);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
index 31fbded..2257e32 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/SolrUtil.java
@@ -53,9 +53,9 @@ public class SolrUtil {
 
   private SolrUtil() throws Exception {
     String url = LogFeederUtil.getStringProperty("logfeeder.solr.url");
-    String zkHosts = LogFeederUtil.getStringProperty("logfeeder.solr.zkhosts");
+    String zkConnectString = LogFeederUtil.getStringProperty("logfeeder.solr.zk_connect_string");
     String collection = LogFeederUtil.getStringProperty("logfeeder.solr.core.config.name", "history");
-    connectToSolr(url, zkHosts, collection);
+    connectToSolr(url, zkConnectString, collection);
   }
 
   public static SolrUtil getInstance() {
@@ -78,10 +78,10 @@ public class SolrUtil {
     return instance;
   }
 
-  public SolrClient connectToSolr(String url, String zkHosts,
+  public SolrClient connectToSolr(String url, String zkConnectString,
                                   String collection) throws Exception {
     this.collectionName = collection;
-    solrDetail = "zkHosts=" + zkHosts + ", collection=" + collection
+    solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + collection
       + ", url=" + url;
 
     logger.info("connectToSolr() " + solrDetail);
@@ -89,18 +89,18 @@ public class SolrUtil {
       throw new Exception("For solr, collection name is mandatory. "
         + solrDetail);
     }
-    if (zkHosts != null && !zkHosts.isEmpty()) {
-      solrDetail = "zkHosts=" + zkHosts + ", collection=" + collection;
+    if (zkConnectString != null && !zkConnectString.isEmpty()) {
+      solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + collection;
       logger.info("Using zookeepr. " + solrDetail);
-      solrClouldClient = new CloudSolrClient(zkHosts);
+      solrClouldClient = new CloudSolrClient(zkConnectString);
       solrClouldClient.setDefaultCollection(collection);
       solrClient = solrClouldClient;
       int waitDurationMS = 3 * 60 * 1000;
       checkSolrStatus(waitDurationMS);
     } else {
       if (url == null || url.trim().isEmpty()) {
-        throw new Exception("Both zkHosts and URL are empty. zkHosts="
-          + zkHosts + ", collection=" + collection + ", url="
+        throw new Exception("Both zkConnectString and URL are empty. zkConnectString="
+          + zkConnectString + ", collection=" + collection + ", url="
           + url);
       }
       solrDetail = "collection=" + collection + ", url=" + url;

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2 b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2
index b6301ca..3428dd8 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2
@@ -928,7 +928,7 @@
 			"is_enabled":"{{solr_service_logs_enable}}",
 			"comment":"Output to solr for service logs",
 			"destination":"solr",
-			"zk_hosts":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
+			"zk_connect_string":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
 			"collection":"{{logsearch_solr_collection_service_logs}}",
 			"number_of_shards": "{{logsearch_collection_service_logs_numshards}}",
 			"splits_interval_mins": "{{logsearch_service_logs_split_interval_mins}}",
@@ -947,7 +947,7 @@
 			"comment":"Output to solr for audit records",
 			"is_enabled":"{{solr_audit_logs_enable}}",
 			"destination":"solr",
-			"zk_hosts":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
+			"zk_connect_string":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
 			"collection":"{{logsearch_solr_collection_audit_logs}}",
 			"number_of_shards": "{{logsearch_collection_audit_logs_numshards}}",
 			"splits_interval_mins": "{{logsearch_audit_logs_split_interval_mins}}",

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/logfeeder.properties
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/logfeeder.properties b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/logfeeder.properties
index b4655cc..982a72d 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/logfeeder.properties
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/logfeeder.properties
@@ -19,7 +19,7 @@ logfeeder.metrics.collector.hosts=
 #filter config
 logfeeder.log.filter.enable=true
 logfeeder.solr.config.interval=5
-logfeeder.solr.zkhosts=
+logfeeder.solr.zk_connect_string=
 logfeeder.solr.url=
 
 logfeeder.solr.kerberos.enable=false
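
For reference, a minimal sketch of reading the renamed key; plain
java.util.Properties stands in here for the project's
LogFeederUtil.getStringProperty, and the file path and the example value in the
comment are hypothetical:

    import java.io.FileInputStream;
    import java.util.Properties;

    public class LogFeederPropertySketch {
      public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Hypothetical location; in a real deployment the file is rendered from the .j2 template.
        try (FileInputStream in =
            new FileInputStream("/etc/ambari-logsearch-logfeeder/conf/logfeeder.properties")) {
          props.load(in);
        }
        // Renamed key; the value is a full connect string (quorum + znode), e.g.
        // c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181/ambari-solr
        String zkConnectString = props.getProperty("logfeeder.solr.zk_connect_string");
        String solrUrl = props.getProperty("logfeeder.solr.url");
        System.out.println("zkConnectString=" + zkConnectString + ", solrUrl=" + solrUrl);
      }
    }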

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/output.config.json.j2
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/output.config.json.j2 b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/output.config.json.j2
index a485600..63c590e 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/output.config.json.j2
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/output.config.json.j2
@@ -21,7 +21,7 @@
 			"is_enabled":"{{solr_service_logs_enable}}",
 			"comment":"Output to solr for service logs",
 			"destination":"solr",
-			"zk_hosts":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
+			"zk_connect_string":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
 			"collection":"{{logsearch_solr_collection_service_logs}}",
 			"number_of_shards": "{{logsearch_collection_service_logs_numshards}}",
 			"splits_interval_mins": "{{logsearch_service_logs_split_interval_mins}}",
@@ -40,7 +40,7 @@
 			"comment":"Output to solr for audit records",
 			"is_enabled":"{{solr_audit_logs_enable}}",
 			"destination":"solr",
-			"zk_hosts":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
+			"zk_connect_string":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
 			"collection":"{{logsearch_solr_collection_audit_logs}}",
 			"number_of_shards": "{{logsearch_collection_audit_logs_numshards}}",
 			"splits_interval_mins": "{{logsearch_audit_logs_split_interval_mins}}",

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputSolrTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputSolrTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputSolrTest.java
index 3014ed8..33bb33f 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputSolrTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputSolrTest.java
@@ -57,7 +57,7 @@ public class OutputSolrTest {
   public void init() throws Exception {
     outputSolr = new OutputSolr() {
       @Override
-      SolrClient getSolrClient(String solrUrl, String zkHosts, int count) throws Exception, MalformedURLException {
+      SolrClient getSolrClient(String solrUrl, String zkConnectString, int count) throws Exception, MalformedURLException {
         return new CloudSolrClient(null) {
           private static final long serialVersionUID = 1L;
 
@@ -146,11 +146,11 @@ public class OutputSolrTest {
   }
 
   @Test
-  public void testOutputToSolr_noUrlOrZKHost() throws Exception {
-    LOG.info("testOutputToSolr_noUrlOrZKHost()");
+  public void testOutputToSolr_noUrlOrZkConnectString() throws Exception {
+    LOG.info("testOutputToSolr_noUrlOrZkConnectString()");
 
     expectedException.expect(Exception.class);
-    expectedException.expectMessage("For solr output, either url or zk_hosts property need to be set");
+    expectedException.expectMessage("For solr output, either url or zk_connect_string property needs to be set");
 
     Map<String, Object> config = new HashMap<String, Object>();
     config.put("workers", "3");

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
index 03ff0ff..f1789c1 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/AuditSolrDao.java
@@ -38,7 +38,7 @@ public class AuditSolrDao extends SolrDaoBase {
   @PostConstruct
   public void postConstructor() {
     String solrUrl = PropertiesUtil.getProperty("logsearch.solr.audit.logs.url");
-    String zkHosts = PropertiesUtil.getProperty("logsearch.solr.audit.logs.zkhosts");
+    String zkConnectString = PropertiesUtil.getProperty("logsearch.solr.audit.logs.zk_connect_string");
     String collection = PropertiesUtil.getProperty(
       "logsearch.solr.collection.audit.logs", "audit_logs");
     String splitInterval = PropertiesUtil.getProperty(
@@ -51,13 +51,13 @@ public class AuditSolrDao extends SolrDaoBase {
       "logsearch.collection.audit.logs.replication.factor", 1);
 
     try {
-      connectToSolr(solrUrl, zkHosts, collection);
+      connectToSolr(solrUrl, zkConnectString, collection);
       setupCollections(splitInterval, configName, numberOfShards,
         replicationFactor);
     } catch (Exception e) {
       logger.error(
         "Error while connecting to Solr for audit logs : solrUrl="
-          + solrUrl + ", zkHosts=" + zkHosts
+          + solrUrl + ", zkConnectString=" + zkConnectString
           + ", collection=" + collection, e);
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
index 14125bc..8c0f9a6 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/ServiceLogsSolrDao.java
@@ -39,7 +39,7 @@ public class ServiceLogsSolrDao extends SolrDaoBase {
   public void postConstructor() {
     logger.info("postConstructor() called.");
     String solrUrl = PropertiesUtil.getProperty("logsearch.solr.url");
-    String zkHosts = PropertiesUtil.getProperty("logsearch.solr.zkhosts");
+    String zkConnectString = PropertiesUtil.getProperty("logsearch.solr.zk_connect_string");
     String collection = PropertiesUtil.getProperty("logsearch.solr.collection.service.logs",
       "hadoop_logs");
     String splitInterval = PropertiesUtil.getProperty(
@@ -52,13 +52,13 @@ public class ServiceLogsSolrDao extends SolrDaoBase {
       "logsearch.collection.service.logs.replication.factor", 1);
 
     try {
-      connectToSolr(solrUrl, zkHosts, collection);
+      connectToSolr(solrUrl, zkConnectString, collection);
       setupCollections(splitInterval, configName, numberOfShards,
         replicationFactor);
     } catch (Exception e) {
       logger.error(
         "error while connecting to Solr for service logs : solrUrl="
-          + solrUrl + ", zkHosts=" + zkHosts
+          + solrUrl + ", zkConnectString=" + zkConnectString
           + ", collection=" + collection, e);
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
index 4564752..c13105a 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/SolrDaoBase.java
@@ -93,17 +93,17 @@ public abstract class SolrDaoBase {
 
   int SETUP_RETRY_SECOND = 30;
   
-  private boolean isZkhost=false;//by default its false
+  private boolean isZkConnectString = false; // by default it's false
   
   //set logtype
   public SolrDaoBase(LOG_TYPE logType) {
     this.logType = logType;
   }
 
-  public SolrClient connectToSolr(String url, String zkHosts,
+  public SolrClient connectToSolr(String url, String zkConnectString,
                                   String collection) throws Exception {
     this.collectionName = collection;
-    solrDetail = "zkHosts=" + zkHosts + ", collection=" + collection
+    solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + collection
       + ", url=" + url;
 
     logger.info("connectToSolr() " + solrDetail);
@@ -112,19 +112,19 @@ public abstract class SolrDaoBase {
         + solrDetail);
     }
     setupSecurity();
-    if (!stringUtil.isEmpty(zkHosts)) {
-      isZkhost=true;
-      solrDetail = "zkHosts=" + zkHosts + ", collection=" + collection;
+    if (!stringUtil.isEmpty(zkConnectString)) {
+      isZkConnectString=true;
+      solrDetail = "zkConnectString=" + zkConnectString + ", collection=" + collection;
       logger.info("Using zookeepr. " + solrDetail);
-      solrClouldClient = new CloudSolrClient(zkHosts);
+      solrClouldClient = new CloudSolrClient(zkConnectString);
       solrClouldClient.setDefaultCollection(collection);
       solrClient = solrClouldClient;
       int waitDurationMS = 3 * 60 * 1000;
       checkSolrStatus(waitDurationMS);
     } else {
       if (stringUtil.isEmpty(url)) {
-        throw new Exception("Both zkHosts and URL are empty. zkHosts="
-          + zkHosts + ", collection=" + collection + ", url="
+        throw new Exception("Both zkConnectString and URL are empty. zkConnectString="
+          + zkConnectString + ", collection=" + collection + ", url="
           + url);
       }
       solrDetail = "collection=" + collection + ", url=" + url;
@@ -192,7 +192,7 @@ public abstract class SolrDaoBase {
 
   public void setupCollections(final String splitMode, final String configName,
       final int numberOfShards, final int replicationFactor) throws Exception {
-    if (isZkhost) {
+    if (isZkConnectString) {
       setup_status = createCollectionsIfNeeded(splitMode, configName,
           numberOfShards, replicationFactor);
       logger.info("Setup status for " + collectionName + " is " + setup_status);

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
index edf1dcc..cd9fb19 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/dao/UserConfigSolrDao.java
@@ -59,7 +59,7 @@ public class UserConfigSolrDao extends SolrDaoBase {
   public void postConstructor() {
 
     String solrUrl = PropertiesUtil.getProperty("logsearch.solr.url");
-    String zkHosts = PropertiesUtil.getProperty("logsearch.solr.zkhosts");
+    String zkConnectString = PropertiesUtil.getProperty("logsearch.solr.zk_connect_string");
     String collection = PropertiesUtil.getProperty("logsearch.solr.collection.history",
       "history");
     String configName = PropertiesUtil.getProperty(
@@ -70,7 +70,7 @@ public class UserConfigSolrDao extends SolrDaoBase {
     int numberOfShards = 1;
 
     try {
-      connectToSolr(solrUrl, zkHosts, collection);
+      connectToSolr(solrUrl, zkConnectString, collection);
       setupCollections(splitInterval, configName, numberOfShards,
         replicationFactor);
       intializeLogFeederFilter();
@@ -78,7 +78,7 @@ public class UserConfigSolrDao extends SolrDaoBase {
     } catch (Exception e) {
       logger.error(
         "error while connecting to Solr for history logs : solrUrl="
-          + solrUrl + ", zkHosts=" + zkHosts
+          + solrUrl + ", zkConnectString=" + zkConnectString
           + ", collection=" + collection, e);
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java
index 21c010f..5f6a91c 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/solr/metrics/SolrMetricsLoader.java
@@ -180,9 +180,9 @@ public class SolrMetricsLoader extends TimerTask {
 
       int solrJmxPort = PropertiesUtil.getIntProperty("logsearch.solr.jmx.port");
 
-      String zkHosts = PropertiesUtil.getProperty("logsearch.solr.zkhosts");
+      String zkConnectString = PropertiesUtil.getProperty("logsearch.solr.zk_connect_string");
       AmbariSolrCloudClient ambariSolrCloudClient = new AmbariSolrCloudClientBuilder()
-          .withZookeeperHosts(zkHosts)
+          .withZkConnectString(zkConnectString)
           .build();
 
       Collection<String> solrHosts = ambariSolrCloudClient.getSolrHosts();

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties.j2 b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties.j2
index 0a94186..8e3966e 100755
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties.j2
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/resources/logsearch.properties.j2
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-logsearch.solr.zkhosts={{zookeeper_quorum}}{{logsearch_solr_znode}}
+logsearch.solr.zk_connect_string={{zookeeper_quorum}}{{logsearch_solr_znode}}
 logsearch.solr.collection.service.logs={{logsearch_solr_collection_service_logs}}
 
 logsearch.service.logs.split.interval.mins={{logsearch_service_logs_split_interval_mins}}
@@ -23,7 +23,7 @@ logsearch.collection.service.logs.replication.factor={{logsearch_collection_serv
 logsearch.solr.collection.history={{logsearch_solr_collection_history}}
 
 #Audit logs
-logsearch.solr.audit.logs.zkhosts={{logsearch_solr_audit_logs_zk_quorum}}{{logsearch_solr_audit_logs_zk_node}}
+logsearch.solr.audit.logs.zk_connect_string={{logsearch_solr_audit_logs_zk_quorum}}{{logsearch_solr_audit_logs_zk_node}}
 logsearch.solr.collection.audit.logs={{logsearch_solr_collection_audit_logs}}
 logsearch.solr.audit.logs.url={{logsearch_solr_audit_logs_url}}
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudCLI.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudCLI.java b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudCLI.java
index ad5d789..38946f6 100644
--- a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudCLI.java
+++ b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudCLI.java
@@ -99,11 +99,11 @@ public class AmbariSolrCloudCLI {
       .desc("Sharding not used when creating collection")
       .build();
 
-    final Option zookeeperHostOption = Option.builder("z")
-      .longOpt("zookeeper-host")
-      .desc("Zookeeper quorum and Znode")
+    final Option zkConnectStringOption = Option.builder("z")
+      .longOpt("zookeeper-connect-string")
+      .desc("Zookeeper quorum [and a Znode]")
       .numberOfArgs(1)
-      .argName("host:port,host:port../ambari-solr")
+      .argName("host:port,host:port[/ambari-solr]")
       .build();
 
     final Option collectionOption = Option.builder("c")
@@ -190,7 +190,7 @@ public class AmbariSolrCloudCLI {
     options.addOption(helpOption);
     options.addOption(retryOption);
     options.addOption(intervalOption);
-    options.addOption(zookeeperHostOption);
+    options.addOption(zkConnectStringOption);
     options.addOption(configSetOption);
     options.addOption(configDirOption);
     options.addOption(collectionOption);
@@ -220,19 +220,19 @@ public class AmbariSolrCloudCLI {
       String command = "";
       if (cli.hasOption("cc")) {
         command = CREATE_COLLECTION_COMMAND;
-        validateRequiredOptions(cli, command, zookeeperHostOption, collectionOption, configSetOption);
+        validateRequiredOptions(cli, command, zkConnectStringOption, collectionOption, configSetOption);
       } else if (cli.hasOption("uc")) {
         command = UPLOAD_CONFIG_COMMAND;
-        validateRequiredOptions(cli, command, zookeeperHostOption, configSetOption, configDirOption);
+        validateRequiredOptions(cli, command, zkConnectStringOption, configSetOption, configDirOption);
       } else if (cli.hasOption("dc")) {
         command = DOWNLOAD_CONFIG_COMMAND;
-        validateRequiredOptions(cli, command, zookeeperHostOption, configSetOption, configDirOption);
+        validateRequiredOptions(cli, command, zkConnectStringOption, configSetOption, configDirOption);
       } else if (cli.hasOption("csh")) {
         command = CREATE_SHARD_COMMAND;
-        validateRequiredOptions(cli, command, zookeeperHostOption, collectionOption, shardNameOption);
+        validateRequiredOptions(cli, command, zkConnectStringOption, collectionOption, shardNameOption);
       } else if (cli.hasOption("chc")) {
         command = CONFIG_CHECK_COMMAND;
-        validateRequiredOptions(cli, command, zookeeperHostOption, configSetOption);
+        validateRequiredOptions(cli, command, zkConnectStringOption, configSetOption);
       } else {
         List<String> commands = Arrays.asList(CREATE_COLLECTION_COMMAND, CREATE_SHARD_COMMAND, UPLOAD_CONFIG_COMMAND,
           DOWNLOAD_CONFIG_COMMAND, CONFIG_CHECK_COMMAND);
@@ -240,7 +240,7 @@ public class AmbariSolrCloudCLI {
         exit(1, String.format("One of the supported commands is required (%s)", StringUtils.join(commands, "|")));
       }
 
-      String zookeeperHosts = cli.getOptionValue('z');
+      String zkConnectString = cli.getOptionValue('z');
       String collection = cli.getOptionValue('c');
       String configSet = cli.getOptionValue("cs");
       String configDir = cli.getOptionValue("d");
@@ -257,7 +257,7 @@ public class AmbariSolrCloudCLI {
 
 
       AmbariSolrCloudClientBuilder clientBuilder = new AmbariSolrCloudClientBuilder()
-        .withZookeeperHosts(zookeeperHosts)
+        .withZkConnectString(zkConnectString)
         .withCollection(collection)
         .withConfigSet(configSet)
         .withShards(shards)

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClient.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClient.java b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClient.java
index 32a1821..33e94d0 100644
--- a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClient.java
+++ b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClient.java
@@ -43,7 +43,7 @@ public class AmbariSolrCloudClient {
 
   private static final Logger LOG = LoggerFactory.getLogger(AmbariSolrCloudClient.class);
 
-  private final String zookeeperHosts;
+  private final String zkConnectString;
   private final String collection;
   private final String configSet;
   private final String configDir;
@@ -60,7 +60,7 @@ public class AmbariSolrCloudClient {
   private String jaasFile;
 
   public AmbariSolrCloudClient(AmbariSolrCloudClientBuilder builder) {
-    this.zookeeperHosts = builder.zookeeperHosts;
+    this.zkConnectString = builder.zkConnectString;
     this.collection = builder.collection;
     this.configSet = builder.configSet;
     this.configDir = builder.configDir;
@@ -172,8 +172,8 @@ public class AmbariSolrCloudClient {
     return new GetSolrHostsCommand(getRetryTimes(), getInterval()).run(this);
   }
 
-  public String getZookeeperHosts() {
-    return zookeeperHosts;
+  public String getZkConnectString() {
+    return zkConnectString;
   }
 
   public String getCollection() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientBuilder.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientBuilder.java b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientBuilder.java
index 50204c4..a5eebc2 100644
--- a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientBuilder.java
+++ b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientBuilder.java
@@ -25,7 +25,7 @@ import org.apache.solr.client.solrj.impl.Krb5HttpClientConfigurer;
 import org.apache.solr.common.cloud.SolrZkClient;
 
 public class AmbariSolrCloudClientBuilder {
-  String zookeeperHosts;
+  String zkConnectString;
   String collection;
   String configSet;
   String configDir;
@@ -45,8 +45,8 @@ public class AmbariSolrCloudClientBuilder {
     return new AmbariSolrCloudClient(this);
   }
 
-  public AmbariSolrCloudClientBuilder withZookeeperHosts(String zookeeperHosts) {
-    this.zookeeperHosts = zookeeperHosts;
+  public AmbariSolrCloudClientBuilder withZkConnectString(String zkConnectString) {
+    this.zkConnectString = zkConnectString;
     return this;
   }
 
@@ -112,12 +112,12 @@ public class AmbariSolrCloudClientBuilder {
   }
 
   public AmbariSolrCloudClientBuilder withSolrCloudClient() {
-    this.solrCloudClient = new CloudSolrClient(this.zookeeperHosts);
+    this.solrCloudClient = new CloudSolrClient(this.zkConnectString);
     return this;
   }
 
   public AmbariSolrCloudClientBuilder withSolrZkClient(int zkClientTimeout, int zkClientConnectTimeout) {
-    this.solrZkClient = new SolrZkClient(this.zookeeperHosts, zkClientTimeout, zkClientConnectTimeout);
+    this.solrZkClient = new SolrZkClient(this.zkConnectString, zkClientTimeout, zkClientConnectTimeout);
     return this;
   }
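
For illustration, the renamed builder method in use, along the lines of the
SolrMetricsLoader change above; the connect string, collection and config set
names are hypothetical:

    import org.apache.ambari.logsearch.solr.AmbariSolrCloudClient;
    import org.apache.ambari.logsearch.solr.AmbariSolrCloudClientBuilder;

    public class SolrCloudClientBuilderSketch {
      public static void main(String[] args) throws Exception {
        // Builder calls taken from the diff; the values themselves are made up.
        AmbariSolrCloudClient client = new AmbariSolrCloudClientBuilder()
            .withZkConnectString("c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181/ambari-solr")
            .withCollection("hadoop_logs")
            .withConfigSet("hadoop_logs")
            .withShards(1)
            .withSolrCloudClient()
            .build();

        System.out.println("zkConnectString=" + client.getZkConnectString()
            + ", collection=" + client.getCollection());
      }
    }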
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/GetSolrHostsCommand.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/GetSolrHostsCommand.java b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/GetSolrHostsCommand.java
index f814678..1aa8157 100644
--- a/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/GetSolrHostsCommand.java
+++ b/ambari-logsearch/ambari-logsearch-solr-client/src/main/java/org/apache/ambari/logsearch/solr/commands/GetSolrHostsCommand.java
@@ -36,7 +36,7 @@ public class GetSolrHostsCommand extends AbstractRetryCommand<Collection<String>
   public Collection<String> createAndProcessRequest(AmbariSolrCloudClient solrCloudClient) throws Exception {
     List<String> solrHosts = new ArrayList<>();
 
-    ZooKeeper zk = new ZooKeeper(solrCloudClient.getZookeeperHosts(), 10000, null);
+    ZooKeeper zk = new ZooKeeper(solrCloudClient.getZkConnectString(), 10000, null);
     List<String> ids = zk.getChildren("/live_nodes", false);
     for (String id : ids) {
       if (id.endsWith("_solr")) {
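
Worth noting: ZooKeeper resolves "/live_nodes" relative to the chroot carried in
the connect string (so it reads /ambari-solr/live_nodes in the example), which is
one more reason "connect string" is the accurate name here. A standalone sketch of
the same lookup with a hypothetical host and chroot:

    import java.util.List;
    import org.apache.zookeeper.ZooKeeper;

    public class LiveNodesSketch {
      public static void main(String[] args) throws Exception {
        // The chroot (/ambari-solr) is part of the connect string itself.
        ZooKeeper zk = new ZooKeeper("c6401.ambari.apache.org:2181/ambari-solr", 10000, null);
        try {
          List<String> ids = zk.getChildren("/live_nodes", false);
          for (String id : ids) {
            if (id.endsWith("_solr")) {
              System.out.println("live Solr node: " + id);
            }
          }
        } finally {
          zk.close();
        }
      }
    }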

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-logsearch/ambari-logsearch-solr-client/src/test/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-solr-client/src/test/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientTest.java b/ambari-logsearch/ambari-logsearch-solr-client/src/test/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientTest.java
index c382c14..edc9b5c 100644
--- a/ambari-logsearch/ambari-logsearch-solr-client/src/test/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientTest.java
+++ b/ambari-logsearch/ambari-logsearch-solr-client/src/test/java/org/apache/ambari/logsearch/solr/AmbariSolrCloudClientTest.java
@@ -60,7 +60,7 @@ public class AmbariSolrCloudClientTest {
     builder.solrZkClient = mockedSolrZkClient;
 
     underTest = builder
-      .withZookeeperHosts("localhost1:2181,localhost2:2182")
+      .withZkConnectString("localhost1:2181,localhost2:2182")
       .withCollection("collection1")
       .withConfigSet("configSet")
       .withShards(1)

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logfeeder.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logfeeder.properties.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logfeeder.properties.j2
index 31c252a..529fa8f 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logfeeder.properties.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logfeeder.properties.j2
@@ -18,8 +18,8 @@ logfeeder.metrics.collector.hosts={{logfeeder_metrics_collector_hosts}}
 logfeeder.config.files={{logfeeder_config_files}}
 logfeeder.log.filter.enable={{logfeeder_log_filter_enable}}
 logfeeder.solr.config.interval={{logfeeder_solr_config_interval}}
-logfeeder.solr.zkhosts={{zookeeper_quorum}}{{logsearch_solr_znode}}
 logfeeder.solr.core.config.name=history
+logfeeder.solr.zk_connect_string={{zookeeper_quorum}}{{logsearch_solr_znode}}
 
 # Custom properties
 {% for key, value in logfeeder_custom_properties.items() %}

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2
index 0bbba0f..3f5db30 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/logsearch.properties.j2
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-logsearch.solr.zkhosts={{zookeeper_quorum}}{{logsearch_solr_znode}}
+logsearch.solr.zk_connect_string={{zookeeper_quorum}}{{logsearch_solr_znode}}
 
 # Service Logs
 logsearch.solr.collection.service.logs={{logsearch_solr_collection_service_logs}}
@@ -25,7 +25,7 @@ logsearch.collection.service.logs.replication.factor={{logsearch_collection_serv
 logsearch.service.logs.fields={{logsearch_service_logs_fields}}
 
 # Audit logs
-logsearch.solr.audit.logs.zkhosts={{logsearch_solr_audit_logs_zk_quorum}}{{logsearch_solr_audit_logs_zk_node}}
+logsearch.solr.audit.logs.zk_connect_string={{logsearch_solr_audit_logs_zk_quorum}}{{logsearch_solr_audit_logs_zk_node}}
 logsearch.solr.collection.audit.logs={{logsearch_solr_collection_audit_logs}}
 logsearch.solr.audit.logs.url={{logsearch_solr_audit_logs_url}}
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a2c23b21/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/output.config.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/output.config.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/output.config.json.j2
index b31f39b..ba8df00 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/output.config.json.j2
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/templates/output.config.json.j2
@@ -21,7 +21,7 @@
       "is_enabled":"{{solr_service_logs_enable}}",
       "comment":"Output to solr for service logs",
       "destination":"solr",
-      "zk_hosts":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
+      "zk_connect_string":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
       "collection":"{{logsearch_solr_collection_service_logs}}",
       "number_of_shards": "{{logsearch_collection_service_logs_numshards}}",
       "splits_interval_mins": "{{logsearch_service_logs_split_interval_mins}}",
@@ -40,7 +40,7 @@
       "comment":"Output to solr for audit records",
       "is_enabled":"{{solr_audit_logs_enable}}",
       "destination":"solr",
-      "zk_hosts":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
+      "zk_connect_string":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
       "collection":"{{logsearch_solr_collection_audit_logs}}",
       "number_of_shards": "{{logsearch_collection_audit_logs_numshards}}",
       "splits_interval_mins": "{{logsearch_audit_logs_split_interval_mins}}",