Posted to commits@ambari.apache.org by sw...@apache.org on 2016/12/08 23:30:04 UTC

[25/25] ambari git commit: Merge from branch-2.5

Merge from branch-2.5


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4278c4a4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4278c4a4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4278c4a4

Branch: refs/heads/branch-feature-AMBARI-18901
Commit: 4278c4a4f1240270d4f74636fecc6b590b722432
Parents: 1c5c7df
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Thu Dec 8 15:28:54 2016 -0800
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Thu Dec 8 15:28:54 2016 -0800

----------------------------------------------------------------------
 .../admin-web/app/scripts/services/Cluster.js   |   51 -
 .../app/scripts/services/RoleDetailsModal.js    |   31 +-
 .../app/views/modals/RoleDetailsModal.html      |    6 +-
 ambari-agent/conf/unix/ambari-agent.ini         |    1 +
 .../src/main/python/ambari_agent/ActionQueue.py |   11 +
 .../ambari_agent/CustomServiceOrchestrator.py   |    7 +-
 .../src/main/python/ambari_agent/FileCache.py   |   12 +-
 .../src/main/python/ambari_agent/Hardware.py    |   52 +-
 .../src/main/python/ambari_agent/HostInfo.py    |   12 +-
 .../test/python/ambari_agent/TestActionQueue.py |   48 +
 .../TestCustomServiceOrchestrator.py            |   34 +-
 .../test/python/ambari_agent/TestFileCache.py   |   10 +-
 .../test/python/ambari_agent/TestHardware.py    |   70 +
 .../ambari_commons/ambari_metrics_helper.py     |   52 +-
 .../main/python/ambari_commons/logging_utils.py |    5 +-
 .../ambari_commons/parallel_processing.py       |   95 +
 .../libraries/functions/constants.py            |    1 +
 .../libraries/functions/copy_tarball.py         |    2 +-
 .../dynamic_variable_interpretation.py          |    2 +-
 .../libraries/functions/package_conditions.py   |    2 +-
 .../libraries/functions/setup_atlas_hook.py     |   47 +-
 .../simulate_perf_cluster_alert_behaviour.py    |  108 ++
 .../libraries/script/dummy.py                   |   11 +
 .../ambari/logfeeder/mapper/MapperDateTest.java |   56 -
 .../org/apache/ambari/logsearch/LogSearch.java  |    4 +-
 .../ambari/logsearch/dao/SolrCollectionDao.java |   64 +-
 .../web/listener/LogSearchSessionListener.java  |   48 +
 .../docker/test-config/logsearch/log4j.xml      |    2 +-
 .../timeline/AbstractTimelineMetricsSink.java   |   51 +-
 .../availability/MetricCollectorHAHelper.java   |    1 -
 .../AbstractTimelineMetricSinkTest.java         |   10 +-
 .../availability/MetricCollectorHATest.java     |    1 -
 .../cache/HandleConnectExceptionTest.java       |    4 +
 .../conf/unix/ambari-metrics-grafana            |    6 +-
 .../timeline/HadoopTimelineMetricsSink.java     |    4 +-
 .../timeline/HadoopTimelineMetricsSinkTest.java |    6 +-
 .../src/main/python/core/config_reader.py       |    9 +-
 .../src/test/python/core/TestEmitter.py         |    2 +-
 .../timeline/HBaseTimelineMetricStore.java      |   18 +-
 .../timeline/TimelineMetricConfiguration.java   |   14 +
 .../aggregators/AbstractTimelineAggregator.java |    6 +-
 .../MetricCollectorHAController.java            |    4 +-
 .../TestApplicationHistoryServer.java           |   14 +-
 .../MetricCollectorHAControllerTest.java        |    4 +-
 ambari-project/pom.xml                          |   41 +
 ambari-server/checkstyle.xml                    |   17 +
 ambari-server/pom.xml                           |    4 +
 .../ambari/server/agent/AgentRequests.java      |    2 +-
 .../ambari/server/agent/ExecutionCommand.java   |    1 +
 .../ambari/server/checks/CheckDescription.java  |    4 +-
 .../AmbariManagementControllerImpl.java         |   54 +-
 .../internal/AlertTargetResourceProvider.java   |    2 +-
 .../BlueprintConfigurationProcessor.java        |    3 +-
 .../controller/internal/CalculatedStatus.java   |   52 +-
 .../ClusterStackVersionResourceProvider.java    |    4 +-
 .../internal/ConfigGroupResourceProvider.java   |   60 +-
 .../internal/RequestResourceProvider.java       |   34 +-
 .../internal/StageResourceProvider.java         |    2 +-
 .../internal/UpgradeResourceProvider.java       |    2 +-
 .../logging/LogSearchDataRetrievalService.java  |    7 +-
 .../controller/logging/LoggingCookieStore.java  |   44 +
 .../logging/LoggingRequestHelperImpl.java       |   42 +-
 .../apache/ambari/server/orm/dao/AlertsDAO.java |    6 +-
 .../entities/ClusterConfigMappingEntity.java    |   20 +-
 .../ambari/server/orm/entities/GroupEntity.java |    5 +-
 .../server/security/authorization/Users.java    |   10 +-
 .../serveraction/upgrades/ConfigureAction.java  |   16 +-
 .../serveraction/upgrades/FixLzoCodecPath.java  |   16 +-
 .../upgrades/FixOozieAdminUsers.java            |    9 +-
 .../upgrades/HBaseConfigCalculation.java        |   14 +-
 .../HBaseEnvMaxDirectMemorySizeAction.java      |   13 +-
 .../upgrades/HiveEnvClasspathAction.java        |   13 +-
 .../upgrades/HiveZKQuorumConfigAction.java      |    2 +-
 .../upgrades/OozieConfigCalculation.java        |   13 +-
 .../upgrades/RangerConfigCalculation.java       |    4 +-
 .../RangerKerberosConfigCalculation.java        |   20 +-
 .../upgrades/RangerKmsProxyConfig.java          |    3 +-
 .../upgrades/SparkShufflePropertyConfig.java    |    3 +-
 .../upgrades/YarnConfigCalculation.java         |    2 +-
 .../apache/ambari/server/stack/StackModule.java |  101 +-
 .../ambari/server/state/ComponentInfo.java      |   14 +
 .../org/apache/ambari/server/state/Config.java  |   22 +-
 .../ambari/server/state/ConfigFactory.java      |   20 +-
 .../apache/ambari/server/state/ConfigImpl.java  |  480 +++--
 .../apache/ambari/server/state/ServiceImpl.java |    2 +-
 .../server/state/cluster/ClusterImpl.java       |  110 +-
 .../server/state/cluster/ClustersImpl.java      |    2 +-
 .../server/state/configgroup/ConfigGroup.java   |   33 +-
 .../state/configgroup/ConfigGroupFactory.java   |   34 +-
 .../state/configgroup/ConfigGroupImpl.java      |  613 +++----
 .../ambari/server/state/host/HostImpl.java      |    2 +-
 .../services/RetryUpgradeActionService.java     |    2 +-
 .../ambari/server/state/stack/UpgradePack.java  |   31 +-
 .../stack/upgrade/ConfigurationCondition.java   |   72 +-
 .../svccomphost/ServiceComponentHostImpl.java   |    2 +-
 .../ambari/server/topology/AmbariContext.java   |   25 +-
 .../ambari/server/topology/TopologyManager.java |   19 +-
 .../ambari/server/update/HostUpdateHelper.java  |   10 +-
 .../server/upgrade/AbstractUpgradeCatalog.java  |   25 +-
 .../server/upgrade/UpgradeCatalog211.java       |   24 +-
 .../server/upgrade/UpgradeCatalog240.java       |    4 +-
 .../server/upgrade/UpgradeCatalog250.java       |   53 +-
 .../ambari/server/utils/RequestUtils.java       |   10 +
 .../python/ambari_server/serverConfiguration.py |   14 +-
 .../main/python/ambari_server/serverUpgrade.py  |   38 +-
 .../src/main/python/ambari_server/utils.py      |   23 +-
 .../src/main/python/ambari_server_main.py       |    8 +-
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   11 +-
 .../1.6.1.2.2.0/package/scripts/params.py       |    2 +-
 .../AMBARI_INFRA/0.1.0/metainfo.xml             |    1 +
 .../0.1.0/package/scripts/setup_infra_solr.py   |    3 +-
 .../0.1.0/configuration/ams-env.xml             |    2 +-
 .../0.1.0/configuration/ams-site.xml            |   12 +
 .../AMBARI_METRICS/0.1.0/metainfo.xml           |    1 +
 .../AMBARI_METRICS/0.1.0/package/scripts/ams.py |    1 +
 .../0.1.0/package/scripts/metrics_collector.py  |    2 +
 .../package/scripts/metrics_grafana_util.py     |   55 +-
 .../0.1.0/package/scripts/params.py             |   28 +-
 .../0.1.0/package/scripts/service_check.py      |  226 +--
 .../metrics_grafana_datasource.json.j2          |    4 +-
 .../0.5.0.2.1/configuration/falcon-env.xml      |   17 +
 .../FALCON/0.5.0.2.1/metainfo.xml               |    7 +
 .../FALCON/0.5.0.2.1/package/scripts/falcon.py  |    2 +-
 .../0.5.0.2.1/package/scripts/params_linux.py   |    5 +-
 .../FALCON/0.5.0.2.1/quicklinks/quicklinks.json |   35 +
 .../FLUME/1.4.0.2.0/package/scripts/params.py   |    2 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |    2 +-
 .../HDFS/2.1.0.2.0/configuration/hdfs-site.xml  |    2 +-
 .../package/alerts/alert_metrics_deviation.py   |    2 +-
 .../HIVE/0.12.0.2.0/configuration/hive-env.xml  |   17 +
 .../HIVE/0.12.0.2.0/package/scripts/hcat.py     |    2 +-
 .../HIVE/0.12.0.2.0/package/scripts/hive.py     |    2 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |    5 +-
 .../HIVE/0.12.0.2.0/package/scripts/webhcat.py  |    2 +-
 .../KAFKA/0.8.1/package/scripts/params.py       |    2 +-
 .../0.5.0/configuration/logfeeder-env.xml       |   14 +
 .../0.5.0/configuration/logsearch-env.xml       |   94 +-
 .../configuration/logsearch-properties.xml      |    2 +-
 .../LOGSEARCH/0.5.0/metainfo.xml                |   35 +-
 .../LOGSEARCH/0.5.0/package/scripts/params.py   |   79 +-
 .../0.5.0/package/scripts/setup_logfeeder.py    |    2 +-
 .../0.5.0/package/scripts/setup_logsearch.py    |   13 +-
 .../0.5.0/properties/logfeeder-env.sh.j2        |    2 +-
 .../0.5.0/properties/logsearch-env.sh.j2        |    2 +-
 .../0.5.0/properties/logsearch-log4j.xml.j2     |    4 +-
 .../0.5.0/properties/output.config.json.j2      |    6 +-
 .../LOGSEARCH/0.5.0/themes/theme.json           |  253 +++
 .../RANGER/0.4.0/package/scripts/params.py      |    1 +
 .../0.4.0/package/scripts/ranger_admin.py       |   36 +-
 .../SQOOP/1.4.4.2.0/configuration/sqoop-env.xml |   17 +
 .../1.4.4.2.0/configuration/sqoop-site.xml      |    6 +
 .../1.4.4.2.0/package/scripts/params_linux.py   |    5 +-
 .../SQOOP/1.4.4.2.0/package/scripts/sqoop.py    |    2 +-
 .../STORM/0.10.0/configuration/storm-env.xml    |   17 +
 .../STORM/0.9.1/package/scripts/params_linux.py |    8 +-
 .../STORM/0.9.1/package/scripts/storm.py        |    2 +-
 .../2.1.0.2.0/package/scripts/service_check.py  |   66 +-
 .../ZOOKEEPER/3.4.6/metainfo.xml                |    2 +-
 .../scripts/post-user-creation-hook.sh          |    7 +
 .../src/main/resources/scripts/stack_advisor.py |   11 +-
 .../HDP/2.0.6/configuration/cluster-env.xml     |   10 +
 .../before-ANY/scripts/shared_initialization.py |    6 +-
 .../2.0.6/hooks/before-START/scripts/params.py  |    2 +-
 .../HDP/2.0.6/properties/stack_features.json    |    5 +
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |   47 +-
 .../stacks/HDP/2.1/services/stack_advisor.py    |   20 +-
 .../services/HIVE/configuration/hive-site.xml   |    4 +
 .../stacks/HDP/2.2/services/stack_advisor.py    |   62 +-
 .../services/STORM/configuration/storm-site.xml |    4 +
 .../stacks/HDP/2.3/services/stack_advisor.py    |   61 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml |   27 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |   27 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml |   27 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml |   27 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.5.xml     |    4 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.6.xml     |    4 +-
 .../services/HDFS/configuration/hadoop-env.xml  |  176 ++
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml |   27 +-
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml |   27 +-
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml |   27 +-
 .../stacks/HDP/2.4/upgrades/upgrade-2.5.xml     |    4 +-
 .../stacks/HDP/2.4/upgrades/upgrade-2.6.xml     |    4 +-
 .../stacks/HDP/2.5/upgrades/config-upgrade.xml  |   12 +
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml |   27 +-
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml |   31 +-
 .../stacks/HDP/2.5/upgrades/upgrade-2.6.xml     |    3 +
 .../HDP/2.6/services/ACCUMULO/metainfo.xml      |    2 +-
 .../stacks/HDP/2.6/services/FALCON/metainfo.xml |    2 +-
 .../stacks/HDP/2.6/services/FLUME/metainfo.xml  |    2 +-
 .../stacks/HDP/2.6/services/HBASE/metainfo.xml  |    2 +-
 .../stacks/HDP/2.6/services/HDFS/metainfo.xml   |    2 +-
 .../stacks/HDP/2.6/services/HIVE/metainfo.xml   |    2 +-
 .../stacks/HDP/2.6/services/KAFKA/metainfo.xml  |    2 +-
 .../stacks/HDP/2.6/services/KNOX/metainfo.xml   |    2 +-
 .../stacks/HDP/2.6/services/MAHOUT/metainfo.xml |    2 +-
 .../stacks/HDP/2.6/services/OOZIE/metainfo.xml  |    1 +
 .../stacks/HDP/2.6/services/PIG/metainfo.xml    |    2 +-
 .../stacks/HDP/2.6/services/RANGER/metainfo.xml |    2 +-
 .../HDP/2.6/services/RANGER_KMS/metainfo.xml    |    2 +-
 .../stacks/HDP/2.6/services/SLIDER/metainfo.xml |    2 +-
 .../services/SPARK/configuration/livy-conf.xml  |   73 +
 .../configuration/livy-spark-blacklist.xml      |   52 +
 .../stacks/HDP/2.6/services/SPARK/metainfo.xml  |    2 +-
 .../stacks/HDP/2.6/services/SPARK2/metainfo.xml |    2 +-
 .../stacks/HDP/2.6/services/SQOOP/metainfo.xml  |    2 +-
 .../stacks/HDP/2.6/services/STORM/metainfo.xml  |    2 +-
 .../stacks/HDP/2.6/services/TEZ/metainfo.xml    |    2 +-
 .../stacks/HDP/2.6/services/YARN/metainfo.xml   |    4 +-
 .../HDP/2.6/services/ZOOKEEPER/metainfo.xml     |    2 +-
 .../HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml |   27 +-
 .../HDP/3.0/configuration/cluster-env.xml       |  293 +++
 .../HDP/3.0/hooks/after-INSTALL/scripts/hook.py |   37 +
 .../3.0/hooks/after-INSTALL/scripts/params.py   |   97 +
 .../scripts/shared_initialization.py            |  111 ++
 .../hooks/before-ANY/files/changeToSecureUid.sh |   53 +
 .../HDP/3.0/hooks/before-ANY/scripts/hook.py    |   36 +
 .../HDP/3.0/hooks/before-ANY/scripts/params.py  |  231 +++
 .../before-ANY/scripts/shared_initialization.py |  226 +++
 .../3.0/hooks/before-INSTALL/scripts/hook.py    |   37 +
 .../3.0/hooks/before-INSTALL/scripts/params.py  |  113 ++
 .../scripts/repo_initialization.py              |   68 +
 .../scripts/shared_initialization.py            |   37 +
 .../3.0/hooks/before-RESTART/scripts/hook.py    |   29 +
 .../hooks/before-START/files/checkForFormat.sh  |   65 +
 .../before-START/files/task-log4j.properties    |  134 ++
 .../hooks/before-START/files/topology_script.py |   66 +
 .../HDP/3.0/hooks/before-START/scripts/hook.py  |   39 +
 .../3.0/hooks/before-START/scripts/params.py    |  326 ++++
 .../before-START/scripts/rack_awareness.py      |   47 +
 .../scripts/shared_initialization.py            |  191 ++
 .../templates/commons-logging.properties.j2     |   43 +
 .../templates/exclude_hosts_list.j2             |   21 +
 .../templates/hadoop-metrics2.properties.j2     |  105 ++
 .../before-START/templates/health_check.j2      |   81 +
 .../templates/include_hosts_list.j2             |   21 +
 .../templates/topology_mappings.data.j2         |   24 +
 .../main/resources/stacks/HDP/3.0/kerberos.json |   78 +
 .../main/resources/stacks/HDP/3.0/metainfo.xml  |   24 +
 .../HDP/3.0/properties/stack_features.json      |  323 ++++
 .../stacks/HDP/3.0/properties/stack_tools.json  |    4 +
 .../resources/stacks/HDP/3.0/repos/repoinfo.xml |  132 ++
 .../services/HDFS/configuration/core-site.xml   |   56 +
 .../services/HDFS/configuration/hadoop-env.xml  |  200 ++
 .../services/HDFS/configuration/hdfs-log4j.xml  |  226 +++
 .../services/HDFS/configuration/hdfs-site.xml   |  153 ++
 .../HDFS/configuration/ranger-hdfs-audit.xml    |  217 +++
 .../ranger-hdfs-plugin-properties.xml           |   98 +
 .../configuration/ranger-hdfs-policymgr-ssl.xml |   67 +
 .../HDFS/configuration/ranger-hdfs-security.xml |   65 +
 .../services/HDFS/configuration/widgets.json    |  649 +++++++
 .../stacks/HDP/3.0/services/HDFS/kerberos.json  |  246 +++
 .../stacks/HDP/3.0/services/HDFS/metainfo.xml   |  190 ++
 .../services/HDFS/quicklinks/quicklinks.json    |   80 +
 .../HDP/3.0/services/HDFS/themes/theme.json     |  179 ++
 .../HDP/3.0/services/YARN/YARN_widgets.json     |  670 +++++++
 .../YARN/configuration-mapred/mapred-env.xml    |   51 +
 .../YARN/configuration-mapred/mapred-site.xml   |  134 ++
 .../YARN/configuration/capacity-scheduler.xml   |   71 +
 .../YARN/configuration/ranger-yarn-audit.xml    |  177 ++
 .../ranger-yarn-plugin-properties.xml           |   82 +
 .../configuration/ranger-yarn-policymgr-ssl.xml |   66 +
 .../YARN/configuration/ranger-yarn-security.xml |   58 +
 .../services/YARN/configuration/yarn-env.xml    |  200 ++
 .../services/YARN/configuration/yarn-log4j.xml  |  103 ++
 .../services/YARN/configuration/yarn-site.xml   |  814 +++++++++
 .../stacks/HDP/3.0/services/YARN/kerberos.json  |  278 +++
 .../stacks/HDP/3.0/services/YARN/metainfo.xml   |  173 ++
 .../YARN/quicklinks-mapred/quicklinks.json      |   80 +
 .../services/YARN/quicklinks/quicklinks.json    |   80 +
 .../3.0/services/YARN/themes-mapred/theme.json  |  132 ++
 .../HDP/3.0/services/YARN/themes/theme.json     |  250 +++
 .../HDP/3.0/services/ZOOKEEPER/metainfo.xml     |   54 +
 .../main/resources/stacks/HDP/3.0/widgets.json  |   95 +
 .../2.1/hooks/before-START/scripts/params.py    |    2 +-
 .../PERF/1.0/configuration/cluster-env.xml      |   27 +-
 .../resources/stacks/PERF/1.0/kerberos.json     |   78 +
 .../GRUMPY/configuration/grumpy-site.xml        |   12 +
 .../PERF/1.0/services/GRUMPY/kerberos.json      |   78 +
 .../services/GRUMPY/package/scripts/dwarf.py    |    4 +
 .../stacks/PERF/1.0/services/HAPPY/alerts.json  |   20 +
 .../HAPPY/configuration/happy-alert-config.xml  |   80 +
 .../services/HAPPY/configuration/happy-site.xml |   12 +
 .../PERF/1.0/services/HAPPY/kerberos.json       |   78 +
 .../stacks/PERF/1.0/services/HAPPY/metainfo.xml |    5 +
 .../HAPPY/package/alerts/alert_happy_process.py |   59 +
 .../1.0/services/HAPPY/package/scripts/dwarf.py |    4 +
 .../stacks/PERF/1.0/services/HBASE/alerts.json  |  110 +-
 .../HBASE/configuration/hbase-alert-config.xml  |   80 +
 .../stacks/PERF/1.0/services/HBASE/metainfo.xml |    1 +
 .../package/alerts/hbase_master_process.py      |   59 +
 .../alerts/hbase_regionserver_process.py        |   59 +
 .../HBASE/package/scripts/hbase_master.py       |    4 +
 .../HBASE/package/scripts/hbase_regionserver.py |    4 +
 .../package/scripts/phoenix_queryserver.py      |    4 +
 .../stacks/PERF/1.0/services/HDFS/alerts.json   | 1728 +-----------------
 .../HDFS/configuration/hdfs-alert-config.xml    |   80 +
 .../stacks/PERF/1.0/services/HDFS/metainfo.xml  |    1 +
 .../package/alerts/alert_checkpoint_time.py     |   38 +-
 .../alerts/alert_datanode_unmounted_data_dir.py |   47 +-
 .../package/alerts/alert_ha_namenode_health.py  |   75 -
 .../package/alerts/alert_metrics_deviation.py   |   85 -
 .../package/alerts/alert_nfs_gateway_process.py |   59 +
 .../package/alerts/alert_snamenode_process.py   |   59 +
 .../package/alerts/alert_upgrade_finalized.py   |   49 +-
 .../services/HDFS/package/scripts/datanode.py   |    4 +
 .../HDFS/package/scripts/journalnode.py         |    4 +
 .../services/HDFS/package/scripts/namenode.py   |    4 +
 .../services/HDFS/package/scripts/nfsgateway.py |    4 +
 .../services/HDFS/package/scripts/snamenode.py  |    4 +
 .../KERBEROS/configuration/kerberos-env.xml     |  380 ++++
 .../KERBEROS/configuration/krb5-conf.xml        |  109 ++
 .../PERF/1.0/services/KERBEROS/kerberos.json    |   17 +
 .../PERF/1.0/services/KERBEROS/metainfo.xml     |  123 ++
 .../KERBEROS/package/scripts/kerberos_client.py |   80 +
 .../KERBEROS/package/scripts/kerberos_common.py |  468 +++++
 .../services/KERBEROS/package/scripts/params.py |  200 ++
 .../KERBEROS/package/scripts/service_check.py   |   30 +
 .../KERBEROS/package/scripts/status_params.py   |   32 +
 .../services/KERBEROS/package/scripts/utils.py  |  105 ++
 .../KERBEROS/package/templates/krb5_conf.j2     |   54 +
 .../stacks/PERF/1.0/services/SLEEPY/alerts.json |   20 +
 .../configuration/sleepy-alert-config.xml       |   80 +
 .../SLEEPY/configuration/sleepy-site.xml        |   12 +
 .../PERF/1.0/services/SLEEPY/kerberos.json      |   78 +
 .../PERF/1.0/services/SLEEPY/metainfo.xml       |    5 +
 .../package/alerts/alert_sleepy_process.py      |   59 +
 .../services/SLEEPY/package/scripts/dwarf.py    |    4 +
 .../stacks/PERF/1.0/services/SNOW/alerts.json   |   20 +
 .../SNOW/configuration/snow-alert-config.xml    |   80 +
 .../services/SNOW/configuration/snow-site.xml   |   12 +
 .../stacks/PERF/1.0/services/SNOW/kerberos.json |   78 +
 .../stacks/PERF/1.0/services/SNOW/metainfo.xml  |    5 +
 .../SNOW/package/alerts/alert_snow_process.py   |   59 +
 .../services/SNOW/package/scripts/snow_white.py |    4 +
 .../stacks/PERF/1.0/services/YARN/alerts.json   |  361 +---
 .../YARN/configuration/yarn-alert-config.xml    |   80 +
 .../stacks/PERF/1.0/services/YARN/metainfo.xml  |    3 +
 .../package/alerts/alert_history_process.py     |   59 +
 .../package/alerts/alert_nodemanager_health.py  |   36 +-
 .../alerts/alert_nodemanagers_summary.py        |   68 -
 .../alerts/alert_resourcemanager_process.py     |   59 +
 .../package/alerts/alert_timeline_process.py    |   59 +
 .../scripts/application_timeline_server.py      |    4 +
 .../YARN/package/scripts/historyserver.py       |    4 +
 .../YARN/package/scripts/nodemanager.py         |    4 +
 .../YARN/package/scripts/resourcemanager.py     |    4 +
 .../PERF/1.0/services/ZOOKEEPER/alerts.json     |   20 +
 .../ZOOKEEPER/configuration/zk-alert-config.xml |   80 +
 .../PERF/1.0/services/ZOOKEEPER/kerberos.json   |   39 +
 .../PERF/1.0/services/ZOOKEEPER/metainfo.xml    |    4 +
 .../package/alerts/alert_zk_server_process.py   |   59 +
 .../package/scripts/zookeeper_server.py         |    4 +
 .../src/main/resources/stacks/stack_advisor.py  |  209 ++-
 .../src/main/resources/upgrade-pack.xsd         |   18 +-
 .../ExecutionCommandWrapperTest.java            |   17 +-
 .../TestActionSchedulerThreading.java           |   19 +-
 .../server/agent/HeartbeatTestHelper.java       |    6 +-
 .../server/agent/TestHeartbeatMonitor.java      |   13 +-
 .../configuration/RecoveryConfigHelperTest.java |    2 +-
 .../AmbariManagementControllerImplTest.java     |   96 +-
 .../AmbariManagementControllerTest.java         |  118 +-
 .../BlueprintConfigurationProcessorTest.java    |   11 +
 .../internal/CalculatedStatusTest.java          |   31 +
 .../internal/RequestResourceProviderTest.java   |   12 +-
 .../UpgradeResourceProviderHDP22Test.java       |   14 +-
 .../internal/UpgradeResourceProviderTest.java   |   13 +-
 .../LogSearchDataRetrievalServiceTest.java      |  142 +-
 .../logging/LoggingRequestHelperImplTest.java   |    5 +
 .../server/orm/InMemoryDefaultTestModule.java   |    5 +
 .../server/orm/dao/ServiceConfigDAOTest.java    |  144 +-
 .../ldap/AmbariLdapDataPopulatorTest.java       |    3 +-
 .../ComponentVersionCheckActionTest.java        |   19 +-
 .../upgrades/ConfigureActionTest.java           |   96 +-
 .../upgrades/FixOozieAdminUsersTest.java        |   76 +-
 .../HBaseEnvMaxDirectMemorySizeActionTest.java  |  187 +-
 .../upgrades/HiveEnvClasspathActionTest.java    |  148 +-
 .../upgrades/HiveZKQuorumConfigActionTest.java  |    2 +-
 .../upgrades/KerberosKeytabsActionTest.java     |   28 +-
 .../upgrades/RangerConfigCalculationTest.java   |   72 +-
 .../RangerKerberosConfigCalculationTest.java    |  173 +-
 .../upgrades/RangerKmsProxyConfigTest.java      |   36 +-
 .../SparkShufflePropertyConfigTest.java         |   30 +-
 .../upgrades/UpgradeActionTest.java             |   28 +-
 .../ambari/server/state/ConfigGroupTest.java    |   26 +-
 .../ambari/server/state/ConfigHelperTest.java   |   49 +-
 .../ambari/server/state/UpgradeHelperTest.java  |   39 +-
 .../state/alerts/AlertReceivedListenerTest.java |    8 +-
 .../state/cluster/ClusterDeadlockTest.java      |   17 +-
 .../server/state/cluster/ClusterTest.java       |  228 ++-
 .../server/state/cluster/ClustersTest.java      |    8 +-
 ...omponentHostConcurrentWriteDeadlockTest.java |    9 +-
 .../ambari/server/state/host/HostTest.java      |    6 +-
 .../server/state/stack/UpgradePackTest.java     |   22 +
 .../svccomphost/ServiceComponentHostTest.java   |   24 +-
 .../server/topology/AmbariContextTest.java      |   38 +-
 .../server/update/HostUpdateHelperTest.java     |   40 +-
 .../server/upgrade/UpgradeCatalog210Test.java   |    2 +
 .../server/upgrade/UpgradeCatalog250Test.java   |   98 +
 .../ambari/server/utils/RequestUtilsTest.java   |   18 +
 .../ambari/server/utils/StageUtilsTest.java     |    4 +
 ambari-server/src/test/python/TestMpacks.py     |  102 +-
 .../2.0.6/YARN/test_yarn_service_check.py       |  111 +-
 .../stacks/2.0.6/common/test_stack_advisor.py   |   16 +-
 .../stacks/2.1/common/test_stack_advisor.py     |    2 +
 .../stacks/2.2/common/test_stack_advisor.py     |   46 +-
 .../stacks/2.3/common/test_stack_advisor.py     |   57 +-
 .../stacks/2.4/AMBARI_INFRA/test_infra_solr.py  |    2 +-
 .../stacks/2.4/LOGSEARCH/test_logsearch.py      |    2 +-
 .../test/python/stacks/2.4/configs/default.json |    2 +-
 .../stacks/2.6/RANGER/test_ranger_admin.py      |  504 +++++
 .../2.6/configs/ranger-admin-default.json       |  704 +++++++
 .../2.6/configs/ranger-admin-secured.json       |  773 ++++++++
 .../test/python/stacks/test_stack_adviser.py    |  239 +++
 .../src/test/resources/dashboards/README.txt    |   18 +
 .../stacks/HDP/0.1/services/HDFS/metainfo.xml   |    1 +
 .../GANGLIA/upgrades/HDP/rolling-upgrade.xml    |   43 +
 .../app/controllers/global/update_controller.js |    5 +-
 .../journalNode/step1_controller.js             |   26 +-
 .../journalNode/step4_controller.js             |    6 +-
 .../journalNode/wizard_controller.js            |   28 +-
 .../nameNode/step7_controller.js                |   14 +-
 .../nameNode/step9_controller.js                |   17 +-
 .../resourceManager/step4_controller.js         |    5 +-
 .../main/admin/stack_and_upgrade_controller.js  |   13 +-
 .../admin/stack_upgrade_history_controller.js   |    2 +-
 .../controllers/main/service/info/summary.js    |   37 +-
 .../main/service/reassign/step4_controller.js   |   39 +-
 .../main/service/reassign/step6_controller.js   |   19 -
 .../app/controllers/wizard/step3_controller.js  |    2 +-
 ambari-web/app/data/HDP2/site_properties.js     |   36 +-
 .../app/mappers/stack_upgrade_history_mapper.js |    2 +-
 ambari-web/app/messages.js                      |    9 +-
 ambari-web/app/mixins.js                        |    1 -
 .../common/widgets/export_metrics_mixin.js      |   28 +-
 .../mixins/wizard/assign_master_components.js   |    1 +
 .../wizard/wizardProgressPageController.js      |   20 +
 .../configs/objects/service_config_property.js  |    7 -
 ambari-web/app/models/stack_service.js          |    4 +-
 .../stack_version/stack_upgrade_history.js      |    2 +-
 .../app/routes/manage_journalnode_routes.js     |    4 +-
 ambari-web/app/styles/stack_versions.less       |   11 +-
 .../highAvailability/journalNode/wizard.hbs     |    8 +-
 .../admin/stack_upgrade/upgrade_options.hbs     |   61 +-
 .../main/service/info/service_alert_popup.hbs   |   15 +-
 .../app/utils/configs/config_initializer.js     |   28 +-
 .../mount_points_based_initializer_mixin.js     |  340 ----
 .../widgets/slider_config_widget_view.js        |    7 +-
 .../highAvailability/journalNode/step2_view.js  |    5 +-
 .../upgrade_history_details_view.js             |    2 +-
 .../admin/stack_upgrade/upgrade_history_view.js |  137 +-
 .../admin/stack_upgrade/upgrade_wizard_view.js  |    6 +-
 .../service/reassign/step4_controller_test.js   |    2 +-
 ambari-web/test/models/stack_service_test.js    |   71 +-
 ambari-web/test/utils/ajax/ajax_test.js         |    9 +-
 .../utils/configs/config_initializer_test.js    |  457 -----
 .../stack_upgrade/upgrade_wizard_view_test.js   |    2 +-
 contrib/utils/perf/deploy-gce-perf-cluster.py   |   13 +-
 .../src/main/resources/ui/app/app.js            |   14 +-
 .../src/main/resources/view.xml                 |   23 +-
 contrib/views/files/src/main/resources/view.xml |    7 +
 .../views/hive-next/src/main/resources/view.xml |    7 +
 .../src/main/resources/ui/pig-web/app/app.js    |   14 +-
 contrib/views/pom.xml                           |    1 -
 contrib/views/tez/src/main/resources/view.xml   |    7 +
 .../ui/app/templates/components/job-details.hbs |    2 +-
 contrib/views/zeppelin/pom.xml                  |  190 --
 .../view/zeppelin/ZeppelinServiceCheck.java     |   55 -
 .../ambari/view/zeppelin/ZeppelinServlet.java   |  113 --
 .../zeppelin/src/main/resources/WEB-INF/web.xml |   40 -
 .../src/main/resources/view.log4j.properties    |   27 -
 .../views/zeppelin/src/main/resources/view.xml  |   48 -
 pom.xml                                         |    1 +
 utility/pom.xml                                 |   33 +-
 ...AvoidTransactionalOnPrivateMethodsCheck.java |   55 +
 .../src/main/resources/checkstyle_packages.xml  |   15 +
 ...dTransactionalOnPrivateMethodsCheckTest.java |   49 +
 .../InputTransactionalOnPrivateMethods.java     |   46 +
 477 files changed, 20800 insertions(+), 6990 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js
index c83004c..0ad2af3 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js
@@ -30,57 +30,6 @@ angular.module('ambariAdminConsole')
       'CLUSTER.USER'
     ],
 
-    orderedAuthorizations : [
-      "SERVICE.VIEW_METRICS",
-      "SERVICE.VIEW_STATUS_INFO",
-      "SERVICE.VIEW_CONFIGS",
-      "SERVICE.COMPARE_CONFIGS",
-      "SERVICE.VIEW_ALERTS",
-      "SERVICE.START_STOP",
-      "SERVICE.DECOMMISSION_RECOMMISSION",
-      "SERVICE.RUN_SERVICE_CHECK",
-      "SERVICE.TOGGLE_MAINTENANCE",
-      "SERVICE.RUN_CUSTOM_COMMAND",
-      "SERVICE.MODIFY_CONFIGS",
-      "SERVICE.MANAGE_CONFIG_GROUPS",
-      "SERVICE.MOVE",
-      "SERVICE.ENABLE_HA",
-      "SERVICE.MANAGE_ALERTS",
-      "SERVICE.TOGGLE_ALERTS",
-      "SERVICE.ADD_DELETE_SERVICES",
-      "SERVICE.VIEW_OPERATIONAL_LOGS",
-      "HOST.VIEW_CONFIGS",
-      "HOST.VIEW_METRICS",
-      "HOST.VIEW_STATUS_INFO",
-      "HOST.ADD_DELETE_COMPONENTS",
-      "HOST.ADD_DELETE_HOSTS",
-      "HOST.TOGGLE_MAINTENANCE",
-      "CLUSTER.VIEW_ALERTS",
-      "CLUSTER.VIEW_CONFIGS",
-      "CLUSTER.VIEW_METRICS",
-      "CLUSTER.VIEW_STACK_DETAILS",
-      "CLUSTER.VIEW_STATUS_INFO",
-      "CLUSTER.MANAGE_ALERTS",
-      "CLUSTER.MANAGE_CONFIG_GROUPS",
-      "CLUSTER.MANAGE_CREDENTIALS",
-      "CLUSTER.MODIFY_CONFIGS",
-      "CLUSTER.TOGGLE_ALERTS",
-      "CLUSTER.TOGGLE_KERBEROS",
-      "CLUSTER.UPGRADE_DOWNGRADE_STACK",
-      "CLUSTER.RUN_CUSTOM_COMMAND",
-      "AMBARI.ADD_DELETE_CLUSTERS",
-      "AMBARI.ASSIGN_ROLES",
-      "AMBARI.EDIT_STACK_REPOS",
-      "AMBARI.MANAGE_GROUPS",
-      "AMBARI.MANAGE_SETTINGS",
-      "AMBARI.MANAGE_STACK_VERSIONS",
-      "AMBARI.MANAGE_USERS",
-      "AMBARI.MANAGE_VIEWS",
-      "AMBARI.RENAME_CLUSTER",
-      "AMBARI.RUN_CUSTOM_COMMAND",
-      "SERVICE.SET_SERVICE_USERS_GROUPS"
-    ],
-
     orderedLevels: ['SERVICE', 'HOST', 'CLUSTER', 'AMBARI'],
 
     ineditableRoles : ['VIEW.USER', 'AMBARI.ADMINISTRATOR'],

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/RoleDetailsModal.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/RoleDetailsModal.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/RoleDetailsModal.js
index 5a14b33..06019c2 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/RoleDetailsModal.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/RoleDetailsModal.js
@@ -33,9 +33,9 @@ angular.module('ambariAdminConsole')
         templateUrl: 'views/modals/RoleDetailsModal.html',
         size: 'lg',
         controller: function($scope, $modalInstance) {
+          var authorizationsOrder;
           $scope.title = '';
           $scope.orderedRoles = ['AMBARI.ADMINISTRATOR'].concat(Cluster.orderedRoles).reverse();
-          $scope.orderedAuthorizations = Cluster.orderedAuthorizations;
           $scope.orderedLevels = Cluster.orderedLevels;
           $scope.authHash = {};
           $scope.getLevelName = function (key) {
@@ -44,25 +44,34 @@ angular.module('ambariAdminConsole')
           angular.forEach(roles, function (r) {
             angular.forEach(r.authorizations, function (auth) {
               var match = auth.authorization_id.match(/(\w+)\./),
-                levelKey = match && match[1],
-                isLevelDisplayed = $scope.orderedAuthorizations.some(function (item) {
-                  return !item.indexOf(levelKey);
-                });
+                  levelKey = match && match[1],
+                  isLevelDisplayed = $scope.orderedLevels.indexOf(levelKey) !== -1;
               if (isLevelDisplayed) {
                 if (!$scope.authHash[levelKey]) {
                   $scope.authHash[levelKey] = {};
                 }
                 if (!$scope.authHash[levelKey][auth.authorization_id]) {
-                  $scope.authHash[levelKey][auth.authorization_id] = auth.authorization_name;
+                  $scope.authHash[levelKey][auth.authorization_id] = {
+                    name: auth.authorization_name,
+                    roles: {}
+                  };
                 }
-                if (!r.authHash) {
-                  r.authHash = {};
-                }
-                r.authHash[auth.authorization_id] = true;
+                $scope.authHash[levelKey][auth.authorization_id].roles[r.permission_name] = true;
               }
             });
           });
-          $scope.roles = roles.sort(function(a, b) {
+
+          // sort authorizations for each level by number of roles permissions
+          for (var level in $scope.authHash) {
+            if ($scope.authHash.hasOwnProperty(level)) {
+              authorizationsOrder = Object.keys($scope.authHash[level]).sort(function (a, b) {
+                return Object.keys($scope.authHash[level][b].roles).length - Object.keys($scope.authHash[level][a].roles).length;
+              });
+              $scope.authHash[level].order = authorizationsOrder;
+            }
+          }
+
+          $scope.roles = roles.sort(function (a, b) {
             return $scope.orderedRoles.indexOf(a.permission_name) - $scope.orderedRoles.indexOf(b.permission_name);
           });
           $scope.ok = function() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-admin/src/main/resources/ui/admin-web/app/views/modals/RoleDetailsModal.html
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/views/modals/RoleDetailsModal.html b/ambari-admin/src/main/resources/ui/admin-web/app/views/modals/RoleDetailsModal.html
index 926bea9..942a733 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/views/modals/RoleDetailsModal.html
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/views/modals/RoleDetailsModal.html
@@ -40,10 +40,10 @@
         </div>
       </div>
       <div class="table-row-wrapper">
-        <div class="table-row" ng-repeat="auth in orderedAuthorizations" ng-if="authHash[level][auth]">
-          <div class="table-cell">{{authHash[level][auth]}}</div>
+        <div class="table-row" ng-repeat="auth_id in authHash[level].order">
+          <div class="table-cell">{{authHash[level][auth_id].name}}</div>
           <div class="table-cell text-center" ng-repeat="role in roles">
-            <i class="glyphicon glyphicon-ok green-icon" ng-show="role.authHash[auth]"></i>
+            <i class="glyphicon glyphicon-ok green-icon" ng-show="authHash[level][auth_id].roles[role.permission_name]"></i>
           </div>
         </div>
       </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-agent/conf/unix/ambari-agent.ini
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/ambari-agent.ini b/ambari-agent/conf/unix/ambari-agent.ini
index 43740ad..c1d4c02 100644
--- a/ambari-agent/conf/unix/ambari-agent.ini
+++ b/ambari-agent/conf/unix/ambari-agent.ini
@@ -39,6 +39,7 @@ alert_kinit_timeout=14400000
 system_resource_overrides=/etc/resource_overrides
 ; memory_threshold_soft_mb=400
 ; memory_threshold_hard_mb=1000
+; ignore_mount_points=/mnt/custom1,/mnt/custom2
 
 [security]
 keysdir=/var/lib/ambari-agent/keys
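
The commented-out ignore_mount_points key above takes a comma-separated list of mount points that the agent should leave out of its disk reports. A minimal sketch of reading and normalizing such a value, assuming the standard ambari-agent.ini location and plain ConfigParser (the agent's own config wrapper exposes an equivalent get(); see Hardware.osdisks later in this commit):

  # Sketch only: parse a comma-separated ignore_mount_points value (Python 2 stdlib).
  import ConfigParser

  parser = ConfigParser.RawConfigParser()
  parser.read("/etc/ambari-agent/conf/ambari-agent.ini")
  raw = parser.get("agent", "ignore_mount_points") if parser.has_option("agent", "ignore_mount_points") else ""
  blacklisted = [item.strip() for item in raw.split(",") if item.strip()]
  # "/mnt/custom1,/mnt/custom2" -> ['/mnt/custom1', '/mnt/custom2']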

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
index 3ec0621..cc10728 100644
--- a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
+++ b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
@@ -294,6 +294,7 @@ class ActionQueue(threading.Thread):
 
     logger.info("Command execution metadata - taskId = {taskId}, retry enabled = {retryAble}, max retry duration (sec) = {retryDuration}, log_output = {log_command_output}".
                  format(taskId=taskId, retryAble=retryAble, retryDuration=retryDuration, log_command_output=log_command_output))
+    command_canceled = False
     while retryDuration >= 0:
       numAttempts += 1
       start = 0
@@ -322,6 +323,7 @@ class ActionQueue(threading.Thread):
           status = self.FAILED_STATUS
           if (commandresult['exitcode'] == -signal.SIGTERM) or (commandresult['exitcode'] == -signal.SIGKILL):
             logger.info('Command with taskId = {cid} was canceled!'.format(cid=taskId))
+            command_canceled = True
             break
 
       if status != self.COMPLETED_STATUS and retryAble and retryDuration > 0:
@@ -338,6 +340,15 @@ class ActionQueue(threading.Thread):
                     .format(cid=taskId, status=status, retryAble=retryAble, retryDuration=retryDuration, delay=delay))
         break
 
+    # do not fail a task that was rescheduled by the server
+    if command_canceled:
+      with self.commandQueue.mutex:
+        for com in self.commandQueue.queue:
+          if com['taskId'] == command['taskId']:
+            logger.info('Command with taskId = {cid} was rescheduled by server. '
+                        'Fail report on cancelled command won\'t be sent with heartbeat.'.format(cid=taskId))
+            return
+
     # final result to stdout
     commandresult['stdout'] += '\n\nCommand completed successfully!\n' if status == self.COMPLETED_STATUS else '\n\nCommand failed after ' + str(numAttempts) + ' tries\n'
     logger.info('Command with taskId = {cid} completed successfully!'.format(cid=taskId) if status == self.COMPLETED_STATUS else 'Command with taskId = {cid} failed after {attempts} tries'.format(cid=taskId, attempts=numAttempts))
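
The exit-code check above relies on the POSIX convention that a process killed by a signal reports the negative signal number, so a cancelled command is recognized by an exit code of -SIGTERM or -SIGKILL before the queue is scanned for a rescheduled copy of the same taskId. A minimal standalone illustration of that check (names are illustrative, not agent API):

  import signal

  def was_cancelled(exitcode):
      # A subprocess terminated by a signal reports -<signal number> as its exit code.
      return exitcode in (-signal.SIGTERM, -signal.SIGKILL)

  print(was_cancelled(-signal.SIGTERM))  # True
  print(was_cancelled(1))                # False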

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
index 7d61611..11c8cbe 100644
--- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
+++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
@@ -154,12 +154,7 @@ class CustomServiceOrchestrator():
         self.file_cache.get_host_scripts_base_dir(server_url_prefix)          
         hook_dir = self.file_cache.get_hook_base_dir(command, server_url_prefix)
         base_dir = self.file_cache.get_service_base_dir(command, server_url_prefix)
-        from ActionQueue import ActionQueue  # To avoid cyclic dependency
-        if self.COMMAND_TYPE in command and command[self.COMMAND_TYPE] == ActionQueue.EXECUTION_COMMAND:
-          logger.info("Found it - " + str(command[self.COMMAND_TYPE]) + " yeah")
-          # limiting to only EXECUTION_COMMANDs for now
-          # TODO need a design for limiting to specific role/component such as METRICS_GRAFANA
-          self.file_cache.get_dashboard_base_dir(server_url_prefix)
+        self.file_cache.get_custom_resources_subdir(command, server_url_prefix)
 
         script_path = self.resolve_script_path(base_dir, script)
         script_tuple = (script_path, base_dir)

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-agent/src/main/python/ambari_agent/FileCache.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/FileCache.py b/ambari-agent/src/main/python/ambari_agent/FileCache.py
index 83ac373..a9ea8f6 100644
--- a/ambari-agent/src/main/python/ambari_agent/FileCache.py
+++ b/ambari-agent/src/main/python/ambari_agent/FileCache.py
@@ -45,7 +45,6 @@ class FileCache():
   STACKS_CACHE_DIRECTORY="stacks"
   COMMON_SERVICES_DIRECTORY="common-services"
   CUSTOM_ACTIONS_CACHE_DIRECTORY="custom_actions"
-  DASHBOARD_DIRECTORY="dashboards"
   HOST_SCRIPTS_CACHE_DIRECTORY="host_scripts"
   HASH_SUM_FILE=".hash"
   ARCHIVE_NAME="archive.zip"
@@ -100,12 +99,17 @@ class FileCache():
                                   server_url_prefix)
 
 
-  def get_dashboard_base_dir(self, server_url_prefix):
+  def get_custom_resources_subdir(self, command, server_url_prefix):
     """
-    Returns a base directory for dashboards
+    Returns a custom directory which must be a subdirectory of the resources dir
     """
+    try:
+      custom_dir = command['commandParams']['custom_folder']
+    except KeyError:
+      return None
+
     return self.provide_directory(self.cache_dir,
-                                  self.DASHBOARD_DIRECTORY,
+                                  custom_dir,
                                   server_url_prefix)
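
Usage-wise, the orchestrator now passes the whole command dictionary and the directory to sync comes from commandParams['custom_folder']; when that key is missing the method returns None and nothing is fetched. An illustrative call, mirroring TestFileCache.test_get_custom_resources_subdir further down in this commit (the file_cache instance and server URL are placeholders):

  command = {
    'commandParams': {
      'custom_folder': 'dashboards'   # subdirectory of the agent resources dir
    }
  }
  file_cache.get_custom_resources_subdir(command, "server_url_pref")
  # -> provide_directory(cache_dir, 'dashboards', "server_url_pref")
  # A command without commandParams['custom_folder'] returns None.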
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-agent/src/main/python/ambari_agent/Hardware.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/Hardware.py b/ambari-agent/src/main/python/ambari_agent/Hardware.py
index 3c94d28..0d431a3 100644
--- a/ambari-agent/src/main/python/ambari_agent/Hardware.py
+++ b/ambari-agent/src/main/python/ambari_agent/Hardware.py
@@ -41,6 +41,7 @@ class Hardware:
   CHECK_REMOTE_MOUNTS_TIMEOUT_DEFAULT = '10'
   IGNORE_ROOT_MOUNTS = ["proc", "dev", "sys"]
   IGNORE_DEVICES = ["proc", "tmpfs", "cgroup", "mqueue", "shm"]
+  LINUX_PATH_SEP = "/"
 
   def __init__(self, config):
     self.hardware = {
@@ -88,6 +89,37 @@ class Hardware:
     return True
 
   @classmethod
+  def _is_mount_blacklisted(cls, blacklist, mount_point):
+    """
+    Verify if particular mount point is in the black list.
+
+    :return True if mount_point or a part of mount point is in the blacklist, otherwise return False
+
+     Example:
+       Mounts: /, /mnt/my_mount, /mnt/my_mount/sub_mount
+       Blacklist: /mnt/my_mount
+       Result: /
+
+    :type blacklist list
+    :type mount_point str
+    :rtype bool
+    """
+
+    if not blacklist or not mount_point:
+      return False
+
+    mount_point_elements = mount_point.split(cls.LINUX_PATH_SEP)
+
+    for el in blacklist:
+      el_list = el.split(cls.LINUX_PATH_SEP)
+      # compare whole path elements, not string prefixes
+      if el_list == mount_point_elements[:len(el_list)]:
+        return True
+
+    return False
+
+
+  @classmethod
   @OsFamilyFuncImpl(OsFamilyImpl.DEFAULT)
   def osdisks(cls, config=None):
     """ Run df to find out the disks on the host. Only works on linux
@@ -95,6 +127,11 @@ class Hardware:
     and any mounts with spaces. """
     timeout = cls._get_mount_check_timeout(config)
     command = ["timeout", timeout, "df", "-kPT"]
+    blacklisted_mount_points = []
+
+    if config:
+      ignore_mount_value = config.get("agent", "ignore_mount_points", default="")
+      blacklisted_mount_points = [item.strip() for item in ignore_mount_value.split(",")]
 
     if not cls._check_remote_mounts(config):
       command.append("-l")
@@ -103,6 +140,7 @@ class Hardware:
     dfdata = df.communicate()[0]
     mounts = [cls._parse_df_line(line) for line in dfdata.splitlines() if line]
     result_mounts = []
+    ignored_mounts = []
 
     for mount in mounts:
       if not mount:
@@ -113,13 +151,21 @@ class Hardware:
        - mounted device is not in the ignored list
        - is accessible to user under which current process running
        - it is not file-mount (docker environment)
+       - mount path or a part of mount path is not in the blacklist
       """
-      if mount["device"] not in cls.IGNORE_DEVICES and \
+      if mount["device"] not in cls.IGNORE_DEVICES and\
          mount["mountpoint"].split("/")[0] not in cls.IGNORE_ROOT_MOUNTS and\
-         cls._chk_writable_mount(mount['mountpoint']) and \
-         not path_isfile(mount["mountpoint"]):
+         cls._chk_writable_mount(mount['mountpoint']) and\
+         not path_isfile(mount["mountpoint"]) and\
+         not cls._is_mount_blacklisted(blacklisted_mount_points, mount["mountpoint"]):
 
         result_mounts.append(mount)
+      else:
+        ignored_mounts.append(mount)
+
+      if len(ignored_mounts) > 0:
+        ignore_list = [el["mountpoint"] for el in ignored_mounts]
+        logger.info("Some mount points was ignored: {0}".format(', '.join(ignore_list)))
 
     return result_mounts
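
The blacklist comparison works on whole path components rather than string prefixes, so an entry of /mnt/my_mount hides /mnt/my_mount and /mnt/my_mount/sub_mount but not /mnt/my_mount2. A self-contained sketch of the same matching rule (illustrative; the agent implementation is _is_mount_blacklisted above):

  def is_blacklisted(blacklist, mount_point):
      # Split both paths on "/" and compare element by element.
      parts = mount_point.split("/")
      for entry in blacklist:
          entry_parts = entry.split("/")
          if entry_parts == parts[:len(entry_parts)]:
              return True
      return False

  blacklist = ["/mnt/my_mount"]
  print(is_blacklisted(blacklist, "/mnt/my_mount/sub_mount"))  # True
  print(is_blacklisted(blacklist, "/mnt/my_mount2"))           # False
  print(is_blacklisted(blacklist, "/"))                        # False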
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-agent/src/main/python/ambari_agent/HostInfo.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/HostInfo.py b/ambari-agent/src/main/python/ambari_agent/HostInfo.py
index 338daac..3810b52 100644
--- a/ambari-agent/src/main/python/ambari_agent/HostInfo.py
+++ b/ambari-agent/src/main/python/ambari_agent/HostInfo.py
@@ -121,12 +121,12 @@ class HostInfo(object):
     return False
 
 def get_ntp_service():
-  if OSCheck.is_redhat_family() and int(OSCheck.get_os_major_version()) >= 7:
-    return ("chronyd", "ntpd",)
-  elif OSCheck.is_redhat_family():
-    return ("ntpd",)
-  elif OSCheck.is_suse_family() or OSCheck.is_ubuntu_family():
-    return ("ntp",)
+  if OSCheck.is_redhat_family():
+    return ("ntpd", "chronyd",)
+  elif OSCheck.is_suse_family():
+    return ("ntpd", "ntp",)
+  elif OSCheck.is_ubuntu_family():
+    return ("ntp", "chrony",)
 
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
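
get_ntp_service() now returns a tuple of candidate daemons per OS family instead of a single hard-coded name, so the host check can accept whichever one is installed (ntpd or chronyd on RedHat, ntpd or ntp on SUSE, ntp or chrony on Ubuntu). A hedged sketch of how a caller might probe the candidates in order; the service_running callable is hypothetical and not part of this commit:

  def ntp_is_running(candidates, service_running):
      # service_running(name) -> True if the named service is active.
      return any(service_running(name) for name in candidates)

  # e.g. on a RedHat-family host:
  # ntp_is_running(("ntpd", "chronyd"), service_running)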

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
index 7d04d42..d5dde8b 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
@@ -27,6 +27,7 @@ import os, errno, time, pprint, tempfile, threading
 import sys
 from threading import Thread
 import copy
+import signal
 
 from mock.mock import patch, MagicMock, call
 from ambari_agent.CustomServiceOrchestrator import CustomServiceOrchestrator
@@ -703,6 +704,53 @@ class TestActionQueue(TestCase):
     report = actionQueue.result()
     self.assertEqual(len(report['reports']), 0)
 
+  def test_cancel_with_reschedule_command(self):
+    config = AmbariConfig()
+    tempdir = tempfile.gettempdir()
+    config.set('agent', 'prefix', tempdir)
+    config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
+    config.set('agent', 'tolerate_download_failures', "true")
+    dummy_controller = MagicMock()
+    actionQueue = ActionQueue(config, dummy_controller)
+    unfreeze_flag = threading.Event()
+    python_execution_result_dict = {
+      'stdout': 'out',
+      'stderr': 'stderr',
+      'structuredOut' : '',
+      'status' : '',
+      'exitcode' : -signal.SIGTERM
+    }
+
+    def side_effect(command, tmpoutfile, tmperrfile, override_output_files=True, retry=False):
+      unfreeze_flag.wait()
+      return python_execution_result_dict
+    def patched_aq_execute_command(command):
+      # We have to perform patching for separate thread in the same thread
+      with patch.object(CustomServiceOrchestrator, "runCommand") as runCommand_mock:
+        runCommand_mock.side_effect = side_effect
+        actionQueue.execute_command(command)
+
+    # We call method in a separate thread
+    execution_thread = Thread(target = patched_aq_execute_command ,
+                              args = (self.datanode_install_command, ))
+    execution_thread.start()
+    #  check in progress report
+    # wait until ready
+    while True:
+      time.sleep(0.1)
+      report = actionQueue.result()
+      if len(report['reports']) != 0:
+        break
+
+    unfreeze_flag.set()
+    # wait until ready
+    while len(report['reports']) != 0:
+      time.sleep(0.1)
+      report = actionQueue.result()
+
+    # check report
+    self.assertEqual(len(report['reports']), 0)
+
 
   @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
   @patch.object(CustomServiceOrchestrator, "runCommand")

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
index 0304adc..563d250 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
@@ -68,7 +68,7 @@ class TestCustomServiceOrchestrator(TestCase):
   def test_add_reg_listener_to_controller(self, FileCache_mock):
     FileCache_mock.return_value = None
     dummy_controller = MagicMock()
-    config = AmbariConfig().getConfig()
+    config = AmbariConfig()
     tempdir = tempfile.gettempdir()
     config.set('agent', 'prefix', tempdir)
     CustomServiceOrchestrator(config, dummy_controller)
@@ -204,7 +204,7 @@ class TestCustomServiceOrchestrator(TestCase):
   def test_resolve_script_path(self, FileCache_mock, exists_mock):
     FileCache_mock.return_value = None
     dummy_controller = MagicMock()
-    config = AmbariConfig().getConfig()
+    config = AmbariConfig()
     orchestrator = CustomServiceOrchestrator(config, dummy_controller)
     # Testing existing path
     exists_mock.return_value = True
@@ -220,7 +220,7 @@ class TestCustomServiceOrchestrator(TestCase):
     except AgentException:
       pass # Expected
 
-  @patch.object(FileCache, "get_dashboard_base_dir")
+  @patch.object(FileCache, "get_custom_resources_subdir")
   @patch.object(CustomServiceOrchestrator, "resolve_script_path")
   @patch.object(CustomServiceOrchestrator, "resolve_hook_script_path")
   @patch.object(FileCache, "get_host_scripts_base_dir")
@@ -235,7 +235,7 @@ class TestCustomServiceOrchestrator(TestCase):
                       get_host_scripts_base_dir_mock, 
                       resolve_hook_script_path_mock, 
                       resolve_script_path_mock,
-                      get_dashboard_base_dir_mock):
+                      get_custom_resources_subdir_mock):
     
     FileCache_mock.return_value = None
     command = {
@@ -267,7 +267,6 @@ class TestCustomServiceOrchestrator(TestCase):
     unix_process_id = 111
     orchestrator.commands_in_progress = {command['taskId']: unix_process_id}
     get_hook_base_dir_mock.return_value = "/hooks/"
-    get_dashboard_base_dir_mock.return_value = "/dashboards/"
     # normal run case
     run_file_mock.return_value = {
         'stdout' : 'sss',
@@ -278,7 +277,6 @@ class TestCustomServiceOrchestrator(TestCase):
     self.assertEqual(ret['exitcode'], 0)
     self.assertTrue(run_file_mock.called)
     self.assertEqual(run_file_mock.call_count, 3)
-    self.assertTrue(get_dashboard_base_dir_mock.called)
 
     run_file_mock.reset_mock()
 
@@ -301,25 +299,6 @@ class TestCustomServiceOrchestrator(TestCase):
 
     run_file_mock.reset_mock()
 
-    # For role=METRICS_GRAFANA, dashboards should be sync'd
-    command['role'] = 'METRICS_GRAFANA'
-    get_dashboard_base_dir_mock.reset_mock()
-    get_dashboard_base_dir_mock.return_value = "/dashboards/"
-
-    run_file_mock.return_value = {
-        'stdout' : 'sss',
-        'stderr' : 'eee',
-        'exitcode': 0,
-      }
-    ret = orchestrator.runCommand(command, "out.txt", "err.txt")
-    self.assertEqual(ret['exitcode'], 0)
-    self.assertTrue(run_file_mock.called)
-    self.assertEqual(run_file_mock.call_count, 3)
-    self.assertTrue(get_dashboard_base_dir_mock.called)
-
-    command['role'] = 'REGION_SERVER'
-    run_file_mock.reset_mock()
-
     # unknown script type case
     command['commandParams']['script_type'] = "SOME_TYPE"
     ret = orchestrator.runCommand(command, "out.txt", "err.txt")
@@ -332,7 +311,6 @@ class TestCustomServiceOrchestrator(TestCase):
 
     pass
 
-  @patch.object(FileCache, "get_dashboard_base_dir")
   @patch("ambari_commons.shell.kill_process_with_children")
   @patch.object(CustomServiceOrchestrator, "resolve_script_path")
   @patch.object(CustomServiceOrchestrator, "resolve_hook_script_path")
@@ -347,8 +325,7 @@ class TestCustomServiceOrchestrator(TestCase):
                       get_hook_base_dir_mock, get_service_base_dir_mock,
                       get_host_scripts_base_dir_mock,
                       resolve_hook_script_path_mock, resolve_script_path_mock,
-                      kill_process_with_children_mock,
-                      get_dashboard_base_dir_mock):
+                      kill_process_with_children_mock):
     FileCache_mock.return_value = None
     command = {
       'role' : 'REGION_SERVER',
@@ -378,7 +355,6 @@ class TestCustomServiceOrchestrator(TestCase):
     unix_process_id = 111
     orchestrator.commands_in_progress = {command['taskId']: unix_process_id}
     get_hook_base_dir_mock.return_value = "/hooks/"
-    get_dashboard_base_dir_mock.return_value = "/dashboards/"
     run_file_mock_return_value = {
       'stdout' : 'killed',
       'stderr' : 'killed',

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestFileCache.py b/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
index fbefc2b..00f6b69 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
@@ -119,10 +119,16 @@ class TestFileCache(TestCase):
 
 
   @patch.object(FileCache, "provide_directory")
-  def test_get_dashboard_base_dir(self, provide_directory_mock):
+  def test_get_custom_resources_subdir(self, provide_directory_mock):
     provide_directory_mock.return_value = "dummy value"
     fileCache = FileCache(self.config)
-    res = fileCache.get_dashboard_base_dir("server_url_pref")
+    command = {
+      'commandParams': {
+        'custom_folder' : 'dashboards'
+      }
+    }
+
+    res = fileCache.get_custom_resources_subdir(command, "server_url_pref")
     self.assertEquals(
       pprint.pformat(provide_directory_mock.call_args_list[0][0]),
       "('/var/lib/ambari-agent/cache', 'dashboards', 'server_url_pref')")

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-agent/src/test/python/ambari_agent/TestHardware.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHardware.py b/ambari-agent/src/test/python/ambari_agent/TestHardware.py
index 038b2f8..ff3b40b 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestHardware.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestHardware.py
@@ -25,6 +25,7 @@ from mock.mock import patch, MagicMock, Mock
 import unittest
 import platform
 import socket
+import os
 from only_for_platform import not_for_platform, PLATFORM_WINDOWS
 from ambari_agent import hostname
 from ambari_agent.Hardware import Hardware
@@ -373,6 +374,75 @@ SwapFree:        1598676 kB
     self.assertEquals(2, json_mock.call_count)
     self.assertEquals('value', result['key'])
 
+  @patch.object(Hardware, "_chk_writable_mount")
+  @patch("ambari_agent.Hardware.path_isfile")
+  def test_osdisks_blacklist(self, isfile_mock, chk_writable_mount_mock):
+    df_output = \
+      """Filesystem                                                                                        Type  1024-blocks     Used Available Capacity Mounted on
+      /dev/mapper/docker-253:0-4980899-d45c264d37ab18c8ed14f890f4d59ac2b81e1c52919eb36a79419787209515f3 xfs      31447040  1282384  30164656       5% /
+      tmpfs                                                                                             tmpfs    32938336        4  32938332       1% /dev
+      tmpfs                                                                                             tmpfs    32938336        0  32938336       0% /sys/fs/cgroup
+      /dev/mapper/fedora-root                                                                           ext4    224161316 12849696 199901804       7% /etc/resolv.conf
+      /dev/mapper/fedora-root                                                                           ext4    224161316 12849696 199901804       7% /etc/hostname
+      /dev/mapper/fedora-root                                                                           ext4    224161316 12849696 199901804       7% /etc/hosts
+      shm                                                                                               tmpfs       65536        0     65536       0% /dev/shm
+      /dev/mapper/fedora-root                                                                           ext4    224161316 12849696 199901804       7% /run/secrets
+      /dev/mapper/fedora-root                                                                           ext4    224161316 12849696 199901804       7% /mnt/blacklisted_mount
+      /dev/mapper/fedora-root                                                                           ext4    224161316 12849696 199901804       7% /mnt/blacklisted_mount/sub-dir
+      """
+
+    def isfile_side_effect(path):
+      assume_files = ["/etc/resolv.conf", "/etc/hostname", "/etc/hosts"]
+      return path in assume_files
+
+    def chk_writable_mount_side_effect(path):
+      assume_read_only = ["/run/secrets"]
+      return path not in assume_read_only
+
+    isfile_mock.side_effect = isfile_side_effect
+    chk_writable_mount_mock.side_effect = chk_writable_mount_side_effect
+
+    config_dict = {
+      "agent": {
+        "ignore_mount_points": "/mnt/blacklisted_mount"
+      }
+    }
+
+    with patch("subprocess.Popen") as open_mock:
+      proc_mock = Mock()
+      attr = {
+        'communicate.return_value': [
+          df_output
+        ]
+      }
+      proc_mock.configure_mock(**attr)
+      open_mock.return_value = proc_mock
+
+      def conf_get(section, key, default=""):
+        if section in config_dict and key in config_dict[section]:
+          return config_dict[section][key]
+
+        return default
+
+      def has_option(section, key):
+        return section in config_dict and key in config_dict[section]
+
+      conf = Mock()
+      attr = {
+        'get.side_effect': conf_get,
+        'has_option.side_effect': has_option
+      }
+      conf.configure_mock(**attr)
+
+      result = Hardware.osdisks(conf)
+
+    self.assertEquals(1, len(result))
+
+    expected_mounts_left = ["/"]
+    mounts_left = [item["mountpoint"] for item in result]
+
+    self.assertEquals(expected_mounts_left, mounts_left)
+
 
 if __name__ == "__main__":
   unittest.main()
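
The new blacklist test reads the mount blacklist from the "agent" section of the agent configuration. A minimal sketch of the corresponding ambari-agent.ini entry (the path is illustrative and matches the value used in the test):

    [agent]
    ignore_mount_points=/mnt/blacklisted_mount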

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py b/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
index 7b4e8f5..bfc786c 100644
--- a/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
+++ b/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
@@ -22,35 +22,41 @@ import os
 import random
 from resource_management.libraries.functions import conf_select
 
-DEFAULT_COLLECTOR_SUFFIX = '.sink.timeline.collector'
+DEFAULT_COLLECTOR_SUFFIX = '.sink.timeline.collector.hosts'
 DEFAULT_METRICS2_PROPERTIES_FILE_NAME = 'hadoop-metrics2.properties'
 
 def select_metric_collector_for_sink(sink_name):
-    # TODO check '*' sink_name
+  # TODO check '*' sink_name
 
-    all_collectors_string = get_metric_collectors_from_properties_file(sink_name)
-    all_collectors_list = all_collectors_string.split(',')
-    return select_metric_collector_hosts_from_hostnames(all_collectors_list)
+  all_collectors_string = get_metric_collectors_from_properties_file(sink_name)
+  return select_metric_collector_hosts_from_hostnames(all_collectors_string)
 
-def select_metric_collector_hosts_from_hostnames(hosts):
-    return random.choice(hosts)
+def select_metric_collector_hosts_from_hostnames(comma_separated_hosts):
+  if comma_separated_hosts:
+    hosts = comma_separated_hosts.split(',')
+    return get_random_host(hosts)
+  else:
+    return 'localhost'
+
+def get_random_host(hosts):
+  return random.choice(hosts)
 
 def get_metric_collectors_from_properties_file(sink_name):
-    hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-    props = load_properties_from_file(os.path.join(hadoop_conf_dir, DEFAULT_METRICS2_PROPERTIES_FILE_NAME))
-    return props.get(sink_name + DEFAULT_COLLECTOR_SUFFIX)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+  props = load_properties_from_file(os.path.join(hadoop_conf_dir, DEFAULT_METRICS2_PROPERTIES_FILE_NAME))
+  return props.get(sink_name + DEFAULT_COLLECTOR_SUFFIX)
 
 def load_properties_from_file(filepath, sep='=', comment_char='#'):
-    """
-    Read the file passed as parameter as a properties file.
-    """
-    props = {}
-    with open(filepath, "rt") as f:
-        for line in f:
-            l = line.strip()
-            if l and not l.startswith(comment_char):
-                key_value = l.split(sep)
-                key = key_value[0].strip()
-                value = sep.join(key_value[1:]).strip('" \t')
-                props[key] = value
-    return props
\ No newline at end of file
+  """
+  Read the file passed as parameter as a properties file.
+  """
+  props = {}
+  with open(filepath, "rt") as f:
+    for line in f:
+      l = line.strip()
+      if l and not l.startswith(comment_char):
+        key_value = l.split(sep)
+        key = key_value[0].strip()
+        value = sep.join(key_value[1:]).strip('" \t')
+        props[key] = value
+  return props
\ No newline at end of file
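
A short sketch of how the reworked helper behaves once the *.sink.timeline.collector.hosts property holds a comma-separated host list (the host names below are made up):

    from ambari_commons.ambari_metrics_helper import select_metric_collector_hosts_from_hostnames

    # One host is picked at random from the comma-separated string.
    print select_metric_collector_hosts_from_hostnames("c6401.ambari.apache.org,c6402.ambari.apache.org")

    # An empty or missing property value falls back to 'localhost'.
    print select_metric_collector_hosts_from_hostnames(None)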

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-common/src/main/python/ambari_commons/logging_utils.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/logging_utils.py b/ambari-common/src/main/python/ambari_commons/logging_utils.py
index 4ebe696..6535ebc 100644
--- a/ambari-common/src/main/python/ambari_commons/logging_utils.py
+++ b/ambari-common/src/main/python/ambari_commons/logging_utils.py
@@ -67,7 +67,10 @@ def set_debug_mode_from_options(options):
 #
 # Prints an "info" messsage.
 #
-def print_info_msg(msg):
+def print_info_msg(msg, forced=False):
+  if forced:
+    print("INFO: " + msg)
+    return
   if _VERBOSE:
     print("INFO: " + msg)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-common/src/main/python/ambari_commons/parallel_processing.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/ambari_commons/parallel_processing.py b/ambari-common/src/main/python/ambari_commons/parallel_processing.py
new file mode 100644
index 0000000..c5a95de
--- /dev/null
+++ b/ambari-common/src/main/python/ambari_commons/parallel_processing.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import logging
+from multiprocessing import Process, Queue
+
+logger = logging.getLogger()
+
+SUCCESS = "SUCCESS"
+FAILED = "FAILED"
+
+class ParallelProcessResult(object):
+    def __init__(self, element, status, result):
+        self.result = result
+        self.status = status
+        self.element = element
+
+class ParallelProcess(Process):
+
+
+    def __init__(self, function, element, params, queue):
+        self.function = function
+        self.element = element
+        self.params = params
+        self.queue = queue
+        super(ParallelProcess, self).__init__()
+
+    def return_name(self):
+        ## NOTE: self.name is an attribute of multiprocessing.Process
+        return "Process running function '%s' for element '%s'" % (self.function, self.element)
+
+    def run(self):
+        try:
+            result = self.function(self.element, self.params)
+            self.queue.put(ParallelProcessResult(self.element, SUCCESS, result))
+        except Exception as e:
+            self.queue.put(ParallelProcessResult(self.element, FAILED,
+                            "Exception while running function '%s' for '%s'. Reason: %s" % (self.function, self.element, str(e))))
+        return
+
+def execute_in_parallel(function, array, params, wait_for_all=False):
+    logger.info("Started running %s for %s" % (function, array))
+    processes = []
+    q = Queue()
+    counter = len(array)
+    results = {}
+
+    for element in array:
+        process = ParallelProcess(function, element, params, q)
+        process.start()
+        processes.append(process)
+
+    while counter > 0:
+        tmp = q.get()
+        counter -= 1
+        results[tmp.element] = tmp
+        if tmp.status == SUCCESS and not wait_for_all:
+            counter = 0
+
+    for process in processes:
+        process.terminate()
+
+    logger.info("Finished running %s for %s" % (function, array))
+
+    return results
+
+def func(elem, params):
+    if elem == 'S':
+        return "lalala"
+    else:
+        raise Exception('Exception')
+
+if __name__ == "__main__":
+    results = execute_in_parallel(func, ['F', 'BF', 'S'], None)
+    for result in results:
+        print results[result].element
+        print results[result].status
+        print results[result].result
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
index 3431495..46562e0 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/constants.py
@@ -97,6 +97,7 @@ class StackFeature:
   RANGER_PID_SUPPORT = "ranger_pid_support"
   RANGER_KMS_PID_SUPPORT = "ranger_kms_pid_support"
   RANGER_ADMIN_PASSWD_CHANGE = "ranger_admin_password_change"
+  RANGER_SETUP_DB_ON_START = "ranger_setup_db_on_start"
   STORM_METRICS_APACHE_CLASSES = "storm_metrics_apache_classes"
   SPARK_JAVA_OPTS_SUPPORT = "spark_java_opts_support"
   ATLAS_HBASE_SETUP = "atlas_hbase_setup"

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
index 519c88b..31a9be4 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
@@ -188,7 +188,7 @@ def _get_single_version_from_stack_select():
     Logger.error("Could not verify stack version by calling '{0}'. Return Code: {1}, Output: {2}.".format(get_stack_versions_cmd, str(code), str(out)))
     return None
 
-  matches = re.findall(r"([\d\.]+\-\d+)", out)
+  matches = re.findall(r"([\d\.]+(?:-\d+)?)", out)
 
   if matches and len(matches) == 1:
     stack_version = matches[0]
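
The relaxed pattern makes the build-number suffix optional, so a version reported without a build number still produces a single match. A quick illustration (the version strings are made up):

    import re

    pattern = r"([\d\.]+(?:-\d+)?)"
    print re.findall(pattern, "2.5.0.0-1245")  # ['2.5.0.0-1245']
    print re.findall(pattern, "2.5.0.0")       # ['2.5.0.0'] - the previous pattern found nothing here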

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py b/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
index ca8fe19..74db04a 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
@@ -185,7 +185,7 @@ def copy_tarballs_to_hdfs(tarball_prefix, stack_select_component_name, component
                    (get_stack_version_cmd, str(code), str(out)))
     return 1
 
-  matches = re.findall(r"([\d\.]+\-\d+)", out)
+  matches = re.findall(r"([\d\.]+(?:-\d+)?)", out)
   stack_version = matches[0] if matches and len(matches) > 0 else None
 
   if not stack_version:

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py b/ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py
index 8257022..5a16061 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py
@@ -70,7 +70,7 @@ def should_install_infra_solr():
 
 def should_install_infra_solr_client():
   config = Script.get_config()
-  return _has_applicable_local_component(config, ['INFRA_SOLR_CLIENT', 'ATLAS_SERVER', 'RANGER_ADMIN'])
+  return _has_applicable_local_component(config, ['INFRA_SOLR_CLIENT', 'ATLAS_SERVER', 'RANGER_ADMIN', 'LOGSEARCH_SERVER'])
 
 def should_install_logsearch_portal():
   config = Script.get_config()

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py b/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
index a1d2f95..eeae4bc 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/setup_atlas_hook.py
@@ -117,6 +117,8 @@ def setup_atlas_hook(service_name, service_props, atlas_hook_filepath, owner, gr
   """
   import params
   atlas_props = default('/configurations/application-properties', {})
+  merged_props = {}
+  merged_props.update(service_props)
 
   if has_atlas_in_cluster():
     # Take the subset
@@ -131,12 +133,12 @@ def setup_atlas_hook(service_name, service_props, atlas_hook_filepath, owner, gr
 
     merged_props.update(service_props)
 
-    Logger.info(format("Generating Atlas Hook config file {atlas_hook_filepath}"))
-    PropertiesFile(atlas_hook_filepath,
-                   properties = merged_props,
-                   owner = owner,
-                   group = group,
-                   mode = 0644)
+  Logger.info(format("Generating Atlas Hook config file {atlas_hook_filepath}"))
+  PropertiesFile(atlas_hook_filepath,
+           properties = merged_props,
+           owner = owner,
+           group = group,
+           mode = 0644)
 
 
 def setup_atlas_jar_symlinks(hook_name, jar_source_dir):
@@ -157,22 +159,23 @@ def setup_atlas_jar_symlinks(hook_name, jar_source_dir):
   """
   import params
 
-  if has_atlas_in_cluster():
-    atlas_home_dir = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ \
-      else format("{stack_root}/current/atlas-server")
-
-    # Will only exist if this host contains Atlas Server
-    atlas_hook_dir = os.path.join(atlas_home_dir, "hook", hook_name)
-    if os.path.exists(atlas_hook_dir):
-      Logger.info("Atlas Server is present on this host, will symlink jars inside of %s to %s if not already done." %
-                  (jar_source_dir, atlas_hook_dir))
-
-      src_files = os.listdir(atlas_hook_dir)
-      for file_name in src_files:
-        atlas_hook_file_name = os.path.join(atlas_hook_dir, file_name)
-        source_lib_file_name = os.path.join(jar_source_dir, file_name)
-        if os.path.isfile(atlas_hook_file_name):
-          Link(source_lib_file_name, to=atlas_hook_file_name)
+  atlas_home_dir = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ \
+    else format("{stack_root}/current/atlas-server")
+
+  # Will only exist if this host contains Atlas Server
+  atlas_hook_dir = os.path.join(atlas_home_dir, "hook", hook_name)
+  if os.path.exists(atlas_hook_dir):
+    Logger.info("Atlas Server is present on this host, will symlink jars inside of %s to %s if not already done." %
+                (jar_source_dir, atlas_hook_dir))
+
+    src_files = os.listdir(atlas_hook_dir)
+    for file_name in src_files:
+      atlas_hook_file_name = os.path.join(atlas_hook_dir, file_name)
+      source_lib_file_name = os.path.join(jar_source_dir, file_name)
+      if os.path.isfile(atlas_hook_file_name):
+        Link(source_lib_file_name, to=atlas_hook_file_name)
+  else:
+    Logger.info("Atlas hook directory path {0} doesn't exist".format(atlas_hook_dir))
 
 def install_atlas_hook_packages(atlas_plugin_package, atlas_ubuntu_plugin_package, host_sys_prepped,
                                 agent_stack_retry_on_unavailability, agent_stack_retry_count):

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-common/src/main/python/resource_management/libraries/functions/simulate_perf_cluster_alert_behaviour.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/simulate_perf_cluster_alert_behaviour.py b/ambari-common/src/main/python/resource_management/libraries/functions/simulate_perf_cluster_alert_behaviour.py
new file mode 100644
index 0000000..736e5e3
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/simulate_perf_cluster_alert_behaviour.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python
+
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+__all__ = ["simulate_perf_cluster_alert_behaviour"]
+
+import logging
+import random
+import time
+
+from datetime import datetime
+from resource_management.core.exceptions import Fail
+
+RESULT_CODE_OK = 'OK'
+RESULT_CODE_CRITICAL = 'CRITICAL'
+RESULT_CODE_UNKNOWN = 'UNKNOWN'
+
+OK_MESSAGE = 'Ok'
+FAIL_MESSAGE = 'Expected Fail'
+UNKNOWN_MESSAGE = 'Expected Unknown'
+
+logger = logging.getLogger('ambari_alerts')
+
+return_values_map = {"true":[RESULT_CODE_OK, OK_MESSAGE], "false":[RESULT_CODE_CRITICAL, FAIL_MESSAGE],
+                     "none":[RESULT_CODE_UNKNOWN, UNKNOWN_MESSAGE]}
+
+def simulate_perf_cluster_alert_behaviour(alert_behaviour_properties, configurations):
+  """
+  Returns a tuple containing the result code and a pre-formatted result label
+
+  Keyword arguments:
+  configurations (dictionary): a mapping of configuration key to value
+  parameters (dictionary): a mapping of script parameter key to value
+  host_name (string): the name of this host where the alert is running
+  """
+  alert_behaviour_type=None
+  alert_behaviour_type_key=alert_behaviour_properties["alert_behaviour_type"]
+  if alert_behaviour_type_key in configurations:
+    alert_behaviour_type = configurations[alert_behaviour_type_key].lower()
+
+  if alert_behaviour_type == "percentage":
+    alert_success_percentage=None
+    alert_success_percentage_key=alert_behaviour_properties["alert_success_percentage"]
+
+    if alert_success_percentage_key in configurations:
+      alert_success_percentage = configurations[alert_success_percentage_key]
+
+    if alert_success_percentage:
+      random_number = random.uniform(0, 100)
+      if random_number <= int(alert_success_percentage):
+        return (RESULT_CODE_OK, [OK_MESSAGE])
+      else:
+        return (RESULT_CODE_CRITICAL, [FAIL_MESSAGE])
+    else:
+      raise Fail("Percentage behaviour was set but alert.success.percentage was not set!")
+  elif alert_behaviour_type == "timeout":
+    alert_timeout_return_value=None
+    alert_timeout_secs=None
+    alert_timeout_return_value_key=alert_behaviour_properties["alert_timeout_return_value"]
+    alert_timeout_secs_key=alert_behaviour_properties["alert_timeout_secs"]
+
+    if alert_timeout_return_value_key in configurations:
+      alert_timeout_return_value = configurations[alert_timeout_return_value_key].lower()
+
+    if alert_timeout_secs_key in configurations:
+      alert_timeout_secs = configurations[alert_timeout_secs_key]
+
+    if alert_timeout_return_value and alert_timeout_secs:
+      logger.info("Sleeping for {0} seconds".format(alert_timeout_secs))
+      print "Sleeping for {0} seconds".format(alert_timeout_secs)
+      time.sleep(int(alert_timeout_secs))
+      return (return_values_map[alert_timeout_return_value][0], [return_values_map[alert_timeout_return_value][1]])
+    else:
+      raise Fail("Timeout behaviour was set but alert.timeout.return.value/alert.timeout.secs were not set!")
+  elif alert_behaviour_type == "flip":
+    alert_flip_interval_mins=None
+    alert_flip_interval_mins_key=alert_behaviour_properties["alert_flip_interval_mins"]
+
+    if alert_flip_interval_mins_key in configurations:
+      alert_flip_interval_mins = configurations[alert_flip_interval_mins_key]
+
+    if alert_flip_interval_mins:
+      curr_time = datetime.utcnow()
+      return_value = ((curr_time.minute / int(alert_flip_interval_mins)) % 2) == 0
+      return (return_values_map[str(return_value).lower()][0], [return_values_map[str(return_value).lower()][1]])
+    else:
+      raise Fail("Flip behaviour was set but alert.flip.interval.mins was not set!")
+
+
+
+  result_code = RESULT_CODE_OK
+  label = OK_MESSAGE
+  return (result_code, [label])
\ No newline at end of file
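
A rough sketch of how an alert script could wire this helper up for the "percentage" behaviour; the property keys and the execute() signature below are illustrative, not taken from this commit:

    from resource_management.libraries.functions.simulate_perf_cluster_alert_behaviour \
      import simulate_perf_cluster_alert_behaviour

    # Hypothetical mapping from behaviour settings to the configuration keys that hold their values.
    ALERT_BEHAVIOUR_PROPERTIES = {
      "alert_behaviour_type": "{{cluster-env/alert.behaviour.type}}",
      "alert_success_percentage": "{{cluster-env/alert.success.percentage}}"
    }

    def execute(configurations={}, parameters={}, host_name=None):
      # e.g. configurations = {"{{cluster-env/alert.behaviour.type}}": "percentage",
      #                        "{{cluster-env/alert.success.percentage}}": "90"}
      return simulate_perf_cluster_alert_behaviour(ALERT_BEHAVIOUR_PROPERTIES, configurations)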

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-common/src/main/python/resource_management/libraries/script/dummy.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/dummy.py b/ambari-common/src/main/python/resource_management/libraries/script/dummy.py
index 2a48de3..3dcece5 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/dummy.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/dummy.py
@@ -31,6 +31,9 @@ from ambari_commons.constants import AMBARI_SUDO_BINARY
 from resource_management.core.exceptions import ComponentIsNotRunning
 
 
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions import get_kinit_path
+
 class Dummy(Script):
   """
   Dummy component to be used for performance testing since doesn't actually run a service.
@@ -75,6 +78,14 @@ class Dummy(Script):
     print "Start"
     self.prepare()
 
+    if self.config['configurations']['cluster-env']['security_enabled'] :
+      print "Executing kinit... "
+      kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+      principal_replaced = self.config['configurations'][self.principal_conf_name][self.principal_name].replace("_HOST", self.host_name)
+      keytab_path_replaced = self.config['configurations'][self.keytab_conf_name][self.keytab_name].replace("_HOST", self.host_name)
+      Execute("%s -kt %s %s" % (kinit_path_local, keytab_path_replaced, principal_replaced),
+              user="root")
+
     if not os.path.isfile(self.pid_file):
       print "Creating pid file: %s" % self.pid_file
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
index 667c9ff..08680f6 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
@@ -141,60 +141,4 @@ public class MapperDateTest {
     assertEquals("Invalid value wasn't returned as it is", invalidValue, mappedValue);
     assertTrue("jsonObj is not empty", jsonObj.isEmpty());
   }
-  
-  @Test
-  public void testMapperDate_patternWithoutYear_previousYearLog() throws Exception {
-    LOG.info("testMapperDate_patternWithoutYear_previousYearLog()");
-    String fieldName = "logtime";
-    Calendar currentCalendar = Calendar.getInstance();
-    Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", "yyyy-MM-dd HH:mm:ss.SSS");
-    String srcDatePattern ="MMM dd HH:mm:ss";
-    mapConfigs.put("src_date_pattern", srcDatePattern);
-    MapperDate mapperDate = new MapperDate();
-    assertTrue("Could not initialize!", mapperDate.init(null, fieldName, null, mapConfigs));
-    Map<String, Object> jsonObj = new HashMap<>();
-    Calendar nextMonthCalendar = Calendar.getInstance();
-    
-    nextMonthCalendar.set(Calendar.MONTH, currentCalendar.get(Calendar.MONTH)+1 );
-    String inputDateStr = new SimpleDateFormat("MMM").format(nextMonthCalendar.getTime()) + " 01 12:01:45";
-    Object mappedValue = mapperDate.apply(jsonObj, inputDateStr);
-    Date mappedDateValue = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").parse(mappedValue.toString());
-    String mappedDateValueStr = new SimpleDateFormat(srcDatePattern).format(mappedDateValue);
-    assertEquals(Date.class, mappedDateValue.getClass());
-    
-    int expectedLogYear = currentCalendar.get(Calendar.YEAR)-1;
-    Calendar mapppedValueCalendar = Calendar.getInstance();
-    mapppedValueCalendar.setTime(mappedDateValue);
-    assertEquals("Mapped year wasn't matched properly", expectedLogYear, mapppedValueCalendar.get(Calendar.YEAR));
-    assertEquals("Mapped date wasn't matched properly", inputDateStr, mappedDateValueStr);
-    assertEquals("Value wasn't put into jsonObj",mappedValue, jsonObj.remove(fieldName));
-    assertTrue("jsonObj is not empty", jsonObj.isEmpty());
-  }
-  
-  @Test
-  public void testMapperDate_patternWithoutYear_currentYearLog() throws Exception {
-    LOG.info("testMapperDate_patternWithoutYear_currentYearLog()");
-    String fieldName = "logtime";
-    Calendar currentCalendar = Calendar.getInstance();
-    Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", "yyyy-MM-dd HH:mm:ss.SSS");
-    String srcDatePattern ="MMM dd HH:mm:ss";
-    mapConfigs.put("src_date_pattern", srcDatePattern);
-    MapperDate mapperDate = new MapperDate();
-    assertTrue("Could not initialize!", mapperDate.init(null, fieldName, null, mapConfigs));
-    Map<String, Object> jsonObj = new HashMap<>();
-    String inputDateStr = new SimpleDateFormat("MMM").format(currentCalendar.getTime()) + " 01 12:01:45";
-    Object mappedValue = mapperDate.apply(jsonObj, inputDateStr);
-    Date mappedDateValue = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").parse(mappedValue.toString());
-    String mappedDateValueStr = new SimpleDateFormat(srcDatePattern).format(mappedDateValue);
-    assertEquals(Date.class, mappedDateValue.getClass());
-    int expectedLogYear = currentCalendar.get(Calendar.YEAR);
-    Calendar mapppedValueCalendar = Calendar.getInstance();
-    mapppedValueCalendar.setTime(mappedDateValue);
-    assertEquals("Mapped year wasn't matched properly", expectedLogYear, mapppedValueCalendar.get(Calendar.YEAR));
-    assertEquals("Mapped date wasn't matched properly", inputDateStr, mappedDateValueStr);
-    assertEquals("Value wasn't put into jsonObj",mappedValue, jsonObj.remove(fieldName));
-    assertTrue("jsonObj is not empty", jsonObj.isEmpty());
-  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
index 2c3f4f5..14f83cf 100644
--- a/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
+++ b/ambari-logsearch/ambari-logsearch-portal/src/main/java/org/apache/ambari/logsearch/LogSearch.java
@@ -30,6 +30,7 @@ import org.apache.ambari.logsearch.common.ManageStartEndTime;
 import org.apache.ambari.logsearch.common.PropertiesHelper;
 import org.apache.ambari.logsearch.conf.ApplicationConfig;
 import org.apache.ambari.logsearch.util.SSLUtil;
+import org.apache.ambari.logsearch.web.listener.LogSearchSessionListener;
 import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 import org.eclipse.jetty.server.Connector;
@@ -66,7 +67,7 @@ public class LogSearch {
 
   private static final String WEB_RESOURCE_FOLDER = "webapps/app";
   private static final String ROOT_CONTEXT = "/";
-  private static final Integer SESSION_TIMEOUT = 30;
+  private static final Integer SESSION_TIMEOUT = 60 * 30;
 
 
   public static void main(String[] argv) {
@@ -136,6 +137,7 @@ public class LogSearch {
     context.setBaseResource(Resource.newResource(webResourceBase));
     context.setContextPath(ROOT_CONTEXT);
     context.setParentLoaderPriority(true);
+    context.addEventListener(new LogSearchSessionListener());
 
     // Configure Spring
     context.addEventListener(new ContextLoaderListener());