Posted to commits@ambari.apache.org by jo...@apache.org on 2014/11/13 21:39:54 UTC

[1/2] ambari git commit: AMBARI-8313 - Remove Nagios SSL Configuration From Ambari Scripts (jonathanhurley)

Repository: ambari
Updated Branches:
  refs/heads/trunk 90b77c72a -> d6b0db0b5


AMBARI-8313 - Remove Nagios SSL Configuration From Ambari Scripts (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d6b0db0b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d6b0db0b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d6b0db0b

Branch: refs/heads/trunk
Commit: d6b0db0b58e1d91ff8cb4002ddc163c5894f60c9
Parents: 14d1a5d
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Thu Nov 13 12:51:47 2014 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Thu Nov 13 15:39:13 2014 -0500

----------------------------------------------------------------------
 .../ComponentSSLConfiguration.java              | 17 ++---
 .../server/configuration/Configuration.java     |  3 -
 ambari-server/src/main/python/ambari-server.py  | 14 ++--
 .../internal/BaseBlueprintProcessorTest.java    | 24 +++----
 .../internal/BlueprintResourceProviderTest.java | 68 ++++++++++----------
 .../src/test/python/TestAmbariServer.py         |  8 +--
 .../nagios/conf.d/hdp_mon_nagios_addons.conf    |  7 ++
 contrib/addons/src/addOns/nagios/plugins/README | 13 ++++
 contrib/addons/src/addOns/nagios/scripts/README | 13 ++++
 9 files changed, 89 insertions(+), 78 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d6b0db0b/ambari-server/src/main/java/org/apache/ambari/server/configuration/ComponentSSLConfiguration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/ComponentSSLConfiguration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/ComponentSSLConfiguration.java
index 9dea43a..a51b2ee 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/ComponentSSLConfiguration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/ComponentSSLConfiguration.java
@@ -18,7 +18,11 @@
 package org.apache.ambari.server.configuration;
 
 /**
- * Configuration for SSL on components (Ganglia & Nagios).
+ * Configuration for SSL communication between Ambari and 3rd party services.
+ * Currently, the following services are supported with SSL communication:
+ * <ul>
+ * <li>Ganglia</li>
+ * </ul>
  */
 public class ComponentSSLConfiguration {
 
@@ -29,7 +33,6 @@ public class ComponentSSLConfiguration {
   private String truststorePassword;
   private String truststoreType;
   private boolean gangliaSSL;
-  private boolean nagiosSSL;
 
   /**
    * The singleton.
@@ -58,7 +61,6 @@ public class ComponentSSLConfiguration {
     truststorePassword = getPassword(configuration);
     truststoreType     = configuration.getProperty(Configuration.SSL_TRUSTSTORE_TYPE_KEY);
     gangliaSSL         = Boolean.parseBoolean(configuration.getProperty(Configuration.GANGLIA_HTTPS_KEY));
-    nagiosSSL          = Boolean.parseBoolean(configuration.getProperty(Configuration.NAGIOS_HTTPS_KEY));
   }
 
 
@@ -101,15 +103,6 @@ public class ComponentSSLConfiguration {
   }
 
   /**
-   * Indicates whether or not Nagios is setup for SSL.
-   *
-   * @return true if Nagios is setup for SSL
-   */
-  public boolean isNagiosSSL() {
-    return nagiosSSL;
-  }
-
-  /**
    * Get the singleton instance.
    *
    * @return the singleton instance
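
After this change the class derives a single SSL flag from the "ganglia.https" property (Configuration.GANGLIA_HTTPS_KEY) in ambari.properties. A minimal Python sketch of that lookup, as a stand-in for the remaining Java logic above (not Ambari code itself):

  def is_ganglia_ssl(configuration):
      # configuration: a dict-like view of ambari.properties (hypothetical helper).
      # Mirrors Boolean.parseBoolean(configuration.getProperty(GANGLIA_HTTPS_KEY)):
      # only the literal string "true" (case-insensitive) enables SSL.
      return str(configuration.get("ganglia.https", "false")).lower() == "true"

  print(is_ganglia_ssl({"ganglia.https": "true"}))   # True
  print(is_ganglia_ssl({}))                          # False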

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6b0db0b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
index 9970d9a..a833c83 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
@@ -190,9 +190,6 @@ public class Configuration {
   public static final String JAVAX_SSL_TRUSTSTORE_PASSWORD = "javax.net.ssl.trustStorePassword";
   public static final String JAVAX_SSL_TRUSTSTORE_TYPE = "javax.net.ssl.trustStoreType";
   public static final String GANGLIA_HTTPS_KEY = "ganglia.https";
-  public static final String NAGIOS_HTTPS_KEY = "nagios.https";
-  public static final String NAGIOS_IGNORE_FOR_SERVICES_KEY = "nagios.ignore_for_services";
-  public static final String NAGIOS_IGNORE_FOR_HOSTS_KEY = "nagios.ignore_for_hosts";
   public static final String SRVR_TWO_WAY_SSL_PORT_DEFAULT = "8441";
   public static final String SRVR_ONE_WAY_SSL_PORT_DEFAULT = "8440";
   public static final String SRVR_CRT_NAME_DEFAULT = "ca.crt";

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6b0db0b/ambari-server/src/main/python/ambari-server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari-server.py b/ambari-server/src/main/python/ambari-server.py
index cf70f48..19b875d 100755
--- a/ambari-server/src/main/python/ambari-server.py
+++ b/ambari-server/src/main/python/ambari-server.py
@@ -74,7 +74,6 @@ SETUP_HTTPS_ACTION = "setup-https"
 LDAP_SETUP_ACTION = "setup-ldap"
 LDAP_SYNC_ACTION = "sync-ldap"
 SETUP_GANGLIA_HTTPS_ACTION = "setup-ganglia-https"
-SETUP_NAGIOS_HTTPS_ACTION = "setup-nagios-https"
 ENCRYPT_PASSWORDS_ACTION = "encrypt-passwords"
 SETUP_SECURITY_ACTION = "setup-security"
 REFRESH_STACK_HASH_ACTION = "refresh-stack-hash"
@@ -235,7 +234,6 @@ DEFAULT_SSL_API_PORT = 8443
 SSL_DATE_FORMAT = '%b  %d %H:%M:%S %Y GMT'
 
 GANGLIA_HTTPS = 'ganglia.https'
-NAGIOS_HTTPS = 'nagios.https'
 
 JDBC_RCA_PASSWORD_ALIAS = "ambari.db.password"
 CLIENT_SECURITY_KEY = "client.security"
@@ -4349,11 +4347,10 @@ def setup_security(args):
   print 'Choose one of the following options: '
   print '  [1] Enable HTTPS for Ambari server.'
   print '  [2] Enable HTTPS for Ganglia service.'
-  print '  [3] Enable HTTPS for Nagios service.'
-  print '  [4] Encrypt passwords stored in ambari.properties file.'
-  print '  [5] Setup Ambari kerberos JAAS configuration.'
+  print '  [3] Encrypt passwords stored in ambari.properties file.'
+  print '  [4] Setup Ambari kerberos JAAS configuration.'
   print '=' * 75
-  choice = get_validated_string_input('Enter choice, (1-5): ', '0', '[1-5]',
+  choice = get_validated_string_input('Enter choice, (1-4): ', '0', '[1-4]',
                                       'Invalid choice', False, False)
 
   if choice == '1':
@@ -4362,11 +4359,8 @@ def setup_security(args):
     setup_component_https("Ganglia", "setup-ganglia-https", GANGLIA_HTTPS,
                          "ganglia_cert")
   elif choice == '3':
-    setup_component_https("Nagios", "setup-nagios-https", NAGIOS_HTTPS,
-                          "nagios_cert")
-  elif choice == '4':
     setup_master_key()
-  elif choice == '5':
+  elif choice == '4':
     setup_ambari_krb5_jaas()
   else:
     raise FatalException('Unknown option for setup-security command.')
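
With the Nagios entry removed, the interactive setup-security menu has four options instead of five, and the former choices 4 and 5 shift up. A minimal sketch of the new numbering (illustrative only, not the ambari-server.py code):

  SECURITY_CHOICES = {
      "1": "Enable HTTPS for Ambari server.",
      "2": "Enable HTTPS for Ganglia service.",
      "3": "Encrypt passwords stored in ambari.properties file.",
      "4": "Setup Ambari kerberos JAAS configuration.",
  }

  def describe_choice(choice):
      # In the real script, an unknown choice raises FatalException with this message.
      return SECURITY_CHOICES.get(choice, "Unknown option for setup-security command.")

  print(describe_choice("3"))  # Encrypt passwords stored in ambari.properties file.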

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6b0db0b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
index d40c29d..8540d8b 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java
@@ -93,8 +93,8 @@ public class BaseBlueprintProcessorTest {
         super.registerConditionalDependencies();
 
         Map<DependencyInfo, String> dependencyConditionalServiceMap = getDependencyConditionalServiceMap();
-        Collection<DependencyInfo> nagiosDependencies = getDependenciesForComponent("FAKE_MONITORING_SERVER");
-        for (DependencyInfo dependency : nagiosDependencies) {
+        Collection<DependencyInfo> monitoringDependencies = getDependenciesForComponent("FAKE_MONITORING_SERVER");
+        for (DependencyInfo dependency : monitoringDependencies) {
           if (dependency.getComponentName().equals("HCAT")) {
             dependencyConditionalServiceMap.put(dependency, "HIVE");
           } else if (dependency.getComponentName().equals("OOZIE_CLIENT")) {
@@ -190,8 +190,8 @@ public class BaseBlueprintProcessorTest {
         super.registerConditionalDependencies();
 
         Map<DependencyInfo, String> dependencyConditionalServiceMap = getDependencyConditionalServiceMap();
-        Collection<DependencyInfo> nagiosDependencies = getDependenciesForComponent("FAKE_MONITORING_SERVER");
-        for (DependencyInfo dependency : nagiosDependencies) {
+        Collection<DependencyInfo> monitoringDependencies = getDependenciesForComponent("FAKE_MONITORING_SERVER");
+        for (DependencyInfo dependency : monitoringDependencies) {
           if (dependency.getComponentName().equals("HCAT")) {
             dependencyConditionalServiceMap.put(dependency, "HIVE");
           } else if (dependency.getComponentName().equals("OOZIE_CLIENT")) {
@@ -285,8 +285,8 @@ public class BaseBlueprintProcessorTest {
         super.registerConditionalDependencies();
 
         Map<DependencyInfo, String> dependencyConditionalServiceMap = getDependencyConditionalServiceMap();
-        Collection<DependencyInfo> nagiosDependencies = getDependenciesForComponent("FAKE_MONITORING_SERVER");
-        for (DependencyInfo dependency : nagiosDependencies) {
+        Collection<DependencyInfo> monitoringDependencies = getDependenciesForComponent("FAKE_MONITORING_SERVER");
+        for (DependencyInfo dependency : monitoringDependencies) {
           if (dependency.getComponentName().equals("HCAT")) {
             dependencyConditionalServiceMap.put(dependency, "HIVE");
           } else if (dependency.getComponentName().equals("OOZIE_CLIENT")) {
@@ -378,8 +378,8 @@ public class BaseBlueprintProcessorTest {
         super.registerConditionalDependencies();
 
         Map<DependencyInfo, String> dependencyConditionalServiceMap = getDependencyConditionalServiceMap();
-        Collection<DependencyInfo> nagiosDependencies = getDependenciesForComponent("FAKE_MONITORING_SERVER");
-        for (DependencyInfo dependency : nagiosDependencies) {
+        Collection<DependencyInfo> monitoringDependencies = getDependenciesForComponent("FAKE_MONITORING_SERVER");
+        for (DependencyInfo dependency : monitoringDependencies) {
           if (dependency.getComponentName().equals("HCAT")) {
             dependencyConditionalServiceMap.put(dependency, "HIVE");
           } else if (dependency.getComponentName().equals("OOZIE_CLIENT")) {
@@ -473,8 +473,8 @@ public class BaseBlueprintProcessorTest {
         super.registerConditionalDependencies();
 
         Map<DependencyInfo, String> dependencyConditionalServiceMap = getDependencyConditionalServiceMap();
-        Collection<DependencyInfo> nagiosDependencies = getDependenciesForComponent("FAKE_MONITORING_SERVER");
-        for (DependencyInfo dependency : nagiosDependencies) {
+        Collection<DependencyInfo> monitoringDependencies = getDependenciesForComponent("FAKE_MONITORING_SERVER");
+        for (DependencyInfo dependency : monitoringDependencies) {
           if (dependency.getComponentName().equals("HCAT")) {
             dependencyConditionalServiceMap.put(dependency, "HIVE");
           } else if (dependency.getComponentName().equals("OOZIE_CLIENT")) {
@@ -566,8 +566,8 @@ public class BaseBlueprintProcessorTest {
         super.registerConditionalDependencies();
 
         Map<DependencyInfo, String> dependencyConditionalServiceMap = getDependencyConditionalServiceMap();
-        Collection<DependencyInfo> nagiosDependencies = getDependenciesForComponent("FAKE_MONITORING_SERVER");
-        for (DependencyInfo dependency : nagiosDependencies) {
+        Collection<DependencyInfo> monitoringDependencies = getDependenciesForComponent("FAKE_MONITORING_SERVER");
+        for (DependencyInfo dependency : monitoringDependencies) {
           if (dependency.getComponentName().equals("HCAT")) {
             dependencyConditionalServiceMap.put(dependency, "HIVE");
           } else if (dependency.getComponentName().equals("OOZIE_CLIENT")) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6b0db0b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
index 9514b21..c620bc6 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java
@@ -18,7 +18,33 @@
 
 package org.apache.ambari.server.controller.internal;
 
-import com.google.gson.Gson;
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.reset;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
@@ -30,6 +56,9 @@ import org.apache.ambari.server.controller.StackServiceComponentRequest;
 import org.apache.ambari.server.controller.StackServiceComponentResponse;
 import org.apache.ambari.server.controller.StackServiceRequest;
 import org.apache.ambari.server.controller.StackServiceResponse;
+import org.apache.ambari.server.controller.internal.BlueprintResourceProvider.BlueprintConfigPopulationStrategy;
+import org.apache.ambari.server.controller.internal.BlueprintResourceProvider.BlueprintConfigPopulationStrategyV1;
+import org.apache.ambari.server.controller.internal.BlueprintResourceProvider.BlueprintConfigPopulationStrategyV2;
 import org.apache.ambari.server.controller.predicate.EqualsPredicate;
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchResourceException;
@@ -54,42 +83,13 @@ import org.apache.ambari.server.state.DependencyInfo;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.utils.StageUtils;
 import org.easymock.Capture;
-
-import static org.easymock.EasyMock.expectLastCall;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertSame;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertNotNull;
-
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.*;
-import static org.easymock.EasyMock.capture;
-import static org.easymock.EasyMock.createMock;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.createStrictMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
-import static org.easymock.EasyMock.verify;
-import static org.junit.Assert.fail;
+import com.google.gson.Gson;
 
 /**
  * BlueprintResourceProvider unit tests.
@@ -1450,7 +1450,7 @@ public class BlueprintResourceProviderTest {
   @Test
   public void testDecidePopulationStrategy_withNewSchema_attributes() throws Exception {
     Map<String, String> configMap = new HashMap<String, String>();
-    configMap.put("global/properties_attributes/final/nagios_contact", "true");
+    configMap.put("global/properties_attributes/final/foo_contact", "true");
 
     BlueprintConfigPopulationStrategy provisioner =
         provider.decidePopulationStrategy(configMap);
@@ -1462,7 +1462,7 @@ public class BlueprintResourceProviderTest {
   @Test
   public void testDecidePopulationStrategy_withNewSchema_properties() throws Exception {
     Map<String, String> configMap = new HashMap<String, String>();
-    configMap.put("global/properties/nagios_contact", "foo@ffl.dsfds");
+    configMap.put("global/properties/foo_contact", "foo@ffl.dsfds");
 
     BlueprintConfigPopulationStrategy provisioner =
         provider.decidePopulationStrategy(configMap);
@@ -1474,7 +1474,7 @@ public class BlueprintResourceProviderTest {
   @Test
   public void testDecidePopulationStrategy_unsupportedSchema() throws Exception {
     Map<String, String> configMap = new HashMap<String, String>();
-    configMap.put("global/properties/lot/nagios_contact", "foo@ffl.dsfds");
+    configMap.put("global/properties/lot/foo_contact", "foo@ffl.dsfds");
     expectedException.expect(IllegalArgumentException.class);
     expectedException.expectMessage(provider.SCHEMA_IS_NOT_SUPPORTED_MESSAGE);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6b0db0b/ambari-server/src/test/python/TestAmbariServer.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestAmbariServer.py b/ambari-server/src/test/python/TestAmbariServer.py
index 0641929..ebce460 100644
--- a/ambari-server/src/test/python/TestAmbariServer.py
+++ b/ambari-server/src/test/python/TestAmbariServer.py
@@ -254,15 +254,9 @@ class TestAmbariServer(TestCase):
 
     get_validated_string_input_mock.return_value = '3'
     ambari_server.setup_security(args)
-    self.assertTrue(setup_component_https.called)
-    setup_component_https.assert_called_with("Nagios", "setup-nagios-https",
-                          ambari_server.NAGIOS_HTTPS, "nagios_cert")
-
-    get_validated_string_input_mock.return_value = '4'
-    ambari_server.setup_security(args)
     self.assertTrue(setup_master_key.called)
 
-    get_validated_string_input_mock.return_value = '5'
+    get_validated_string_input_mock.return_value = '4'
     ambari_server.setup_security(args)
     self.assertTrue(setup_ambari_krb5_jaas.called)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6b0db0b/contrib/addons/src/addOns/nagios/conf.d/hdp_mon_nagios_addons.conf
----------------------------------------------------------------------
diff --git a/contrib/addons/src/addOns/nagios/conf.d/hdp_mon_nagios_addons.conf b/contrib/addons/src/addOns/nagios/conf.d/hdp_mon_nagios_addons.conf
new file mode 100644
index 0000000..fbaeb2a
--- /dev/null
+++ b/contrib/addons/src/addOns/nagios/conf.d/hdp_mon_nagios_addons.conf
@@ -0,0 +1,7 @@
+Alias /ambarinagios /usr/share/hdp
+<Directory /usr/share/hdp>
+  Options None
+  AllowOverride None
+  Order allow,deny
+  Allow from all
+</Directory>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6b0db0b/contrib/addons/src/addOns/nagios/plugins/README
----------------------------------------------------------------------
diff --git a/contrib/addons/src/addOns/nagios/plugins/README b/contrib/addons/src/addOns/nagios/plugins/README
new file mode 100644
index 0000000..74c18ac
--- /dev/null
+++ b/contrib/addons/src/addOns/nagios/plugins/README
@@ -0,0 +1,13 @@
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+This is a placeholder for Nagios contribution plugins.

http://git-wip-us.apache.org/repos/asf/ambari/blob/d6b0db0b/contrib/addons/src/addOns/nagios/scripts/README
----------------------------------------------------------------------
diff --git a/contrib/addons/src/addOns/nagios/scripts/README b/contrib/addons/src/addOns/nagios/scripts/README
new file mode 100644
index 0000000..5962cbd
--- /dev/null
+++ b/contrib/addons/src/addOns/nagios/scripts/README
@@ -0,0 +1,13 @@
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+This is a placeholder for Nagios contribution scripts.


[2/2] ambari git commit: AMBARI-8311 - Alerts: Remove All Nagios References From Agent Code (jonathanhurley)

Posted by jo...@apache.org.
AMBARI-8311 - Alerts: Remove All Nagios References From Agent Code (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/14d1a5de
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/14d1a5de
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/14d1a5de

Branch: refs/heads/trunk
Commit: 14d1a5de49d38a6436c22510e15dd1b531f116cb
Parents: 90b77c7
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Thu Nov 13 11:19:50 2014 -0500
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Thu Nov 13 15:39:13 2014 -0500

----------------------------------------------------------------------
 ambari-agent/conf/unix/ambari-agent.ini         |  5 +-
 .../main/python/ambari_agent/AmbariConfig.py    |  4 --
 .../src/main/python/ambari_agent/HostInfo.py    |  8 +--
 .../ambari_agent/StackVersionsFileHandler.py    |  2 +-
 .../test/python/ambari_agent/TestActionQueue.py | 11 ---
 .../ambari_agent/TestActualConfigHandler.py     |  4 +-
 .../ambari_agent/TestCommandStatusDict.py       |  4 +-
 .../test/python/ambari_agent/TestHostCleanup.py |  8 +--
 .../test/python/ambari_agent/TestHostInfo.py    | 10 +--
 .../test/python/ambari_agent/TestLiveStatus.py  |  4 +-
 .../TestStackVersionsFileHandler.py             | 16 ++---
 .../dummy_files/dummy_current_stack             |  1 -
 .../ambari_agent/dummy_puppet_output_error2.txt | 40 -----------
 .../ambari_agent/dummy_puppet_output_error3.txt | 76 --------------------
 .../ambari_agent/examples/ControllerTester.py   |  6 --
 15 files changed, 26 insertions(+), 173 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/conf/unix/ambari-agent.ini
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/ambari-agent.ini b/ambari-agent/conf/unix/ambari-agent.ini
index fc76d0e..41e2895 100644
--- a/ambari-agent/conf/unix/ambari-agent.ini
+++ b/ambari-agent/conf/unix/ambari-agent.ini
@@ -44,9 +44,8 @@ pidLookupPath=/var/run/
 [heartbeat]
 state_interval=6
 dirs=/etc/hadoop,/etc/hadoop/conf,/etc/hbase,/etc/hcatalog,/etc/hive,/etc/oozie,
-  /etc/sqoop,/etc/ganglia,/etc/nagios,
+  /etc/sqoop,/etc/ganglia,
   /var/run/hadoop,/var/run/zookeeper,/var/run/hbase,/var/run/templeton,/var/run/oozie,
-  /var/log/hadoop,/var/log/zookeeper,/var/log/hbase,/var/run/templeton,/var/log/hive,
-  /var/log/nagios
+  /var/log/hadoop,/var/log/zookeeper,/var/log/hbase,/var/run/templeton,/var/log/hive
 ; 0 - unlimited
 log_lines_count=300

http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py b/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
index ca2e80c..6c79b6b 100644
--- a/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
+++ b/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
@@ -108,7 +108,6 @@ rolesToClass = {
   'MYSQL_SERVER': 'hdp-mysql::server',
   'WEBHCAT_SERVER': 'hdp-templeton::server',
   'DASHBOARD': 'hdp-dashboard',
-  'NAGIOS_SERVER': 'hdp-nagios::server',
   'GANGLIA_SERVER': 'hdp-ganglia::server',
   'GANGLIA_MONITOR': 'hdp-ganglia::monitor',
   'HTTPD': 'hdp-monitor-webserver',
@@ -159,7 +158,6 @@ servicesToPidNames = {
   'ZOOKEEPER_SERVER': 'zookeeper_server.pid',
   'FLUME_SERVER': 'flume-node.pid',
   'TEMPLETON_SERVER': 'templeton.pid',
-  'NAGIOS_SERVER': 'nagios.pid',
   'GANGLIA_SERVER': 'gmetad.pid',
   'GANGLIA_MONITOR': 'gmond.pid',
   'HBASE_MASTER': 'hbase-{USER}-master.pid',
@@ -200,8 +198,6 @@ pidPathesVars = [
    'defaultValue' : '/var/run/ganglia/hdp'},
   {'var' : 'hbase_pid_dir',
    'defaultValue' : '/var/run/hbase'},
-  {'var' : '',
-   'defaultValue' : '/var/run/nagios'},
   {'var' : 'zk_pid_dir',
    'defaultValue' : '/var/run/zookeeper'},
   {'var' : 'oozie_pid_dir',

http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/src/main/python/ambari_agent/HostInfo.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/HostInfo.py b/ambari-agent/src/main/python/ambari_agent/HostInfo.py
index 5e06ffa..89e22b1 100644
--- a/ambari-agent/src/main/python/ambari_agent/HostInfo.py
+++ b/ambari-agent/src/main/python/ambari_agent/HostInfo.py
@@ -44,7 +44,7 @@ SERVICE_CMD = "service"
 class HostInfo:
   # List of project names to be used to find alternatives folders etc.
   DEFAULT_PROJECT_NAMES = [
-    "hadoop*", "hadoop", "hbase", "hcatalog", "hive", "ganglia", "nagios",
+    "hadoop*", "hadoop", "hbase", "hcatalog", "hive", "ganglia",
     "oozie", "sqoop", "hue", "zookeeper", "mapred", "hdfs", "flume",
     "storm", "hive-hcatalog", "tez", "falcon", "ambari_qa", "hadoop_deploy",
     "rrdcached", "hcat", "ambari-qa", "sqoop-ambari-qa", "sqoop-ambari_qa",
@@ -58,7 +58,7 @@ class HostInfo:
 
   # Set of default users (need to be replaced with the configured user names)
   DEFAULT_USERS = [
-    "nagios", "hive", "ambari-qa", "oozie", "hbase", "hcat", "mapred",
+    "hive", "ambari-qa", "oozie", "hbase", "hcat", "mapred",
     "hdfs", "rrdcached", "zookeeper", "flume", "sqoop", "sqoop2",
     "hue", "yarn", "tez", "storm", "falcon", "kafka","knox"
   ]
@@ -86,8 +86,8 @@ class HostInfo:
 
   # Additional packages to look for (search packages that start with these)
   ADDITIONAL_PACKAGES = [
-    "rrdtool", "rrdtool-python", "nagios", "ganglia", "gmond", "gweb", "libconfuse", "ambari-log4j",
-    "hadoop", "zookeeper", "oozie", "webhcat"
+    "rrdtool", "rrdtool-python", "ganglia", "gmond", "gweb", "libconfuse", 
+    "ambari-log4j", "hadoop", "zookeeper", "oozie", "webhcat"
   ]
 
   # ignore packages from repos whose names start with these strings

http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/src/main/python/ambari_agent/StackVersionsFileHandler.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/StackVersionsFileHandler.py b/ambari-agent/src/main/python/ambari_agent/StackVersionsFileHandler.py
index 77c5253..524755d 100644
--- a/ambari-agent/src/main/python/ambari_agent/StackVersionsFileHandler.py
+++ b/ambari-agent/src/main/python/ambari_agent/StackVersionsFileHandler.py
@@ -89,7 +89,7 @@ class StackVersionsFileHandler:
   def extract(self, statement):
     '''
     Extracts <Component>, <HDPstack version> values from lines like
-    NAGIOS	StackVersion-1.3.0
+    GANGLIA	StackVersion-1.3.0
     '''
     parts = statement.strip().split()
     if len(parts) != 2:
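
The docstring change only swaps the example component; the parsing itself is unchanged: strip the line, split on whitespace, and expect exactly two fields. A minimal standalone sketch of that behavior (assumption: DEFAULT_VER stands in for the handler's default sentinel, as exercised by the test below):

  DEFAULT_VER = ""  # hypothetical sentinel; the real handler defines its own default

  def extract(statement):
      # Lines look like: GANGLIA_SERVER  {"stackName":"HDP","stackVersion":"1.3.0"}
      parts = statement.strip().split()
      if len(parts) != 2:
          return DEFAULT_VER, DEFAULT_VER
      return parts[0], parts[1]

  comp, ver = extract('   GANGLIA_SERVER \t  {"stackName":"HDP","stackVersion":"1.3.0"}  ')
  print(comp, ver)  # GANGLIA_SERVER {"stackName":"HDP","stackVersion":"1.3.0"}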

http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
index 874b4c9..034dba3 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
@@ -101,17 +101,6 @@ class TestActionQueue(TestCase):
     'hostLevelParams': {}
     }
 
-  nagios_install_command = {
-    'commandType': 'EXECUTION_COMMAND',
-    'role': u'NAGIOS',
-    'roleCommand': u'INSTALL',
-    'commandId': '1-1',
-    'taskId': 6,
-    'clusterName': u'cc',
-    'serviceName': u'HDFS',
-    'hostLevelParams': {}
-    }
-
   hbase_install_command = {
     'commandType': 'EXECUTION_COMMAND',
     'role': u'HBASE',

http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py b/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
index c659a03..166fd57 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestActualConfigHandler.py
@@ -34,7 +34,7 @@ class TestActualConfigHandler(TestCase):
   def setUp(self):
     LiveStatus.SERVICES = [
       "HDFS", "MAPREDUCE", "GANGLIA", "HBASE",
-      "NAGIOS", "ZOOKEEPER", "OOZIE",
+      "ZOOKEEPER", "OOZIE",
       "KERBEROS", "TEMPLETON", "HIVE",
       "YARN", "MAPREDUCE2", "FLUME", "TEZ",
       "FALCON", "STORM"
@@ -90,8 +90,6 @@ class TestActualConfigHandler(TestCase):
        "componentName" : "HBASE_MASTER"},
       {"serviceName" : "HBASE",
        "componentName" : "HBASE_REGIONSERVER"},
-      {"serviceName" : "NAGIOS",
-       "componentName" : "NAGIOS_SERVER"},
       {"serviceName" : "FLUME",
        "componentName" : "FLUME_SERVER"},
       {"serviceName" : "ZOOKEEPER",

http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/src/test/python/ambari_agent/TestCommandStatusDict.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestCommandStatusDict.py b/ambari-agent/src/test/python/ambari_agent/TestCommandStatusDict.py
index 32df30b..953f088 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestCommandStatusDict.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestCommandStatusDict.py
@@ -63,7 +63,7 @@ class TestCommandStatusDict(TestCase):
       'roleCommand': u'INSTALL',
       'serviceName': u'HDFS',
       'stderr': '',
-      'stdout': "notice: /Stage[1]/Hdp::Iptables/Service[iptables]/ensure: ensure changed 'running' to 'stopped'\nnotice: /Stage[1]/Hdp/File[/tmp/changeUid.sh]/ensure: defined content as '{md5}32b994a2e970f8acc3c91c198b484654'\nnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Package[snappy]/Hdp::Package::Process_pkg[snappy]/Package[snappy]/ensure: created\nnotice: /Stage[1]/Hdp/Hdp::Group[nagios_group]/Group[nagios_group]/ensure: created\nnotice: /Stage[1]/Hdp/Hdp::User[nagios_user]/User[nagios]/ensure: created\nnotice: /Stage[1]/Hdp::Snmp/Hdp::Package[snmp]/Hdp::Package::Process_pkg[snmp]/Package[net-snmp-utils]/ensure: created",
+      'stdout': "notice: /Stage[1]/Hdp::Iptables/Service[iptables]/ensure: ensure changed 'running' to 'stopped'\nnotice: /Stage[1]/Hdp/File[/tmp/changeUid.sh]/ensure: defined content as '{md5}32b994a2e970f8acc3c91c198b484654'\nnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Package[snappy]/Hdp::Package::Process_pkg[snappy]/Package[snappy]/ensure: created\nnotice: /Stage[1]/Hdp::Snmp/Hdp::Package[snmp]/Hdp::Package::Process_pkg[snmp]/Package[net-snmp-utils]/ensure: created",
       'taskId': 5
     }
     command_in_progress1_report = {
@@ -164,7 +164,7 @@ class TestCommandStatusDict(TestCase):
       'roleCommand': u'INSTALL',
       'serviceName': u'HDFS',
       'stderr': '',
-      'stdout': "notice: /Stage[1]/Hdp::Iptables/Service[iptables]/ensure: ensure changed 'running' to 'stopped'\nnotice: /Stage[1]/Hdp/File[/tmp/changeUid.sh]/ensure: defined content as '{md5}32b994a2e970f8acc3c91c198b484654'\nnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Package[snappy]/Hdp::Package::Process_pkg[snappy]/Package[snappy]/ensure: created\nnotice: /Stage[1]/Hdp/Hdp::Group[nagios_group]/Group[nagios_group]/ensure: created\nnotice: /Stage[1]/Hdp/Hdp::User[nagios_user]/User[nagios]/ensure: created\nnotice: /Stage[1]/Hdp::Snmp/Hdp::Package[snmp]/Hdp::Package::Process_pkg[snmp]/Package[net-snmp-utils]/ensure: created",
+      'stdout': "notice: /Stage[1]/Hdp::Iptables/Service[iptables]/ensure: ensure changed 'running' to 'stopped'\nnotice: /Stage[1]/Hdp/File[/tmp/changeUid.sh]/ensure: defined content as '{md5}32b994a2e970f8acc3c91c198b484654'\nnotice: /Stage[1]/Hdp::Snappy::Package/Hdp::Package[snappy]/Hdp::Package::Process_pkg[snappy]/Package[snappy]/ensure: created\nnotice: /Stage[1]/Hdp::Snmp/Hdp::Package[snmp]/Hdp::Package::Process_pkg[snmp]/Package[net-snmp-utils]/ensure: created",
       'taskId': 5
     }
     command_in_progress1_report = {

http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py b/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
index c958412..5e6f7c4 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestHostCleanup.py
@@ -49,13 +49,13 @@ hostcheck_result_fileContent = """[processes]
 proc_list = 323,434
 
 [users]
-usr_list = rrdcached,ambari-qa,hive,oozie,hbase,hcat,mysql,mapred,hdfs,zookeeper,sqoop,nagios
+usr_list = rrdcached,ambari-qa,hive,oozie,hbase,hcat,mysql,mapred,hdfs,zookeeper,sqoop
 
 [repositories]
 repo_list = HDP-1.3.0,HDP-epel
 
 [directories]
-dir_list = /etc/hadoop,/etc/hbase,/etc/hcatalog,/tmp/hive,/tmp/nagios,/var/nagios
+dir_list = /etc/hadoop,/etc/hbase,/etc/hcatalog,/tmp/hive
 
 [alternatives]
 symlink_list = hcatalog-conf,hadoop-default,hadoop-log,oozie-conf
@@ -210,7 +210,7 @@ class TestHostCleanup(TestCase):
                       do_erase_alternatives_method, get_additional_dirs_method, clear_cache_mock):
     out = StringIO.StringIO()
     sys.stdout = out
-    get_additional_dirs_method.return_value = ['/tmp/hadoop-nagios','/tmp/hsperfdata_007']
+    get_additional_dirs_method.return_value = ['/tmp/hadoop-yarn','/tmp/hsperfdata_007']
     propertyMap = {PACKAGE_SECTION:['abcd', 'pqrst'], USER_SECTION:['abcd', 'pqrst'],
                    REPO_SECTION:['abcd', 'pqrst'], DIR_SECTION:['abcd', 'pqrst'],
                    PROCESS_SECTION:['abcd', 'pqrst'],
@@ -227,7 +227,7 @@ class TestHostCleanup(TestCase):
     self.assertTrue(do_erase_packages_method.called)
     self.assertTrue(do_kill_processes_method.called)
     self.assertTrue(do_erase_alternatives_method.called)
-    calls = [call(['decf']), call(['abcd', 'pqrst']), call(['/tmp/hadoop-nagios','/tmp/hsperfdata_007'])]
+    calls = [call(['decf']), call(['abcd', 'pqrst']), call(['/tmp/hadoop-yarn','/tmp/hsperfdata_007'])]
     do_erase_dir_silent_method.assert_has_calls(calls)
     do_erase_packages_method.assert_called_once_with(['abcd', 'pqrst'])
     do_erase_files_silent_method.assert_called_once_with(['abcd', 'pqrst'])

http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py b/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
index 8cf9220..afacf38 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestHostInfo.py
@@ -90,8 +90,8 @@ class TestHostInfo(TestCase):
       ["hadoop-a", "2.3", "HDP"], ["zk", "3.1", "HDP"], ["webhcat", "3.1", "HDP"],
       ["hadoop-b", "2.3", "HDP-epel"], ["epel", "3.1", "HDP-epel"], ["epel-2", "3.1", "HDP-epel"],
       ["hadoop-c", "2.3", "Ambari"], ["ambari-s", "3.1", "Ambari"],
-      ["nagios", "2.3", "NAGIOS"], ["rrd", "3.1", "RRD"],
-      ["keeper-1", "2.3", "NAGIOS"], ["keeper-2", "3.1", "base"],["def-def.x86", "2.2", "DEF.3"],
+      ["ganglia", "2.3", "GANGLIA"], ["rrd", "3.1", "RRD"],
+      ["keeper-1", "2.3", "GANGLIA"], ["keeper-2", "3.1", "base"],["def-def.x86", "2.2", "DEF.3"],
       ["def.1", "1.2", "NewDEF"]
     ]
     availablePackages = [
@@ -103,7 +103,7 @@ class TestHostInfo(TestCase):
 
     packagesToLook = ["webhcat", "hadoop", "*-def"]
     reposToIgnore = ["ambari"]
-    additionalPackages = ["nagios", "rrd"]
+    additionalPackages = ["ganglia", "rrd"]
 
     repos = []
     packageAnalyzer.getInstalledRepos(packagesToLook, installedPackages + availablePackages, reposToIgnore, repos)
@@ -121,13 +121,13 @@ class TestHostInfo(TestCase):
     additionalPkgsInstalled = packageAnalyzer.getInstalledPkgsByNames(
         additionalPackages, installedPackages)
     self.assertEqual(2, len(additionalPkgsInstalled))
-    expected = ["nagios", "rrd"]
+    expected = ["ganglia", "rrd"]
     for additionalPkg in expected:
       self.assertTrue(additionalPkg in additionalPkgsInstalled)
 
     allPackages = list(set(packagesInstalled + additionalPkgsInstalled))
     self.assertEqual(7, len(allPackages))
-    expected = ["hadoop-a", "zk", "webhcat", "hadoop-b", "nagios", "rrd", "def-def.x86"]
+    expected = ["hadoop-a", "zk", "webhcat", "hadoop-b", "ganglia", "rrd", "def-def.x86"]
     for package in expected:
       self.assertTrue(package in allPackages)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py b/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
index 9dfb47a..a080d49 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestLiveStatus.py
@@ -36,7 +36,7 @@ class TestLiveStatus(TestCase):
     sys.stdout = out
     LiveStatus.SERVICES = [
       "HDFS", "MAPREDUCE", "GANGLIA", "HBASE",
-      "NAGIOS", "ZOOKEEPER", "OOZIE",
+      "ZOOKEEPER", "OOZIE",
       "KERBEROS", "TEMPLETON", "HIVE",
       "YARN", "MAPREDUCE2", "FLUME", "TEZ",
       "FALCON", "STORM"
@@ -92,8 +92,6 @@ class TestLiveStatus(TestCase):
        "componentName" : "HBASE_MASTER"},
       {"serviceName" : "HBASE",
        "componentName" : "HBASE_REGIONSERVER"},
-      {"serviceName" : "NAGIOS",
-       "componentName" : "NAGIOS_SERVER"},
       {"serviceName" : "FLUME",
        "componentName" : "FLUME_SERVER"},
       {"serviceName" : "ZOOKEEPER",

http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/src/test/python/ambari_agent/TestStackVersionsFileHandler.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestStackVersionsFileHandler.py b/ambari-agent/src/test/python/ambari_agent/TestStackVersionsFileHandler.py
index 160b929..f01fd3e 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestStackVersionsFileHandler.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestStackVersionsFileHandler.py
@@ -41,8 +41,6 @@ class TestStackVersionsFileHandler(TestCase):
   @patch.object(stackVersionsFileHandler, 'touch_file')
   def test_read_stack_version(self, touch_method):
     stackVersionsFileHandler.versionsFilePath = dummyVersionsFile
-    result = stackVersionsFileHandler.read_stack_version("NAGIOS_SERVER")
-    self.assertEquals(result, '{"stackName":"HDP","stackVersion":"1.2.1"}')
     result = stackVersionsFileHandler.read_stack_version("GANGLIA_SERVER")
     self.assertEquals(result, '{"stackName":"HDP","stackVersion":"1.2.2"}')
     result = stackVersionsFileHandler.read_stack_version("NOTEXISTING")
@@ -54,21 +52,19 @@ class TestStackVersionsFileHandler(TestCase):
   def test_read_all_stack_versions(self, touch_method):
     stackVersionsFileHandler.versionsFilePath = dummyVersionsFile
     result = stackVersionsFileHandler.read_all_stack_versions()
-    self.assertEquals(len(result.keys()), 4)
-    self.assertEquals(result["NAGIOS_SERVER"],
-          '{"stackName":"HDP","stackVersion":"1.2.1"}')
+    self.assertEquals(len(result.keys()), 3)
     self.assertEquals(result["HCATALOG"],
           '{"stackName":"HDP","stackVersion":"1.2.2"}')
     self.assertTrue(touch_method.called)
 
 
   def test_extract(self):
-    s = '   NAGIOS_SERVER	\t  {"stackName":"HDP","stackVersion":"1.3.0"}  '
+    s = '   GANGLIA_SERVER	\t  {"stackName":"HDP","stackVersion":"1.3.0"}  '
     comp, ver = stackVersionsFileHandler.extract(s)
-    self.assertEqual(comp, "NAGIOS_SERVER")
+    self.assertEqual(comp, "GANGLIA_SERVER")
     self.assertEqual(ver, '{"stackName":"HDP","stackVersion":"1.3.0"}')
     # testing wrong value
-    s = "   NAGIOS_SERVER	"
+    s = "   GANGLIA_SERVER	"
     comp, ver = stackVersionsFileHandler.extract(s)
     self.assertEqual(comp, stackVersionsFileHandler.DEFAULT_VER)
     self.assertEqual(ver, stackVersionsFileHandler.DEFAULT_VER)
@@ -96,7 +92,7 @@ class TestStackVersionsFileHandler(TestCase):
       os.path.dirname(tmpfile)
     stackVersionsFileHandler.touch_file()
     stackVersionsFileHandler.write_stack_version(
-      "NAGIOS_SERVER", '"stackVersion":"1.3.0"')
+      "GANGLIA_SERVER", '"stackVersion":"1.3.0"')
     # Checking if backup file exists
     expectedBackupFile = tmpfile + ".bak"
     self.assertTrue(os.path.isfile(expectedBackupFile))
@@ -104,7 +100,7 @@ class TestStackVersionsFileHandler(TestCase):
     # Checking content of created file
     content = stackVersionsFileHandler.read_all_stack_versions()
     self.assertEquals(len(content), 1)
-    self.assertEqual(content['NAGIOS_SERVER'], '"stackVersion":"1.3.0"')
+    self.assertEqual(content['GANGLIA_SERVER'], '"stackVersion":"1.3.0"')
     self.assertTrue(os.path.isfile(tmpfile))
     os.remove(tmpfile)
     # Restoring old values

http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/src/test/python/ambari_agent/dummy_files/dummy_current_stack
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/dummy_files/dummy_current_stack b/ambari-agent/src/test/python/ambari_agent/dummy_files/dummy_current_stack
index 7123c53..b468638 100644
--- a/ambari-agent/src/test/python/ambari_agent/dummy_files/dummy_current_stack
+++ b/ambari-agent/src/test/python/ambari_agent/dummy_files/dummy_current_stack
@@ -17,6 +17,5 @@ limitations under the License.
 '''
 
 DATANODE    {"stackName":"HDP","stackVersion":"1.2.0"}
-NAGIOS_SERVER   {"stackName":"HDP","stackVersion":"1.2.1"}
 HCATALOG    {"stackName":"HDP","stackVersion":"1.2.2"}
 GANGLIA_SERVER  {"stackName":"HDP","stackVersion":"1.2.2"}

http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/src/test/python/ambari_agent/dummy_puppet_output_error2.txt
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/dummy_puppet_output_error2.txt b/ambari-agent/src/test/python/ambari_agent/dummy_puppet_output_error2.txt
deleted file mode 100644
index 19ae347..0000000
--- a/ambari-agent/src/test/python/ambari_agent/dummy_puppet_output_error2.txt
+++ /dev/null
@@ -1,40 +0,0 @@
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:57 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:57 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:57 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-[0;36mnotice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred]/returns: Bad connection to FS. command aborted. exception: Call to dev.hortonworks.com/10.0.2.15:8020 failed on connection exception: java.net.ConnectException: Connection refused
-err: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred]/returns: change from notrun to 0 failed: hadoop --config /etc/hadoop/conf fs -mkdir /mapred returned 255 instead of one of [0] at /var/lib/ambari-agent/puppet/modules/hdp/manifests/init.pp:267
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /mapred::end]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /mapred::end]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -chown mapred /mapred::begin]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -chown mapred /mapred::begin]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]/Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]/Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -chown mapred /mapred::end]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred]/Hdp-hadoop::Exec-hadoop[fs -chown mapred /mapred]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -chown mapred /mapred]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -chown mapred /mapred::end]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:50 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:51 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:52 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:53 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:54 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:55 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:56 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:57 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 7 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:58 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 8 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: 12/11/10 08:57:59 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 9 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: Bad connection to FS. command aborted. exception: Call to dev.hortonworks.com/10.0.2.15:8020 failed on connection exception: java.net.ConnectException: Connection refused
-err: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/returns: change from notrun to 0 failed: hadoop --config /etc/hadoop/conf fs -mkdir /tmp returned 255 instead of one of [0] at /var/lib/ambari-agent/puppet/modules/hdp/manifests/init.pp:267
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /tmp::end]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/tmp]/Hdp-hadoop::Exec-hadoop[fs -mkdir /tmp]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /tmp]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /tmp::end]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system::begin]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system::begin]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system::end]: Dependency Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred] has failures: true
-warning: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/mapred/system]/Hdp-hadoop::Exec-hadoop[fs -mkdir /mapred/system]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system]/Anchor[hdp::exec::hadoop --config /etc/hadoop/conf fs -mkdir /mapred/system::end]: Skipping because of failed dependencies
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: 12/11/10 08:58:14 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: 12/11/10 08:58:15 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: 12/11/10 08:58:16 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 2 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: 12/11/10 08:58:17 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 3 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: 12/11/10 08:58:18 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 4 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: 12/11/10 08:58:19 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 5 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)
-notice: /Stage[2]/Hdp-hadoop::Namenode/Hdp-hadoop::Namenode::Create_app_directories[create_app_directories]/Hdp-hadoop::Hdfs::Directory[/user/ambari_qa]/Hdp-hadoop::Exec-hadoop[fs -mkdir /user/ambari_qa]/Hdp::Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/Exec[hadoop --config /etc/hadoop/conf fs -mkdir /user/ambari_qa]/returns: 12/11/10 08:58:20 INFO ipc.Client: Retrying connect to server: dev.hortonworks.com/10.0.2.15:8020. Already tried 6 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1 SECONDS)

http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/src/test/python/ambari_agent/dummy_puppet_output_error3.txt
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/dummy_puppet_output_error3.txt b/ambari-agent/src/test/python/ambari_agent/dummy_puppet_output_error3.txt
deleted file mode 100644
index 5faf417..0000000
--- a/ambari-agent/src/test/python/ambari_agent/dummy_puppet_output_error3.txt
+++ /dev/null
@@ -1,76 +0,0 @@
-ESC[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Packages/Hdp-nagios::Server::Package[nagios-plugins]/Hdp::Package[nagios-plugins]/Hdp:
-:Package::Process_pkg[nagios-plugins]/Package[nagios-plugins-1.4.9]/ensure: createdESC[0mESC[1;35merr: /Stage[2]/Hdp-nagios::Server::Packages/Hdp-nagios::Server::Package[nagios-addons]/Hdp::Package[nagios-addons]/Hdp::Pack
-age::Process_pkg[nagios-addons]/Package[hdp_mon_nagios_addons]/ensure: change from absent to present failed: Execution of '/usr/bin/yum -d 0 -e 0 -y install hdp_mon_nagios_addons' returned 1:
-Error Downloading Packages:
-  hdp_mon_nagios_addons-0.0.2.15-1.noarch: failure: noarch/hdp_mon/hdp_mon_nagios_addons-0.0.2.15-1.noarch.rpm from AMBARI.dev-1.x: [Errno 256] No more mirrors to try.
-ESC[0m
-ESC[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Packages/Hdp-nagios::Server::Package[nagios-addons]/Hdp::Package[nagios-addons]/Hdp::Package::Process_pkg[nagios-addons]/Anchor[hdp::package::nagios-addons::end]: Dependency Package[hdp_mon_nagios_addons] has failures:
-trueESC[0mESC[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Packages/Hdp-nagios::Server::Package[nagios-addons]/Hdp::Package[nagios-addons]/Hdp::
-Package::Process_pkg[nagios-addons]/Anchor[hdp::package::nagios-addons::end]: Skipping because of failed dependenciesESC[0mESC[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Packages/Anchor[hdp-nagios::server::packages::end]: Dependency Package[hdp_mon
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-nagios::Server::Web_permisssions/Hdp::Exec[htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin]/Anchor[hdp::exec::htpasswd -c -b  /etc/nagios/htpasswd.users nagiosadmin admin::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::begin]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Exec[monitor webserver restart]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Dependency Package[hdp_mon_nagios_addons] has failures: true\u001B[0m
-\u001B[0;33mwarning: /Stage[2]/Hdp-monitor-webserver/Hdp::Exec[monitor webserver restart]/Anchor[hdp::exec::monitor webserver restart::end]: Skipping because of failed dependencies\u001B[0m
-\u001B[0;36mnotice: Finished catalog run in 49.63

http://git-wip-us.apache.org/repos/asf/ambari/blob/14d1a5de/ambari-agent/src/test/python/ambari_agent/examples/ControllerTester.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/examples/ControllerTester.py b/ambari-agent/src/test/python/ambari_agent/examples/ControllerTester.py
index a3163f4..8103872 100644
--- a/ambari-agent/src/test/python/ambari_agent/examples/ControllerTester.py
+++ b/ambari-agent/src/test/python/ambari_agent/examples/ControllerTester.py
@@ -74,9 +74,6 @@ responces = [
           "ganglia_server_host": [
               "dev.hortonworks.com"
           ],
-          "nagios_server_host": [
-              "dev.hortonworks.com"
-          ],
           "namenode_host": [
               "dev.hortonworks.com"
           ],
@@ -212,9 +209,6 @@ if __name__ == '__main__':
 #          "ganglia_server_host": [
 #              "dev.hortonworks.com"
 #          ],
-#          "nagios_server_host": [
-#              "dev.hortonworks.com"
-#          ],
 #          "namenode_host": [
 #              "dev.hortonworks.com"
 #          ],