Posted to commits@ambari.apache.org by nc...@apache.org on 2016/02/18 14:33:20 UTC

[01/33] ambari git commit: AMBARI-15051. Improvements and fixes for database check. (vbrodetskyi)

Repository: ambari
Updated Branches:
  refs/heads/branch-dev-patch-upgrade e4d1475ea -> 418745d12


AMBARI-15051. Improvements and fixes for database check. (vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4761fe7c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4761fe7c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4761fe7c

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 4761fe7c40b10d4a5c1f7b2564df475c4a66222b
Parents: 0ea255c
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Mon Feb 15 14:51:36 2016 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Mon Feb 15 14:51:36 2016 +0200

----------------------------------------------------------------------
 ambari-server/conf/unix/log4j.properties        |   9 ++
 ambari-server/conf/windows/log4j.properties     |   9 ++
 ambari-server/src/main/conf/log4j.properties    |   9 ++
 .../server/checks/CheckDatabaseHelper.java      | 155 +++++++++++++------
 .../main/python/ambari_server/checkDatabase.py  |   7 +-
 .../server/checks/CheckDatabaseHelperTest.java  |  29 ++--
 .../src/test/python/TestAmbariServer.py         |   3 +-
 7 files changed, 152 insertions(+), 69 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4761fe7c/ambari-server/conf/unix/log4j.properties
----------------------------------------------------------------------
diff --git a/ambari-server/conf/unix/log4j.properties b/ambari-server/conf/unix/log4j.properties
index c87b1f4..18ec33e 100644
--- a/ambari-server/conf/unix/log4j.properties
+++ b/ambari-server/conf/unix/log4j.properties
@@ -23,6 +23,7 @@ ambari.log.file=ambari-server.log
 ambari.config-changes.file=ambari-config-changes.log
 ambari.alerts.file=ambari-alerts.log
 ambari.eclipselink.file=ambari-eclipselink.log
+ambari.dbcheck.file=ambari-server-check-database.log
 
 log4j.rootLogger=INFO,file
 
@@ -50,6 +51,14 @@ log4j.appender.alerts.File=${ambari.log.dir}/${ambari.alerts.file}
 log4j.appender.alerts.layout=org.apache.log4j.PatternLayout
 log4j.appender.alerts.layout.ConversionPattern=%d{ISO8601} %m%n
 
+# Log database check process
+log4j.logger.org.apache.ambari.server.checks.CheckDatabaseHelper=INFO, dbcheck
+log4j.additivity.org.apache.ambari.server.checks.CheckDatabaseHelper=false
+log4j.appender.dbcheck=org.apache.log4j.FileAppender
+log4j.appender.dbcheck.File=${ambari.log.dir}/${ambari.dbcheck.file}
+log4j.appender.dbcheck.layout=org.apache.log4j.PatternLayout
+log4j.appender.dbcheck.layout.ConversionPattern=%d{ISO8601} %m%n
+
 # EclipsLink -> slf4j bridge
 log4j.logger.eclipselink=TRACE,eclipselink
 log4j.additivity.eclipselink=false

http://git-wip-us.apache.org/repos/asf/ambari/blob/4761fe7c/ambari-server/conf/windows/log4j.properties
----------------------------------------------------------------------
diff --git a/ambari-server/conf/windows/log4j.properties b/ambari-server/conf/windows/log4j.properties
index 8a69508..09505cf 100644
--- a/ambari-server/conf/windows/log4j.properties
+++ b/ambari-server/conf/windows/log4j.properties
@@ -23,6 +23,7 @@ ambari.log.file=ambari-server.log
 ambari.config-changes.file=ambari-config-changes.log
 ambari.alerts.file=ambari-alerts.log
 ambari.eclipselink.file=ambari-eclipselink.log
+ambari.dbcheck.file=ambari-server-check-database.log
 
 # Define the root logger to the system property "ambari.root.logger".
 log4j.rootLogger=${ambari.root.logger}
@@ -76,6 +77,14 @@ log4j.appender.alerts.File=${ambari.log.dir}\${ambari.alerts.file}
 log4j.appender.alerts.layout=org.apache.log4j.PatternLayout
 log4j.appender.alerts.layout.ConversionPattern=%d{ISO8601} %m%n
 
+# Log database check process
+log4j.logger.org.apache.ambari.server.checks.CheckDatabaseHelper=INFO, dbcheck
+log4j.additivity.org.apache.ambari.server.checks.CheckDatabaseHelper=false
+log4j.appender.dbcheck=org.apache.log4j.FileAppender
+log4j.appender.dbcheck.File=${ambari.log.dir}/${ambari.dbcheck.file}
+log4j.appender.dbcheck.layout=org.apache.log4j.PatternLayout
+log4j.appender.dbcheck.layout.ConversionPattern=%d{ISO8601} %m%n
+
 # EclipsLink -> slf4j bridge
 log4j.logger.eclipselink=TRACE,eclipselink
 log4j.additivity.eclipselink=false

http://git-wip-us.apache.org/repos/asf/ambari/blob/4761fe7c/ambari-server/src/main/conf/log4j.properties
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/conf/log4j.properties b/ambari-server/src/main/conf/log4j.properties
index 11e8d51..1211fe3 100644
--- a/ambari-server/src/main/conf/log4j.properties
+++ b/ambari-server/src/main/conf/log4j.properties
@@ -23,6 +23,7 @@ ambari.log.file=ambari-server.log
 ambari.config-changes.file=ambari-config-changes.log
 ambari.alerts.file=ambari-alerts.log
 ambari.eclipselink.file=ambari-eclipselink.log
+ambari.dbcheck.file=ambari-server-check-database.log
 
 # Define the root logger to the system property "ambari.root.logger".
 log4j.rootLogger=${ambari.root.logger}
@@ -76,6 +77,14 @@ log4j.appender.alerts.File=${ambari.log.dir}/${ambari.alerts.file}
 log4j.appender.alerts.layout=org.apache.log4j.PatternLayout
 log4j.appender.alerts.layout.ConversionPattern=%d{ISO8601} %m%n
 
+# Log database check process
+log4j.logger.org.apache.ambari.server.checks.CheckDatabaseHelper=INFO, dbcheck
+log4j.additivity.org.apache.ambari.server.checks.CheckDatabaseHelper=false
+log4j.appender.dbcheck=org.apache.log4j.FileAppender
+log4j.appender.dbcheck.File=${ambari.log.dir}/${ambari.dbcheck.file}
+log4j.appender.dbcheck.layout=org.apache.log4j.PatternLayout
+log4j.appender.dbcheck.layout.ConversionPattern=%d{ISO8601} %m%n
+
 # EclipsLink -> slf4j bridge
 log4j.logger.eclipselink=TRACE,eclipselink
 log4j.additivity.eclipselink=false
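
The three log4j.properties hunks above make the same change for the unix, windows, and default configs: declare ambari.dbcheck.file, route the org.apache.ambari.server.checks.CheckDatabaseHelper logger to a dedicated "dbcheck" FileAppender, and set additivity to false so database-check output stays out of ambari-server.log. A minimal sketch of the consuming side, assuming the usual SLF4J lookup (the LOG field declaration itself is not part of this diff):

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class CheckDatabaseHelper {
    // The logger name (this class's fully qualified name) must match the
    // log4j.logger.* key above for the "dbcheck" appender to receive it;
    // additivity=false keeps these messages out of the root logger's file.
    private static final Logger LOG = LoggerFactory.getLogger(CheckDatabaseHelper.class);

    public static void main(String[] args) {
      LOG.info("This line lands in ambari-server-check-database.log");
    }
  }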

http://git-wip-us.apache.org/repos/asf/ambari/blob/4761fe7c/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDatabaseHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDatabaseHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDatabaseHelper.java
index 9213738..0396767 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDatabaseHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDatabaseHelper.java
@@ -37,11 +37,9 @@ import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
-import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
@@ -60,6 +58,7 @@ public class CheckDatabaseHelper {
   private Connection connection;
   private AmbariMetaInfo ambariMetaInfo;
   private Injector injector;
+  private boolean errorAvailable = false;
 
   @Inject
   public CheckDatabaseHelper(DBAccessor dbAccessor,
@@ -112,6 +111,14 @@ public class CheckDatabaseHelper {
     persistService.stop();
   }
 
+  protected boolean isErrorAvailable() {
+    return errorAvailable;
+  }
+
+  protected void setErrorAvailable(boolean errorAvailable) {
+    this.errorAvailable = errorAvailable;
+  }
+
   /*
   * This method checks if all configurations that we have in clusterconfig table
   * have at least one mapping in clusterconfigmapping table. If we found not mapped config
@@ -169,6 +176,7 @@ public class CheckDatabaseHelper {
       for (String clusterName : configsSelectedMoreThanOnce.keySet()) {
         LOG.error(String.format("You have config(s), in cluster %s, that is(are) selected more than once in clusterconfigmapping: %s",
                 clusterName ,StringUtils.join(configsSelectedMoreThanOnce.get(clusterName), ",")));
+        errorAvailable = true;
       }
     } catch (SQLException e) {
       LOG.error("Exception occurred during check for config selected more than ones procedure: ", e);
@@ -203,6 +211,7 @@ public class CheckDatabaseHelper {
 
       if (!hostsWithoutStatus.isEmpty()) {
         LOG.error("You have host(s) without status: " + StringUtils.join(hostsWithoutStatus, ","));
+        errorAvailable = true;
       }
     } catch (SQLException e) {
       LOG.error("Exception occurred during check for host without state procedure: ", e);
@@ -257,7 +266,8 @@ public class CheckDatabaseHelper {
       }
 
       if (hostComponentStateCount != hostComponentDesiredStateCount || hostComponentStateCount != mergedCount) {
-        LOG.error("Your host component state count not equals host component desired state count!");
+        LOG.error("Your host component states(hostcomponentstate table) count not equals host component desired states(hostcomponentdesiredstate table) count!");
+        errorAvailable = true;
       }
 
     } catch (SQLException e) {
@@ -284,27 +294,32 @@ public class CheckDatabaseHelper {
   * If any issue was discovered, we are showing error message for user.
   * */
   protected void checkServiceConfigs()  {
-    String GET_SERVICES_WITHOUT_CONFIGS_QUERY = "select service_name from clusterservices where service_name not in (select service_name from serviceconfig where group_id is null)";
+    String GET_SERVICES_WITHOUT_CONFIGS_QUERY = "select c.cluster_name, service_name from clusterservices cs " +
+            "join clusters c on cs.cluster_id=c.cluster_id " +
+            "where service_name not in (select service_name from serviceconfig sc where sc.cluster_id=cs.cluster_id and sc.service_name=cs.service_name and sc.group_id is null)";
     String GET_SERVICE_CONFIG_WITHOUT_MAPPING_QUERY = "select service_name from serviceconfig where service_config_id not in (select service_config_id from serviceconfigmapping) and group_id is null";
-    String GET_STACK_NAME_VERSION_QUERY = "select s.stack_name, s.stack_version from clusters c join stack s on c.desired_stack_id = s.stack_id";
-    String GET_SERVICES_WITH_CONFIGS_QUERY = "select cs.service_name, type_name, sc.version from clusterservices cs " +
-            "join serviceconfig sc on cs.service_name=sc.service_name " +
+    String GET_STACK_NAME_VERSION_QUERY = "select c.cluster_name, s.stack_name, s.stack_version from clusters c " +
+            "join stack s on c.desired_stack_id = s.stack_id";
+    String GET_SERVICES_WITH_CONFIGS_QUERY = "select c.cluster_name, cs.service_name, type_name, sc.version from clusterservices cs " +
+            "join serviceconfig sc on cs.service_name=sc.service_name and cs.cluster_id=sc.cluster_id " +
             "join serviceconfigmapping scm on sc.service_config_id=scm.service_config_id " +
-            "join clusterconfig cc on scm.config_id=cc.config_id " +
+            "join clusterconfig cc on scm.config_id=cc.config_id and sc.cluster_id=cc.cluster_id " +
+            "join clusters c on cc.cluster_id=c.cluster_id " +
             "where sc.group_id is null " +
-            "group by cs.service_name, type_name, sc.version";
-    String GET_NOT_SELECTED_SERVICE_CONFIGS_QUERY = "select cs.service_name,cc.type_name from clusterservices cs " +
-            "join serviceconfig sc on cs.service_name=sc.service_name " +
+            "group by c.cluster_name, cs.service_name, type_name, sc.version";
+    String GET_NOT_SELECTED_SERVICE_CONFIGS_QUERY = "select c.cluster_name, cs.service_name,cc.type_name from clusterservices cs " +
+            "join serviceconfig sc on cs.service_name=sc.service_name and cs.cluster_id=sc.cluster_id " +
             "join serviceconfigmapping scm on sc.service_config_id=scm.service_config_id " +
-            "join clusterconfig cc on scm.config_id=cc.config_id " +
-            "join clusterconfigmapping ccm on cc.type_name=ccm.type_name and cc.version_tag=ccm.version_tag " +
-            "where sc.group_id is null and sc.service_config_id = (select max(service_config_id) from serviceconfig sc2 where sc2.service_name=sc.service_name) " +
-            "group by cs.service_name,cc.type_name " +
+            "join clusterconfig cc on scm.config_id=cc.config_id and cc.cluster_id=sc.cluster_id " +
+            "join clusterconfigmapping ccm on cc.type_name=ccm.type_name and cc.version_tag=ccm.version_tag and cc.cluster_id=ccm.cluster_id " +
+            "join clusters c on ccm.cluster_id=c.cluster_id " +
+            "where sc.group_id is null and sc.service_config_id = (select max(service_config_id) from serviceconfig sc2 where sc2.service_name=sc.service_name and sc2.cluster_id=sc.cluster_id) " +
+            "group by c.cluster_name,cs.service_name,cc.type_name " +
             "having sum(ccm.selected) < 1";
-    String stackName = null, stackVersion = null;
-    Set<String> servicesWithoutConfigs = new HashSet<>();
+    Multimap<String, String> servicesWithoutConfigs = HashMultimap.create();
+    Map<String, Map<String, String>>  clusterStackInfo = new HashMap<>();
     Set<String> servicesWithoutMappedConfigs = new HashSet<>();
-    Map<String, List<String>> notSelectedServiceConfigs = new HashMap<>();
+    Map<String, Multimap<String, String>> notSelectedServiceConfigs = new HashMap<>();
     ResultSet rs = null;
 
     try {
@@ -313,12 +328,13 @@ public class CheckDatabaseHelper {
       rs = statement.executeQuery(GET_SERVICES_WITHOUT_CONFIGS_QUERY);
       if (rs != null) {
         while (rs.next()) {
-          servicesWithoutConfigs.add(rs.getString("service_name"));
+          servicesWithoutConfigs.put(rs.getString("cluster_name"), rs.getString("service_name"));
         }
       }
 
-      if (!servicesWithoutConfigs.isEmpty()) {
-        LOG.error("You have services without configs at all: " + StringUtils.join(servicesWithoutConfigs, ","));
+      for (String clusterName : servicesWithoutConfigs.keySet()) {
+        LOG.error(String.format("Service(s): %s, from cluster %s has no config(s) in serviceconfig table!", StringUtils.join(servicesWithoutConfigs.get(clusterName), ","), clusterName));
+        errorAvailable = true;
       }
 
       rs = statement.executeQuery(GET_SERVICE_CONFIG_WITHOUT_MAPPING_QUERY);
@@ -329,44 +345,63 @@ public class CheckDatabaseHelper {
       }
 
       if (!servicesWithoutMappedConfigs.isEmpty()) {
-        LOG.error("You have services without mapped configs: " + StringUtils.join(servicesWithoutMappedConfigs, ","));
+        LOG.error("You have service(s) without mapped configs in serviceconfigmapping: " + StringUtils.join(servicesWithoutMappedConfigs, ","));
+        errorAvailable = true;
       }
 
       rs = statement.executeQuery(GET_STACK_NAME_VERSION_QUERY);
       if (rs != null) {
         while (rs.next()) {
-          stackName = rs.getString("stack_name");
-          stackVersion = rs.getString("stack_version");
+          Map<String, String> stackInfoMap = new HashMap<>();
+          stackInfoMap.put(rs.getString("stack_name"), rs.getString("stack_version"));
+          clusterStackInfo.put(rs.getString("cluster_name"), stackInfoMap);
         }
       }
 
-      if (stackName != null && stackVersion != null) {
-        Set<String> serviceNames = new HashSet<>();
-        Map<Integer, Multimap<String, String>> dbServiceVersionConfigs = new HashMap<>();
-        Multimap<String, String> stackServiceConfigs = HashMultimap.create();
 
-        rs = statement.executeQuery(GET_SERVICES_WITH_CONFIGS_QUERY);
-        if (rs != null) {
-          String serviceName = null, configType = null;
-          Integer serviceVersion = null;
-          while (rs.next()) {
-            serviceName = rs.getString("service_name");
-            configType = rs.getString("type_name");
-            serviceVersion = rs.getInt("version");
+      Set<String> serviceNames = new HashSet<>();
+      Map<String, Map<Integer, Multimap<String, String>>> dbClusterServiceVersionConfigs = new HashMap<>();
+      Multimap<String, String> stackServiceConfigs = HashMultimap.create();
+
+      rs = statement.executeQuery(GET_SERVICES_WITH_CONFIGS_QUERY);
+      if (rs != null) {
+        String serviceName = null, configType = null, clusterName = null;
+        Integer serviceVersion = null;
+        while (rs.next()) {
+          clusterName = rs.getString("cluster_name");
+          serviceName = rs.getString("service_name");
+          configType = rs.getString("type_name");
+          serviceVersion = rs.getInt("version");
+
+          serviceNames.add(serviceName);
 
-            serviceNames.add(serviceName);
+          if (dbClusterServiceVersionConfigs.get(clusterName) != null) {
+            Map<Integer, Multimap<String, String>> dbServiceVersionConfigs = dbClusterServiceVersionConfigs.get(clusterName);
 
-            if (dbServiceVersionConfigs.get(serviceVersion) == null) {
+            if (dbServiceVersionConfigs.get(serviceVersion) != null) {
+              dbServiceVersionConfigs.get(serviceVersion).put(serviceName, configType);
+            } else {
               Multimap<String, String> dbServiceConfigs = HashMultimap.create();
               dbServiceConfigs.put(serviceName, configType);
               dbServiceVersionConfigs.put(serviceVersion, dbServiceConfigs);
-            } else {
-              dbServiceVersionConfigs.get(serviceVersion).put(serviceName, configType);
             }
+          } else {
+
+            Map<Integer, Multimap<String, String>> dbServiceVersionConfigs = new HashMap<>();
+            Multimap<String, String> dbServiceConfigs = HashMultimap.create();
+            dbServiceConfigs.put(serviceName, configType);
+            dbServiceVersionConfigs.put(serviceVersion, dbServiceConfigs);
+            dbClusterServiceVersionConfigs.put(clusterName, dbServiceVersionConfigs);
+
           }
         }
+      }
 
-
+      for (Map.Entry<String, Map<String, String>> clusterStackInfoEntry : clusterStackInfo.entrySet()) {
+        String clusterName = clusterStackInfoEntry.getKey();
+        Map<String, String> stackInfo = clusterStackInfoEntry.getValue();
+        String stackName = stackInfo.keySet().iterator().next();
+        String stackVersion = stackInfo.get(stackName);
         Map<String, ServiceInfo> serviceInfoMap = ambariMetaInfo.getServices(stackName, stackVersion);
         for (String serviceName : serviceNames) {
           ServiceInfo serviceInfo = serviceInfoMap.get(serviceName);
@@ -376,6 +411,7 @@ public class CheckDatabaseHelper {
           }
         }
 
+        Map<Integer, Multimap<String, String>> dbServiceVersionConfigs = dbClusterServiceVersionConfigs.get(clusterName);
         for (Integer serviceVersion : dbServiceVersionConfigs.keySet()) {
           Multimap<String, String> dbServiceConfigs = dbServiceVersionConfigs.get(serviceVersion);
           for (String serviceName : dbServiceConfigs.keySet()) {
@@ -384,33 +420,45 @@ public class CheckDatabaseHelper {
             if (serviceConfigsFromDB != null && serviceConfigsFromStack != null) {
               serviceConfigsFromStack.removeAll(serviceConfigsFromDB);
               if (!serviceConfigsFromStack.isEmpty()) {
-                LOG.error(String.format("Required config(s): %s is(are) not available for service %s with service config version %s",
-                        StringUtils.join(serviceConfigsFromStack, ","), serviceName, Integer.toString(serviceVersion)));
+                LOG.error(String.format("Required config(s): %s is(are) not available for service %s with service config version %s for cluster %s",
+                        StringUtils.join(serviceConfigsFromStack, ","), serviceName, Integer.toString(serviceVersion), clusterName));
+                errorAvailable = true;
               }
             }
           }
         }
       }
 
+
       rs = statement.executeQuery(GET_NOT_SELECTED_SERVICE_CONFIGS_QUERY);
       if (rs != null) {
-        String serviceName = null, configType = null;
+        String serviceName = null, configType = null, clusterName = null;
         while (rs.next()) {
+          clusterName = rs.getString("cluster_name");
           serviceName = rs.getString("service_name");
           configType = rs.getString("type_name");
 
-          if (notSelectedServiceConfigs.get(serviceName) != null) {
-            notSelectedServiceConfigs.get(serviceName).add(configType);
+
+          if (notSelectedServiceConfigs.get(clusterName) != null) {
+            Multimap<String, String> serviceConfigs = notSelectedServiceConfigs.get(clusterName);
+            serviceConfigs.put(serviceName, configType);
           } else {
-            List<String> configTypes = new ArrayList<>();
-            configTypes.add(configType);
-            notSelectedServiceConfigs.put(serviceName, configTypes);
+
+            Multimap<String, String> serviceConfigs = HashMultimap.create();
+            serviceConfigs.put(serviceName, configType);
+            notSelectedServiceConfigs.put(clusterName, serviceConfigs);
+
           }
+
         }
       }
 
-      for (String serviceName : notSelectedServiceConfigs.keySet()) {
-        LOG.error(String.format("You have non selected configs: %s for service %s.", StringUtils.join(notSelectedServiceConfigs.get(serviceName), ","), serviceName));
+      for (String clusterName : notSelectedServiceConfigs.keySet()) {
+        Multimap<String, String> serviceConfig = notSelectedServiceConfigs.get(clusterName);
+        for (String serviceName : serviceConfig.keySet()) {
+          LOG.error(String.format("You have non selected configs: %s for service %s from cluster %s!", StringUtils.join(serviceConfig.get(serviceName), ","), serviceName, clusterName));
+          errorAvailable = true;
+        }
       }
     } catch (SQLException e) {
       LOG.error("Exception occurred during complex service check procedure: ", e);
@@ -467,6 +515,11 @@ public class CheckDatabaseHelper {
     } finally {
       if (checkDatabaseHelper != null) {
         checkDatabaseHelper.closeConnection();
+        if (checkDatabaseHelper.isErrorAvailable()) {
+          System.out.print("Some error(s) was(were) found. Please check ambari-server-check-database.log for problem(s).");
+        } else {
+          System.out.print("No erros were found.");
+        }
       }
     }
   }
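
Taken together, the Java changes above follow one pattern: every check logs its findings to the database-check log and, on a problem, flips the new errorAvailable flag; after closeConnection(), main() prints a one-line verdict to stdout, which the updated checkDatabase.py now relays to the user instead of redirecting everything into ambari-server.log. A condensed, self-contained sketch of that flow, with a hypothetical stand-in for the real SQL-backed checks:

  // Condensed sketch of the error-flag flow; checkHostsWithoutState() is a
  // hypothetical stand-in for the JDBC-backed checks in CheckDatabaseHelper.
  public class DbCheckFlowSketch {
    private boolean errorAvailable = false;

    protected boolean isErrorAvailable() {
      return errorAvailable;
    }

    protected void checkHostsWithoutState() {
      boolean hostsWithoutStatusFound = false; // would come from a SQL query
      if (hostsWithoutStatusFound) {
        // detailed LOG.error(...) output goes to ambari-server-check-database.log
        errorAvailable = true;
      }
    }

    public static void main(String[] args) {
      DbCheckFlowSketch helper = new DbCheckFlowSketch();
      helper.checkHostsWithoutState();
      if (helper.isErrorAvailable()) {
        System.out.print("Some error(s) was(were) found. Please check ambari-server-check-database.log for problem(s).");
      } else {
        System.out.print("No errors were found.");
      }
    }
  }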

http://git-wip-us.apache.org/repos/asf/ambari/blob/4761fe7c/ambari-server/src/main/python/ambari_server/checkDatabase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/checkDatabase.py b/ambari-server/src/main/python/ambari_server/checkDatabase.py
index af1d74f..80eecc2 100644
--- a/ambari-server/src/main/python/ambari_server/checkDatabase.py
+++ b/ambari-server/src/main/python/ambari_server/checkDatabase.py
@@ -39,8 +39,7 @@ from ambari_server.serverUtils import is_server_runing
 from ambari_server.userInput import get_YN_input
 
 CHECK_DATABASE_HELPER_CMD = "{0} -cp {1} " + \
-                         "org.apache.ambari.server.checks.CheckDatabaseHelper" + \
-                         " > " + configDefaults.SERVER_LOG_FILE + " 2>&1"
+                         "org.apache.ambari.server.checks.CheckDatabaseHelper"
 
 def check_database(options):
 
@@ -71,10 +70,10 @@ def check_database(options):
   print_info_msg("Return code from check database command, retcode = " + str(retcode))
 
   if retcode > 0:
-    print_error_msg("Database check failed to complete. Please check ambari-server.log for problem.")
+    print_error_msg("Database check failed to complete. Please check ambari-server.log and ambari-server-check-database.log for problem.")
     raise FatalException(1, 'Database check failed.')
   else:
-    print_info_msg('Check database completed successfully. Please check ambari-server.log for results.')
+    print str(stdout)
 
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/4761fe7c/ambari-server/src/test/java/org/apache/ambari/server/checks/CheckDatabaseHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/CheckDatabaseHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/CheckDatabaseHelperTest.java
index 1c2765c..67594b7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/checks/CheckDatabaseHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/CheckDatabaseHelperTest.java
@@ -260,22 +260,27 @@ public class CheckDatabaseHelperTest {
     expect(stackResultSet.getString("stack_version")).andReturn("2.2");
     expect(mockDBDbAccessor.getConnection()).andReturn(mockConnection);
     expect(mockConnection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE)).andReturn(mockStatement);
-    expect(mockStatement.executeQuery("select service_name from clusterservices where service_name not in (select service_name from serviceconfig where group_id is null)")).andReturn(mockResultSet);
+    expect(mockStatement.executeQuery("select c.cluster_name, service_name from clusterservices cs " +
+            "join clusters c on cs.cluster_id=c.cluster_id " +
+            "where service_name not in (select service_name from serviceconfig sc where sc.cluster_id=cs.cluster_id and sc.service_name=cs.service_name and sc.group_id is null)")).andReturn(mockResultSet);
     expect(mockStatement.executeQuery("select service_name from serviceconfig where service_config_id not in (select service_config_id from serviceconfigmapping) and group_id is null")).andReturn(mockResultSet);
-    expect(mockStatement.executeQuery("select s.stack_name, s.stack_version from clusters c join stack s on c.desired_stack_id = s.stack_id")).andReturn(stackResultSet);
-    expect(mockStatement.executeQuery("select cs.service_name, type_name, sc.version from clusterservices cs " +
-            "join serviceconfig sc on cs.service_name=sc.service_name " +
+    expect(mockStatement.executeQuery("select c.cluster_name, s.stack_name, s.stack_version from clusters c " +
+            "join stack s on c.desired_stack_id = s.stack_id")).andReturn(stackResultSet);
+    expect(mockStatement.executeQuery("select c.cluster_name, cs.service_name, type_name, sc.version from clusterservices cs " +
+            "join serviceconfig sc on cs.service_name=sc.service_name and cs.cluster_id=sc.cluster_id " +
             "join serviceconfigmapping scm on sc.service_config_id=scm.service_config_id " +
-            "join clusterconfig cc on scm.config_id=cc.config_id " +
+            "join clusterconfig cc on scm.config_id=cc.config_id and sc.cluster_id=cc.cluster_id " +
+            "join clusters c on cc.cluster_id=c.cluster_id " +
             "where sc.group_id is null " +
-            "group by cs.service_name, type_name, sc.version")).andReturn(serviceConfigResultSet);
-    expect(mockStatement.executeQuery("select cs.service_name,cc.type_name from clusterservices cs " +
-            "join serviceconfig sc on cs.service_name=sc.service_name " +
+            "group by c.cluster_name, cs.service_name, type_name, sc.version")).andReturn(serviceConfigResultSet);
+    expect(mockStatement.executeQuery("select c.cluster_name, cs.service_name,cc.type_name from clusterservices cs " +
+            "join serviceconfig sc on cs.service_name=sc.service_name and cs.cluster_id=sc.cluster_id " +
             "join serviceconfigmapping scm on sc.service_config_id=scm.service_config_id " +
-            "join clusterconfig cc on scm.config_id=cc.config_id " +
-            "join clusterconfigmapping ccm on cc.type_name=ccm.type_name and cc.version_tag=ccm.version_tag " +
-            "where sc.group_id is null and sc.service_config_id = (select max(service_config_id) from serviceconfig sc2 where sc2.service_name=sc.service_name) " +
-            "group by cs.service_name,cc.type_name " +
+            "join clusterconfig cc on scm.config_id=cc.config_id and cc.cluster_id=sc.cluster_id " +
+            "join clusterconfigmapping ccm on cc.type_name=ccm.type_name and cc.version_tag=ccm.version_tag and cc.cluster_id=ccm.cluster_id " +
+            "join clusters c on ccm.cluster_id=c.cluster_id " +
+            "where sc.group_id is null and sc.service_config_id = (select max(service_config_id) from serviceconfig sc2 where sc2.service_name=sc.service_name and sc2.cluster_id=sc.cluster_id) " +
+            "group by c.cluster_name,cs.service_name,cc.type_name " +
             "having sum(ccm.selected) < 1")).andReturn(mockResultSet);
 
     CheckDatabaseHelper checkDatabaseHelper = new CheckDatabaseHelper(mockDBDbAccessor, mockInjector, null);

http://git-wip-us.apache.org/repos/asf/ambari/blob/4761fe7c/ambari-server/src/test/python/TestAmbariServer.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestAmbariServer.py b/ambari-server/src/test/python/TestAmbariServer.py
index 2195ef0..f52edc3 100644
--- a/ambari-server/src/test/python/TestAmbariServer.py
+++ b/ambari-server/src/test/python/TestAmbariServer.py
@@ -6901,8 +6901,7 @@ class TestAmbariServer(TestCase):
     self.assertTrue(ensureCanStartUnderCurrentUserMock.called)
     self.assertTrue(generateEnvMock.called)
 
-    self.assertEquals(runOSCommandMock.call_args[0][0], '/path/to/java -cp test:path12 org.apache.ambari.server.checks.CheckDatabaseHelper'
-                                                        ' > /var/log/ambari-server/ambari-server.log 2>&1')
+    self.assertEquals(runOSCommandMock.call_args[0][0], '/path/to/java -cp test:path12 org.apache.ambari.server.checks.CheckDatabaseHelper')
 
     pass
 


[25/33] ambari git commit: AMBARI-15079. Preupload.py should pre-create hdfs directories (aonishuk)

Posted by nc...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 185db68..69e1f5d 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -60,6 +60,7 @@ class TestHistoryServer(RMFTestCase):
     self.assertResourceCalled("HdfsResource", "/apps/tez/",
                           type="directory",
                           action=["create_on_execute"],
+                          hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                           user=u"hdfs",
                           dfs_type = '',
                           owner=u"tez",
@@ -77,6 +78,7 @@ class TestHistoryServer(RMFTestCase):
     self.assertResourceCalled("HdfsResource", "/apps/tez/lib/",
                               type="directory",
                               action=["create_on_execute"],
+                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               user=u'hdfs',
                               owner=u"tez",
                               dfs_type = '',
@@ -93,6 +95,7 @@ class TestHistoryServer(RMFTestCase):
 
     self.assertResourceCalled("HdfsResource", None,
                               action=['execute'],
+                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               user=u'hdfs',
                               hadoop_bin_dir="/usr/bin",
                               dfs_type = '',
@@ -205,7 +208,7 @@ class TestHistoryServer(RMFTestCase):
         group = 'hadoop',
         hadoop_bin_dir = '/usr/bin',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0777,
     )
     self.assertResourceCalled('HdfsResource', '/tmp/entity-file-history/active',
@@ -222,7 +225,7 @@ class TestHistoryServer(RMFTestCase):
         group = 'hadoop',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
     self.assertResourceCalled('HdfsResource', '/mapred',
         security_enabled = False,
@@ -234,7 +237,7 @@ class TestHistoryServer(RMFTestCase):
         owner = 'mapred',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
     self.assertResourceCalled('HdfsResource', '/mapred/system',
         security_enabled = False,
@@ -246,7 +249,7 @@ class TestHistoryServer(RMFTestCase):
         owner = 'hdfs',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
     self.assertResourceCalled('HdfsResource', '/mr-history/done',
         security_enabled = False,
@@ -260,7 +263,7 @@ class TestHistoryServer(RMFTestCase):
         group = 'hadoop',
         hadoop_bin_dir = '/usr/bin',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0777,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -270,7 +273,7 @@ class TestHistoryServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/hadoop/mapreduce/jhs',
@@ -451,7 +454,7 @@ class TestHistoryServer(RMFTestCase):
         group = 'hadoop',
         hadoop_bin_dir = '/usr/bin',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0777,
     )
     self.assertResourceCalled('HdfsResource', '/tmp/entity-file-history/active',
@@ -468,7 +471,7 @@ class TestHistoryServer(RMFTestCase):
         group = 'hadoop',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
     self.assertResourceCalled('HdfsResource', '/mapred',
         security_enabled = True,
@@ -480,7 +483,7 @@ class TestHistoryServer(RMFTestCase):
         owner = 'mapred',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
     self.assertResourceCalled('HdfsResource', '/mapred/system',
         security_enabled = True,
@@ -492,7 +495,7 @@ class TestHistoryServer(RMFTestCase):
         owner = 'hdfs',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
     self.assertResourceCalled('HdfsResource', '/mr-history/done',
         security_enabled = True,
@@ -506,7 +509,7 @@ class TestHistoryServer(RMFTestCase):
         group = 'hadoop',
         hadoop_bin_dir = '/usr/bin',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0777,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -516,7 +519,7 @@ class TestHistoryServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/hadoop/mapreduce/jhs',
@@ -841,7 +844,7 @@ class TestHistoryServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
index 54ecf33..3618ab8 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_service_check.py
@@ -46,7 +46,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
     )
@@ -58,7 +58,7 @@ class TestServiceCheck(RMFTestCase):
         source = '/etc/passwd',
         user = 'hdfs',
         dfs_type = '',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
     )
@@ -69,7 +69,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('ExecuteHadoop', 'jar /usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples-2.*.jar wordcount /user/ambari-qa/mapredsmokeinput /user/ambari-qa/mapredsmokeoutput',
@@ -103,7 +103,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
     )
@@ -115,7 +115,7 @@ class TestServiceCheck(RMFTestCase):
         source = '/etc/passwd',
         user = 'hdfs',
         dfs_type = '',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
     )
@@ -126,7 +126,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM;',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
index b14a527..10c721f 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
@@ -141,7 +141,7 @@ class TestFalconServer(RMFTestCase):
         owner = 'falcon',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0777,
     )
     self.assertResourceCalled('Directory', '/hadoop/falcon/store',
@@ -161,7 +161,7 @@ class TestFalconServer(RMFTestCase):
         type = 'directory',
         recursive_chown = True,
         recursive_chmod = True,
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0770,
         source='/usr/hdp/current/falcon-server/data-mirroring'
     )
@@ -173,7 +173,7 @@ class TestFalconServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/hadoop/falcon',
@@ -327,7 +327,7 @@ class TestFalconServer(RMFTestCase):
         owner = 'falcon',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0777,
     )
     self.assertResourceCalled('Directory', '/hadoop/falcon/store',
@@ -351,7 +351,7 @@ class TestFalconServer(RMFTestCase):
         group = 'users',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0770,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -364,7 +364,7 @@ class TestFalconServer(RMFTestCase):
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('Directory', '/hadoop/falcon',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py b/ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py
index 728a1a8..399d1e2 100644
--- a/ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py
@@ -47,7 +47,7 @@ class TestTezServiceCheck(RMFTestCase):
         dfs_type = '',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
 
     self.assertResourceCalled('HdfsResource', '/tmp/tezsmokeinput',
@@ -60,7 +60,7 @@ class TestTezServiceCheck(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
     self.assertResourceCalled('HdfsResource', '/tmp/tezsmokeinput/sample-tez-test',
         security_enabled = False,
@@ -73,7 +73,7 @@ class TestTezServiceCheck(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = False,
@@ -82,7 +82,7 @@ class TestTezServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('ExecuteHadoop', 'jar /usr/lib/tez/tez-mapreduce-examples*.jar orderedwordcount /tmp/tezsmokeinput/sample-tez-test /tmp/tezsmokeoutput/',
@@ -125,7 +125,7 @@ class TestTezServiceCheck(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               principal_name = 'hdfs',
                               user = 'hdfs',
-                              action = ['delete_on_execute'],
+                              action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               type = 'directory',
                               )
@@ -142,7 +142,7 @@ class TestTezServiceCheck(RMFTestCase):
                               owner = 'ambari-qa',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               type = 'directory',
-                              action = ['create_on_execute'],
+                              action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               )
     self.assertResourceCalled('HdfsResource', '/tmp/tezsmokeinput/sample-tez-test',
                               security_enabled = True,
@@ -158,7 +158,7 @@ class TestTezServiceCheck(RMFTestCase):
                               owner = 'ambari-qa',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               type = 'file',
-                              action = ['create_on_execute'],
+                              action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               )
     self.assertResourceCalled('HdfsResource', None,
                               security_enabled = True,
@@ -170,7 +170,7 @@ class TestTezServiceCheck(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               principal_name = 'hdfs',
                               user = 'hdfs',
-                              action = ['execute'],
+                              action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM;',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
index 4fccd91..4af5f83 100644
--- a/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
+++ b/ambari-server/src/test/python/stacks/2.1/YARN/test_apptimelineserver.py
@@ -197,7 +197,7 @@ class TestAppTimelineServer(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               principal_name = UnknownConfigurationMock(),
                               user = 'hdfs',
-                              action = ['execute'],
+                              action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               )
     self.assertResourceCalled('File', '/etc/hadoop/conf/yarn.exclude',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
index bf42f9b..bfc8f31 100644
--- a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
@@ -50,7 +50,7 @@ class TestPigServiceCheck(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
-        action = ['delete_on_execute'],
+        action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
         security_enabled = True,
@@ -66,7 +66,7 @@ class TestPigServiceCheck(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'file',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = True,
@@ -78,7 +78,7 @@ class TestPigServiceCheck(RMFTestCase):
         principal_name = 'hdfs@EXAMPLE.COM',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM;',
@@ -113,7 +113,7 @@ class TestPigServiceCheck(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
-        action = ['delete_on_execute'],
+        action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
         security_enabled = True,
@@ -129,7 +129,7 @@ class TestPigServiceCheck(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'file',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
 
     copy_to_hdfs_mock.assert_called_with("tez", "hadoop", "hdfs", host_sys_prepped=False)
@@ -143,7 +143,7 @@ class TestPigServiceCheck(RMFTestCase):
         principal_name = 'hdfs@EXAMPLE.COM',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
index f7cd3b5..73322a1 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
@@ -63,7 +63,7 @@ class TestJobHistoryServer(RMFTestCase):
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('Execute', '/usr/hdp/current/spark-client/sbin/start-history-server.sh',
@@ -118,6 +118,7 @@ class TestJobHistoryServer(RMFTestCase):
 
     self.assertResourceCalled('HdfsResource', None,
         action=['execute'],
+        hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         default_fs= UnknownConfigurationMock(),
         hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
         hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
@@ -180,7 +181,7 @@ class TestJobHistoryServer(RMFTestCase):
         owner = 'spark',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0775,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -193,7 +194,7 @@ class TestJobHistoryServer(RMFTestCase):
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
@@ -251,7 +252,7 @@ class TestJobHistoryServer(RMFTestCase):
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         dfs_type = '',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0775,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -264,7 +265,7 @@ class TestJobHistoryServer(RMFTestCase):
         principal_name = UnknownConfigurationMock(),
         user = UnknownConfigurationMock(),
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
index c9f376b..878b7f8 100644
--- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
@@ -47,7 +47,7 @@ class TestMahoutClient(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
     )
@@ -61,7 +61,7 @@ class TestMahoutClient(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mahoutsmokeinput/sample-mahout-test.txt',
         security_enabled = False,
@@ -74,7 +74,7 @@ class TestMahoutClient(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'file',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = False,
@@ -83,7 +83,7 @@ class TestMahoutClient(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertResourceCalled('Execute', 'mahout seqdirectory --input /user/ambari-qa/mahoutsmokeinput/'

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
index 8e7d367..f9c741d 100644
--- a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
@@ -102,7 +102,7 @@ class TestSparkThriftServer(RMFTestCase):
         owner = 'spark',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         dfs_type = '',
         mode = 0775,
     )
@@ -115,7 +115,7 @@ class TestSparkThriftServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         dfs_type = '',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.3/YARN/test_ats_1_5.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/YARN/test_ats_1_5.py b/ambari-server/src/test/python/stacks/2.3/YARN/test_ats_1_5.py
index 0b3d434..f826b55 100644
--- a/ambari-server/src/test/python/stacks/2.3/YARN/test_ats_1_5.py
+++ b/ambari-server/src/test/python/stacks/2.3/YARN/test_ats_1_5.py
@@ -165,7 +165,7 @@ class TestAts(RMFTestCase):
                               group = 'hadoop',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               type = 'directory',
-                              action = ['create_on_execute'],
+                              action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               mode = 0755,
                               )
     self.assertResourceCalled('HdfsResource', '/ats/done',
@@ -182,7 +182,7 @@ class TestAts(RMFTestCase):
                               group = 'hadoop',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               type = 'directory',
-                              action = ['create_on_execute'],
+                              action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               mode = 0700,
                               )
     self.assertResourceCalled('HdfsResource', '/ats',
@@ -200,7 +200,7 @@ class TestAts(RMFTestCase):
                               group = 'hadoop',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               type = 'directory',
-                              action = ['create_on_execute'],
+                              action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               mode = 0755,
                               )
     self.assertResourceCalled('HdfsResource', '/ats/active',
@@ -217,7 +217,7 @@ class TestAts(RMFTestCase):
                               group = 'hadoop',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               type = 'directory',
-                              action = ['create_on_execute'],
+                              action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               mode = 01777,
                               )
     self.assertResourceCalled('HdfsResource', None,
@@ -230,7 +230,7 @@ class TestAts(RMFTestCase):
                               kinit_path_local = '/usr/bin/kinit',
                               principal_name = UnknownConfigurationMock(),
                               user = 'hdfs',
-                              action = ['execute'],
+                              action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               )
     self.assertResourceCalled('File', '/etc/hadoop/conf/yarn.exclude',
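
The hunks above all make the same change: every HdfsResource assertion now
expects an hdfs_resource_ignore_file argument pointing at
/var/lib/ambari-agent/data/.hdfs_resource_ignore. As a rough sketch of the
idea (not the actual Ambari provider code), an agent-side provider honoring
such a file might read it as one HDFS path per line and skip the listed
paths; the file format and helper names below are assumptions made for
illustration:

import os

def read_ignore_list(ignore_file):
    # A missing or unset ignore file is assumed to mean nothing is skipped.
    if not ignore_file or not os.path.isfile(ignore_file):
        return set()
    with open(ignore_file) as f:
        return set(line.strip() for line in f if line.strip())

def should_manage(hdfs_path,
                  ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore'):
    # False for paths the operator asked the agent to leave alone.
    return hdfs_path not in read_ignore_list(ignore_file)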


[22/33] ambari git commit: AMBARI-15001: HDFS keytab for HAWQ service check on a secured cluster

Posted by nc...@apache.org.
AMBARI-15001: HDFS keytab for HAWQ service check on a secured cluster


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0ea255c5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0ea255c5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0ea255c5

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 0ea255c59005eda4d5684230460d8b1c647c78f7
Parents: 5a9bb71
Author: Jun Aoki <ja...@apache.org>
Authored: Wed Feb 17 12:27:50 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Wed Feb 17 12:27:50 2016 -0800

----------------------------------------------------------------------
 .../common-services/HAWQ/2.0.0/kerberos.json    | 125 +++++++++----------
 1 file changed, 56 insertions(+), 69 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0ea255c5/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/kerberos.json b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/kerberos.json
index cc11c15..da11986 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/kerberos.json
@@ -1,73 +1,60 @@
 {
-    "services": [
+  "services": [
+    {
+      "name": "HAWQ",
+      "identities": [
         {
-            "name": "HAWQ",
-            "identities": [
-                {
-                    "name": "/hdfs"
-                }
-            ], 
-            "configurations": [
-                {
-                    "hawq-site": {
-                        "enable_secure_filesystem": "ON",
-                        "krb_server_keyfile": "${keytab_dir}/hawq.service.keytab"
-                    }
-                },
-                {
-                    "hdfs-client": {
-                        "hadoop.security.authentication": "kerberos"
-                    }
-                }
-            ],
-            "components": [
-                {
-                    "identities": [
-                        {
-                            "keytab": {
-                                "file": "${keytab_dir}/hawq.service.keytab", 
-                                "group": {
-                                    "access": "", 
-                                    "name": "${cluster-env/user_group}"
-                                }, 
-                                "owner": {
-                                    "access": "r", 
-                                    "name": "gpadmin"
-                                }
-                            }, 
-                            "name": "hawq_master_hawq", 
-                            "principal": {
-                                "type": "service", 
-                                "value": "postgres@${realm}"
-                            }
-                        }
-                    ], 
-                    "name": "HAWQMASTER"
-                }, 
-                {
-                    "identities": [
-                        {
-                            "keytab": {
-                                "file": "${keytab_dir}/hawq.service.keytab", 
-                                "group": {
-                                    "access": "", 
-                                    "name": "${cluster-env/user_group}"
-                                }, 
-                                "owner": {
-                                    "access": "r", 
-                                    "name": "gpadmin"
-                                }
-                            }, 
-                            "name": "hawq_standby_hawq", 
-                            "principal": {
-                                "type": "service", 
-                                "value": "postgres@${realm}"
-                            }
-                        }
-                    ], 
-                    "name": "HAWQSTANDBY"
-                }
-            ] 
+          "name": "/HDFS/NAMENODE/hdfs"
+        },
+        {
+          "name": "hawq_identity",
+          "principal": {
+            "type": "user",
+            "value": "postgres@${realm}"
+          },
+          "keytab": {
+            "file": "${keytab_dir}/hawq.service.keytab",
+            "owner": {
+              "access": "r",
+              "name": "gpadmin"
+            },
+            "group": {
+              "name": "${cluster-env/user_group}"
+            }
+          }
+        }
+      ],
+      "configurations": [
+        {
+          "hawq-site": {
+            "enable_secure_filesystem": "ON",
+            "krb_server_keyfile": "${keytab_dir}/hawq.service.keytab"
+          }
+        },
+        {
+          "hdfs-client": {
+            "hadoop.security.authentication": "kerberos"
+          }
+        }
+      ],
+      "components" : [
+        {
+          "name": "HAWQMASTER",
+          "identities": [
+            {
+              "name": "/HAWQ/hawq_identity"
+            }
+          ]
+        },
+        {
+          "name": "HAWQSTANDBY",
+          "identities": [
+            {
+              "name": "/HAWQ/hawq_identity"
+            }
+          ]
         }
-    ]
+      ]
+    }
+  ]
 }
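
The rewrite above replaces the two duplicated per-component keytab blocks
with a single service-level identity, hawq_identity, which HAWQMASTER and
HAWQSTANDBY pick up by reference ("/HAWQ/hawq_identity"), and qualifies the
HDFS identity as "/HDFS/NAMENODE/hdfs". A simplified sketch of how such
references could be resolved against the descriptor (the real Ambari
resolver also handles component-level references and merging, which this
sketch omits):

def resolve_identity(descriptor, ref):
    # ref looks like "/HAWQ/hawq_identity"; only service-level
    # identities are handled in this illustration.
    service_name, identity_name = ref.strip('/').split('/', 1)
    for service in descriptor.get('services', []):
        if service.get('name') == service_name:
            for identity in service.get('identities', []):
                if identity.get('name') == identity_name:
                    return identity
    raise KeyError(ref)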


[02/33] ambari git commit: Updated team page. (yusaku)

Posted by nc...@apache.org.
Updated team page. (yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/be1b6b9b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/be1b6b9b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/be1b6b9b

Branch: refs/heads/branch-dev-patch-upgrade
Commit: be1b6b9b1cb419725edb8e647d61d48756d184e4
Parents: 0ce5fea
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Tue Feb 16 10:30:42 2016 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Tue Feb 16 10:30:42 2016 -0800

----------------------------------------------------------------------
 docs/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/be1b6b9b/docs/pom.xml
----------------------------------------------------------------------
diff --git a/docs/pom.xml b/docs/pom.xml
index a1b0c79..c3bb326 100644
--- a/docs/pom.xml
+++ b/docs/pom.xml
@@ -433,7 +433,7 @@
         <developer>
             <id>kzhang</id>
             <name>Kan Zhang</name>
-            <email>kanzhangmail@yahoo.com</email>
+            <email>kzhang@apache.org</email>
             <timezone>-8</timezone>
             <roles>
                 <role>PMC</role>


[10/33] ambari git commit: AMBARI-14997: Rename gpcheck.cnf to hawq_check.cnf as recommended in the HAWQ documentation (bhuvnesh2703 via jaoki)

Posted by nc...@apache.org.
AMBARI-14997: Rename gpcheck.cnf to hawq_check.cnf as recommended in the HAWQ documentation (bhuvnesh2703 via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/529c588a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/529c588a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/529c588a

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 529c588add78ce4d0a82542144ba94f89c80299e
Parents: d412ca1
Author: Jun Aoki <ja...@apache.org>
Authored: Tue Feb 16 16:27:02 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Tue Feb 16 16:27:02 2016 -0800

----------------------------------------------------------------------
 .../HAWQ/2.0.0/configuration/gpcheck-env.xml    | 89 --------------------
 .../HAWQ/2.0.0/configuration/hawq-check-env.xml | 89 ++++++++++++++++++++
 .../common-services/HAWQ/2.0.0/metainfo.xml     |  2 +-
 .../2.0.0/package/scripts/hawq_constants.py     |  2 +-
 .../HAWQ/2.0.0/package/scripts/master_helper.py |  2 +-
 .../HAWQ/2.0.0/package/scripts/params.py        |  2 +-
 .../stacks/2.3/common/services-hawq-1-host.json |  2 +-
 .../2.3/common/services-hawq-3-hosts.json       |  2 +-
 .../2.3/common/services-hawq-pxf-hdfs.json      |  4 +-
 .../services-master_ambari_colo-3-hosts.json    |  2 +-
 .../services-master_standby_colo-3-hosts.json   |  2 +-
 .../common/services-normal-hawq-3-hosts.json    |  2 +-
 .../services-standby_ambari_colo-3-hosts.json   |  2 +-
 ambari-web/app/data/HDP2.3/site_properties.js   |  4 +-
 ambari-web/app/models/stack_service.js          |  4 +-
 15 files changed, 105 insertions(+), 105 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/gpcheck-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/gpcheck-env.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/gpcheck-env.xml
deleted file mode 100755
index 309a0c7..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/gpcheck-env.xml
+++ /dev/null
@@ -1,89 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<configuration supports_adding_forbidden="true">
-  <!-- gpcheck.cnf -->
-    <property>
-      <name>content</name>
-      <display-name>Content</display-name>
-      <description>Contents of the configuration file /usr/local/hawq/etc/gpcheck.cnf. This file is used by 'hawq check' command, which can be run manually by gpadmin user on the HAWQ master host. This command validates the system parameters and HDFS parameters mentioned in this file to ensure optimal HAWQ operation.</description>
-        <value>
-[global]
-configfile_version = 4
-
-[linux.mount]
-mount.points = /
-
-[linux.sysctl]
-sysctl.kernel.shmmax = 500000000
-sysctl.kernel.shmmni = 4096
-sysctl.kernel.shmall = 4000000000
-sysctl.kernel.sem = 250 512000 100 2048
-sysctl.kernel.sysrq = 1
-sysctl.kernel.core_uses_pid = 1
-sysctl.kernel.msgmnb = 65536
-sysctl.kernel.msgmax = 65536
-sysctl.kernel.msgmni = 2048
-sysctl.net.ipv4.tcp_syncookies = 0
-sysctl.net.ipv4.ip_forward = 0
-sysctl.net.ipv4.conf.default.accept_source_route = 0
-sysctl.net.ipv4.tcp_tw_recycle = 1
-sysctl.net.ipv4.tcp_max_syn_backlog = 200000
-sysctl.net.ipv4.conf.all.arp_filter = 1
-sysctl.net.ipv4.ip_local_port_range = 1281 65535
-sysctl.net.core.netdev_max_backlog = 200000
-sysctl.vm.overcommit_memory = 2
-sysctl.fs.nr_open = 3000000
-sysctl.kernel.threads-max = 798720
-sysctl.kernel.pid_max = 798720
-# increase network
-sysctl.net.core.rmem_max = 2097152
-sysctl.net.core.wmem_max = 2097152
-
-[linux.limits]
-soft.nofile = 2900000
-hard.nofile = 2900000
-soft.nproc  = 131072
-hard.nproc  = 131072
-
-[linux.diskusage]
-diskusage.monitor.mounts = /
-diskusage.monitor.usagemax = 90%
-
-[hdfs]
-dfs.mem.namenode.heap = 40960
-dfs.mem.datanode.heap = 6144
-# in hdfs-site.xml
-dfs.support.append = true
-dfs.client.enable.read.from.local = true
-dfs.block.local-path-access.user = gpadmin
-dfs.datanode.max.transfer.threads = 40960
-dfs.client.socket-timeout = 300000000
-dfs.datanode.socket.write.timeout = 7200000
-dfs.namenode.handler.count = 60
-ipc.server.handler.queue.size = 3300
-dfs.datanode.handler.count = 60
-ipc.client.connection.maxidletime = 3600000
-dfs.namenode.accesstime.precision = -1
-    </value>
-    <value-attributes>
-        <type>content</type>
-    </value-attributes>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-check-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-check-env.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-check-env.xml
new file mode 100755
index 0000000..185d175
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-check-env.xml
@@ -0,0 +1,89 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<configuration supports_adding_forbidden="true">
+  <!-- hawq_check.cnf -->
+    <property>
+      <name>content</name>
+      <display-name>Content</display-name>
+      <description>Contents of the configuration file /usr/local/hawq/etc/hawq_check.cnf. This file is used by 'hawq check' command, which can be run manually by gpadmin user on the HAWQ master host. This command validates the system parameters and HDFS parameters mentioned in this file to ensure optimal HAWQ operation.</description>
+        <value>
+[global]
+configfile_version = 4
+
+[linux.mount]
+mount.points = /
+
+[linux.sysctl]
+sysctl.kernel.shmmax = 500000000
+sysctl.kernel.shmmni = 4096
+sysctl.kernel.shmall = 4000000000
+sysctl.kernel.sem = 250 512000 100 2048
+sysctl.kernel.sysrq = 1
+sysctl.kernel.core_uses_pid = 1
+sysctl.kernel.msgmnb = 65536
+sysctl.kernel.msgmax = 65536
+sysctl.kernel.msgmni = 2048
+sysctl.net.ipv4.tcp_syncookies = 0
+sysctl.net.ipv4.ip_forward = 0
+sysctl.net.ipv4.conf.default.accept_source_route = 0
+sysctl.net.ipv4.tcp_tw_recycle = 1
+sysctl.net.ipv4.tcp_max_syn_backlog = 200000
+sysctl.net.ipv4.conf.all.arp_filter = 1
+sysctl.net.ipv4.ip_local_port_range = 1281 65535
+sysctl.net.core.netdev_max_backlog = 200000
+sysctl.vm.overcommit_memory = 2
+sysctl.fs.nr_open = 3000000
+sysctl.kernel.threads-max = 798720
+sysctl.kernel.pid_max = 798720
+# increase network
+sysctl.net.core.rmem_max = 2097152
+sysctl.net.core.wmem_max = 2097152
+
+[linux.limits]
+soft.nofile = 2900000
+hard.nofile = 2900000
+soft.nproc  = 131072
+hard.nproc  = 131072
+
+[linux.diskusage]
+diskusage.monitor.mounts = /
+diskusage.monitor.usagemax = 90%
+
+[hdfs]
+dfs.mem.namenode.heap = 40960
+dfs.mem.datanode.heap = 6144
+# in hdfs-site.xml
+dfs.support.append = true
+dfs.client.enable.read.from.local = true
+dfs.block.local-path-access.user = gpadmin
+dfs.datanode.max.transfer.threads = 40960
+dfs.client.socket-timeout = 300000000
+dfs.datanode.socket.write.timeout = 7200000
+dfs.namenode.handler.count = 60
+ipc.server.handler.queue.size = 3300
+dfs.datanode.handler.count = 60
+ipc.client.connection.maxidletime = 3600000
+dfs.namenode.accesstime.precision = -1
+    </value>
+    <value-attributes>
+        <type>content</type>
+    </value-attributes>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml
index fd145f4..656d3a1 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml
@@ -152,7 +152,7 @@
       <configuration-dependencies>
         <config-type>hawq-env</config-type>
         <config-type>hawq-site</config-type>
-        <config-type>gpcheck-env</config-type>
+        <config-type>hawq-check-env</config-type>
         <config-type>hdfs-client</config-type>
         <config-type>yarn-client</config-type>
         <config-type>hawq-limits-env</config-type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawq_constants.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawq_constants.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawq_constants.py
index 6a2d9ba..9fdbde5 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawq_constants.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawq_constants.py
@@ -48,7 +48,7 @@ sysctl_conf_dir = "/etc/sysctl.d"
 hawq_slaves_file = os.path.join(hawq_config_dir, "slaves")
 hawq_greenplum_path_file = os.path.join(hawq_home_dir, "greenplum_path.sh")
 hawq_hosts_file = "/tmp/hawq_hosts"
-hawq_check_file = os.path.join(hawq_config_dir, "gpcheck.cnf")
+hawq_check_file = os.path.join(hawq_config_dir, "hawq_check.cnf")
 sysctl_suse_file = "/etc/sysctl.conf"
 sysctl_backup_file = "/etc/sysctl.conf.backup.{0}"
 hawq_sysctl_filename = "hawq_sysctl.conf"

http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/master_helper.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/master_helper.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/master_helper.py
index 2e0b6da..c5f891b 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/master_helper.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/master_helper.py
@@ -34,7 +34,7 @@ def __setup_master_specific_conf_files():
   """
   import params
 
-  File(hawq_constants.hawq_check_file, content=params.gpcheck_content, owner=hawq_constants.hawq_user, group=hawq_constants.hawq_group,
+  File(hawq_constants.hawq_check_file, content=params.hawq_check_content, owner=hawq_constants.hawq_user, group=hawq_constants.hawq_group,
       mode=0644)
 
   File(hawq_constants.hawq_slaves_file, content=Template("slaves.j2"), owner=hawq_constants.hawq_user, group=hawq_constants.hawq_group,

http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
index add3c63..48b933e 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
@@ -107,7 +107,7 @@ rm_host = __get_component_host('rm_host')
 yarn_ha_enabled = default('/configurations/yarn-site/yarn.resourcemanager.ha.enabled', False)
 
 # Config files
-gpcheck_content = config['configurations']['gpcheck-env']['content']
+hawq_check_content = config['configurations']['hawq-check-env']['content']
 # database user limits
 hawq_limits = config['configurations']['hawq-limits-env']
 # sysctl parameters

http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-server/src/test/python/stacks/2.3/common/services-hawq-1-host.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/services-hawq-1-host.json b/ambari-server/src/test/python/stacks/2.3/common/services-hawq-1-host.json
index 09e305f..515ba7d 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/services-hawq-1-host.json
+++ b/ambari-server/src/test/python/stacks/2.3/common/services-hawq-1-host.json
@@ -113,7 +113,7 @@
         "service_name" : "HAWQ",
         "stack_name" : "HDP",
         "stack_version" : "2.3",
-        "type" : "gpcheck-env.xml"
+        "type" : "hawq-check-env.xml"
       },
       "dependencies" : [ ]
     }, {

http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-server/src/test/python/stacks/2.3/common/services-hawq-3-hosts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/services-hawq-3-hosts.json b/ambari-server/src/test/python/stacks/2.3/common/services-hawq-3-hosts.json
index 09e305f..515ba7d 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/services-hawq-3-hosts.json
+++ b/ambari-server/src/test/python/stacks/2.3/common/services-hawq-3-hosts.json
@@ -113,7 +113,7 @@
         "service_name" : "HAWQ",
         "stack_name" : "HDP",
         "stack_version" : "2.3",
-        "type" : "gpcheck-env.xml"
+        "type" : "hawq-check-env.xml"
       },
       "dependencies" : [ ]
     }, {

http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-server/src/test/python/stacks/2.3/common/services-hawq-pxf-hdfs.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/services-hawq-pxf-hdfs.json b/ambari-server/src/test/python/stacks/2.3/common/services-hawq-pxf-hdfs.json
index 4cd2d4d..0bf459d 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/services-hawq-pxf-hdfs.json
+++ b/ambari-server/src/test/python/stacks/2.3/common/services-hawq-pxf-hdfs.json
@@ -111,7 +111,7 @@
             "service_name" : "HAWQ",
             "stack_name" : "HDP",
             "stack_version" : "2.3",
-            "type" : "gpcheck-env.xml"
+            "type" : "hawq-check-env.xml"
           },
           "dependencies" : [ ]
         },
@@ -3487,4 +3487,4 @@
       ]
     }
   ]
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-server/src/test/python/stacks/2.3/common/services-master_ambari_colo-3-hosts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/services-master_ambari_colo-3-hosts.json b/ambari-server/src/test/python/stacks/2.3/common/services-master_ambari_colo-3-hosts.json
index 560e1db..1657ccf 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/services-master_ambari_colo-3-hosts.json
+++ b/ambari-server/src/test/python/stacks/2.3/common/services-master_ambari_colo-3-hosts.json
@@ -113,7 +113,7 @@
         "service_name" : "HAWQ",
         "stack_name" : "HDP",
         "stack_version" : "2.3",
-        "type" : "gpcheck-env.xml"
+        "type" : "hawq-check-env.xml"
       },
       "dependencies" : [ ]
     }, {

http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-server/src/test/python/stacks/2.3/common/services-master_standby_colo-3-hosts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/services-master_standby_colo-3-hosts.json b/ambari-server/src/test/python/stacks/2.3/common/services-master_standby_colo-3-hosts.json
index 93d6ec1..cd5d02c 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/services-master_standby_colo-3-hosts.json
+++ b/ambari-server/src/test/python/stacks/2.3/common/services-master_standby_colo-3-hosts.json
@@ -113,7 +113,7 @@
         "service_name" : "HAWQ",
         "stack_name" : "HDP",
         "stack_version" : "2.3",
-        "type" : "gpcheck-env.xml"
+        "type" : "hawq-check-env.xml"
       },
       "dependencies" : [ ]
     }, {

http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-server/src/test/python/stacks/2.3/common/services-normal-hawq-3-hosts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/services-normal-hawq-3-hosts.json b/ambari-server/src/test/python/stacks/2.3/common/services-normal-hawq-3-hosts.json
index d10a976..5495d77 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/services-normal-hawq-3-hosts.json
+++ b/ambari-server/src/test/python/stacks/2.3/common/services-normal-hawq-3-hosts.json
@@ -113,7 +113,7 @@
         "service_name" : "HAWQ",
         "stack_name" : "HDP",
         "stack_version" : "2.3",
-        "type" : "gpcheck-env.xml"
+        "type" : "hawq-check-env.xml"
       },
       "dependencies" : [ ]
     }, {

http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-server/src/test/python/stacks/2.3/common/services-standby_ambari_colo-3-hosts.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/common/services-standby_ambari_colo-3-hosts.json b/ambari-server/src/test/python/stacks/2.3/common/services-standby_ambari_colo-3-hosts.json
index 170c740..92a8e58 100644
--- a/ambari-server/src/test/python/stacks/2.3/common/services-standby_ambari_colo-3-hosts.json
+++ b/ambari-server/src/test/python/stacks/2.3/common/services-standby_ambari_colo-3-hosts.json
@@ -113,7 +113,7 @@
         "service_name" : "HAWQ",
         "stack_name" : "HDP",
         "stack_version" : "2.3",
-        "type" : "gpcheck-env.xml"
+        "type" : "hawq-check-env.xml"
       },
       "dependencies" : [ ]
     }, {

http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-web/app/data/HDP2.3/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2.3/site_properties.js b/ambari-web/app/data/HDP2.3/site_properties.js
index 61cbe88..c5fb525 100644
--- a/ambari-web/app/data/HDP2.3/site_properties.js
+++ b/ambari-web/app/data/HDP2.3/site_properties.js
@@ -342,8 +342,8 @@ hdp23properties.push({
   {
     "name": "content",
     "serviceName": "HAWQ",
-    "filename": "gpcheck-env.xml",
-    "category": "AdvancedGpcheck"
+    "filename": "hawq-check-env.xml",
+    "category": "AdvancedHawqCheck"
   }
 );
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/529c588a/ambari-web/app/models/stack_service.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/stack_service.js b/ambari-web/app/models/stack_service.js
index 5814386..281e0d8 100644
--- a/ambari-web/app/models/stack_service.js
+++ b/ambari-web/app/models/stack_service.js
@@ -159,7 +159,7 @@ App.StackService = DS.Model.extend({
     var configTypes = this.get('configTypes');
     var serviceComponents = this.get('serviceComponents');
     if (configTypes && Object.keys(configTypes).length) {
-      var pattern = ["General", "CapacityScheduler", "FaultTolerance", "Isolation", "Performance", "HIVE_SERVER2", "KDC", "Kadmin","^Advanced", "Env$", "^Custom", "Falcon - Oozie integration", "FalconStartupSite", "FalconRuntimeSite", "MetricCollector", "Settings$", "AdvancedGpcheck"];
+      var pattern = ["General", "CapacityScheduler", "FaultTolerance", "Isolation", "Performance", "HIVE_SERVER2", "KDC", "Kadmin","^Advanced", "Env$", "^Custom", "Falcon - Oozie integration", "FalconStartupSite", "FalconRuntimeSite", "MetricCollector", "Settings$", "AdvancedHawqCheck"];
       configCategories = App.StackService.configCategories.call(this).filter(function (_configCategory) {
         var serviceComponentName = _configCategory.get('name');
         var isServiceComponent = serviceComponents.someProperty('componentName', serviceComponentName);
@@ -357,7 +357,7 @@ App.StackService.configCategories = function () {
     case 'HAWQ':
       serviceConfigCategories.pushObjects([
         App.ServiceConfigCategory.create({ name: 'General', displayName: 'General'}),
-        App.ServiceConfigCategory.create({ name: 'AdvancedGpcheck', displayName: 'Advanced gpcheck'})
+        App.ServiceConfigCategory.create({ name: 'AdvancedHawqCheck', displayName: 'Advanced HAWQ Check'})
       ]);
       break;
     default:
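
Because the config-type name is referenced from the stack metainfo, the
agent-side scripts (hawq_constants.py, master_helper.py, params.py), the
stack-advisor test fixtures, and the web UI, the rename has to land in all
of them in one commit. In params terms the lookup after the rename reduces
to the following sketch (the 'config' dict is a stand-in for the command
JSON the scripts actually receive):

config = {
    'configurations': {
        'hawq-check-env': {'content': '[global]\nconfigfile_version = 4\n'}
    }
}
# Mirrors the renamed lookup in params.py; the old 'gpcheck-env' key
# no longer exists anywhere in the stack definition.
hawq_check_content = config['configurations']['hawq-check-env']['content']
assert 'configfile_version' in hawq_check_content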


[31/33] ambari git commit: AMBARI-15084 Cover service views with unit tests. (atkach)

Posted by nc...@apache.org.
AMBARI-15084 Cover service views with unit tests. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1bd4c232
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1bd4c232
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1bd4c232

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 1bd4c2329192157f0977bb7e1941485202b04837
Parents: 84ca6a8
Author: Andrii Tkach <at...@hortonworks.com>
Authored: Thu Feb 18 09:51:59 2016 +0200
Committer: Andrii Tkach <at...@hortonworks.com>
Committed: Thu Feb 18 09:51:59 2016 +0200

----------------------------------------------------------------------
 ambari-web/app/assets/test/tests.js             |   2 +
 ambari-web/app/models/service.js                |   2 +-
 .../app/views/common/chart/linear_time.js       |   4 +
 ambari-web/app/views/main/service/info/menu.js  |  45 +-
 .../app/views/main/service/info/summary.js      | 154 ++--
 .../service/reassign/step4_controller_test.js   |   5 +-
 .../test/views/common/chart/linear_time_test.js |   3 +-
 .../stack_upgrade/upgrade_wizard_view_test.js   |   5 -
 .../service/info/component_list_view_test.js    | 153 ++++
 .../test/views/main/service/info/config_test.js |  99 ++-
 .../test/views/main/service/info/menu_test.js   |  93 ++
 .../views/main/service/info/summary_test.js     | 852 +++++++++++++------
 12 files changed, 1040 insertions(+), 377 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd4c232/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index 0695d06..06c4c31 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -270,6 +270,8 @@ var files = [
   'test/views/main/service/item_test',
   'test/views/main/service/info/config_test',
   'test/views/main/service/info/summary_test',
+  'test/views/main/service/info/menu_test',
+  'test/views/main/service/info/component_list_view_test',
   'test/views/main/service/info/metrics/ambari_metrics/regionserver_base_test',
   'test/views/main/service/info/metrics/flume/flume_agent_metrics_section_test',
   'test/views/main/service/services/ranger_test',

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd4c232/ambari-web/app/models/service.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/service.js b/ambari-web/app/models/service.js
index 39a6b1c..c1ed776 100644
--- a/ambari-web/app/models/service.js
+++ b/ambari-web/app/models/service.js
@@ -229,7 +229,7 @@ App.Service.Health = {
  * association between service and extended model name
  * @type {Object}
  */
-  App.Service.extendedModel = {
+App.Service.extendedModel = {
   'HDFS': 'HDFSService',
   'HBASE': 'HBaseService',
   'YARN': 'YARNService',

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd4c232/ambari-web/app/views/common/chart/linear_time.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/chart/linear_time.js b/ambari-web/app/views/common/chart/linear_time.js
index 064bec3..864203d 100644
--- a/ambari-web/app/views/common/chart/linear_time.js
+++ b/ambari-web/app/views/common/chart/linear_time.js
@@ -199,6 +199,10 @@ App.ChartLinearTimeView = Ember.View.extend(App.ExportMetricsMixin, {
     });
   },
 
+  setCurrentTimeIndex: function () {
+    this.set('currentTimeIndex', this.get('parentView.currentTimeRangeIndex'));
+  }.observes('parentView.currentTimeRangeIndex'),
+
   /**
    * Maps server data into series format ready for export to graph and JSON formats
    * @param jsonData
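
This hunk hoists the setCurrentTimeIndex observer into the shared
App.ChartLinearTimeView base class, so the summary view can drop the
anonymous per-graph extend that used to add it (see the
constructGraphObjects hunk later in this commit). In Python terms the
refactoring idea looks roughly like this (names are illustrative, not
Ambari code):

class ChartBase:
    """Shared behavior defined once on the base class."""
    def __init__(self, parent):
        self.parent = parent
        self.current_time_index = parent.current_time_range_index

    def on_parent_range_changed(self):
        # Equivalent of observes('parentView.currentTimeRangeIndex'):
        # keep the chart's time range in sync with its parent view.
        self.current_time_index = self.parent.current_time_range_index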

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd4c232/ambari-web/app/views/main/service/info/menu.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/info/menu.js b/ambari-web/app/views/main/service/info/menu.js
index 3de23fd..3533a72 100644
--- a/ambari-web/app/views/main/service/info/menu.js
+++ b/ambari-web/app/views/main/service/info/menu.js
@@ -21,29 +21,46 @@ var App = require('app');
 App.MainServiceInfoMenuView = Em.CollectionView.extend({
   tagName: 'ul',
   classNames: ["nav", "nav-tabs", "background-text"],
-  content:function(){
+  content: function () {
     var menuItems = [
-      { label: Em.I18n.t('services.service.info.menu.summary'), id: 'summary-service-tab',routing:'summary', active:"active"}
-      //{ label:'Audit', routing:'audit'}
+      {
+        label: Em.I18n.t('services.service.info.menu.summary'),
+        id: 'summary-service-tab',
+        routing: 'summary',
+        active: "active"
+      }
     ];
 
-    if(this.get('heatmapTab')) menuItems.push({ label: Em.I18n.t('services.service.info.menu.heatmaps'), id: 'heatmap-service-tab', routing:'heatmaps'});
-    if(this.get('configTab')) menuItems.push({ label: Em.I18n.t('services.service.info.menu.configs'), id: 'configs-service-tab', routing:'configs'});
+    if (this.get('heatmapTab')) {
+      menuItems.push({
+        label: Em.I18n.t('services.service.info.menu.heatmaps'),
+        id: 'heatmap-service-tab',
+        routing: 'heatmaps'
+      });
+    }
+    if (this.get('configTab')) {
+      menuItems.push({
+        label: Em.I18n.t('services.service.info.menu.configs'),
+        id: 'configs-service-tab',
+        routing: 'configs'
+      });
+    }
     return menuItems;
   }.property(),
 
-  init: function(){ this._super(); this.activateView(); },
+  init: function () {
+    this._super();
+    this.activateView();
+  },
 
-  activateView:function () {
-    $.each(this._childViews, function () {
-      this.set('active', (document.URL.endsWith(this.get('content.routing')) ? "active" : ""));
-    });
+  activateView: function () {
+    this.get('_childViews').forEach(function(view) {
+      view.set('active', (document.URL.endsWith(view.get('content.routing')) ? "active" : ""));
+    }, this);
   }.observes('App.router.location.lastSetURL'),
 
-  deactivateChildViews: function() {
-    $.each(this._childViews, function(){
-      this.set('active', "");
-    });
+  deactivateChildViews: function () {
+    this.get('_childViews').setEach('active', '');
   },
 
   itemViewClass: Em.View.extend({
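
The menu refactor above trades jQuery-style $.each over the private
_childViews array for Ember's own collection API (forEach and setEach)
while keeping the behavior: a tab is active exactly when the current URL
ends with its route. A small Python analogue of that active-state rule
(illustrative only):

def mark_active(tabs, current_url):
    # Each tab is a dict with 'routing' and 'active' keys, mirroring
    # the menu items built in content().
    for tab in tabs:
        tab['active'] = 'active' if current_url.endswith(tab['routing']) else ''

def deactivate_all(tabs):
    for tab in tabs:
        tab['active'] = ''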

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd4c232/ambari-web/app/views/main/service/info/summary.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/info/summary.js b/ambari-web/app/views/main/service/info/summary.js
index d0aadc0..2482913 100644
--- a/ambari-web/app/views/main/service/info/summary.js
+++ b/ambari-web/app/views/main/service/info/summary.js
@@ -89,7 +89,7 @@ App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin
   hasManyServers: Em.computed.gt('servers.length', 1),
 
   clientsHostText: function () {
-    if (this.get('controller.content.installedClients').length == 0) {
+    if (this.get('controller.content.installedClients').length === 0) {
       return '';
     } else if (this.get("hasManyClients")) {
       return Em.I18n.t('services.service.summary.viewHosts');
@@ -134,9 +134,11 @@ App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin
     return result;
   }.property('controller.content'),
 
-  historyServerUI: function(){
-    var service=this.get('controller.content');
-    return (App.singleNodeInstall ? "http://" + App.singleNodeAlias + ":19888" : "http://" + service.get("hostComponents").findProperty('isMaster', true).get("host").get("publicHostName")+":19888");
+  historyServerUI: function () {
+    var master = this.get('controller.content.hostComponents').findProperty('isMaster');
+    return (App.singleNodeInstall
+      ? "http://" + App.singleNodeAlias + ":19888"
+      : "http://" + master.get("host.publicHostName") + ":19888");
   }.property('controller.content'),
 
   /**
@@ -189,7 +191,7 @@ App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin
   updateComponentList: function(source, data) {
     var sourceIds = source.mapProperty('id');
     var dataIds = data.mapProperty('id');
-    if (sourceIds.length == 0) {
+    if (sourceIds.length === 0) {
       source.pushObjects(data);
     }
     if (source.length > data.length) {
@@ -216,13 +218,14 @@ App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin
   },
 
   /**
+   * @type {Em.View}
    * Wrapper for displayName. used to render correct display name for mysql_server
    */
   componentNameView: Ember.View.extend({
     template: Ember.Handlebars.compile('{{view.displayName}}'),
     comp : null,
-    displayName: function(){
-      if(this.get('comp.componentName') == 'MYSQL_SERVER'){
+    displayName: function () {
+      if (this.get('comp.componentName') === 'MYSQL_SERVER') {
         return this.t('services.hive.databaseComponent');
       }
       return this.get('comp.displayName');
@@ -232,43 +235,32 @@ App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin
   service: null,
 
   getServiceModel: function (serviceName) {
-    var svc = App.Service.find(serviceName);
-    var svcName = svc.get('serviceName');
-    if (svcName) {
-      switch (svcName.toLowerCase()) {
-        case 'hdfs':
-          svc = App.HDFSService.find().objectAt(0);
-          break;
-        case 'yarn':
-          svc = App.YARNService.find().objectAt(0);
-          break;
-        case 'hbase':
-          svc = App.HBaseService.find().objectAt(0);
-          break;
-        case 'flume':
-          svc = App.FlumeService.find().objectAt(0);
-          break;
-        case 'storm':
-          svc = App.StormService.find().objectAt(0);
-          break;
-        default:
-          break;
-      }
+    var extended = App.Service.extendedModel[serviceName];
+    if (extended) {
+      return App[extended].find().objectAt(0);
     }
-    return svc;
+    return App.Service.find(serviceName);
   },
 
-  isHide:true,
-  moreStatsView:Em.View.extend({
-    tagName:"a",
-    template:Ember.Handlebars.compile('{{t services.service.summary.moreStats}}'),
-    attributeBindings:[ 'href' ],
-    classNames:[ 'more-stats' ],
-    click:function (event) {
+  /**
+   * @type {boolean}
+   * @default true
+   */
+  isHide: true,
+
+  /**
+   * @type {Em.View}
+   */
+  moreStatsView: Em.View.extend({
+    tagName: "a",
+    template: Ember.Handlebars.compile('{{t services.service.summary.moreStats}}'),
+    attributeBindings: ['href'],
+    classNames: ['more-stats'],
+    click: function (event) {
       this._parentView._parentView.set('isHide', false);
       this.remove();
     },
-    href:'javascript:void(null)'
+    href: 'javascript:void(null)'
   }),
 
   serviceName: Em.computed.alias('service.serviceName'),
@@ -293,7 +285,7 @@ App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin
     return App.AlertDefinition.find().someProperty('serviceName', this.get('controller.content.serviceName'));
   }.property('controller.content.serviceName'),
 
-  updateComponentInformation: function() {
+  updateComponentInformation: function () {
     var hc = this.get('controller.content.restartRequiredHostsAndComponents');
     var hostsCount = 0;
     var componentsCount = 0;
@@ -305,10 +297,11 @@ App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin
     this.set('hostsCount', hostsCount);
   }.observes('controller.content.restartRequiredHostsAndComponents'),
 
-  rollingRestartSlaveComponentName : function() {
+  rollingRestartSlaveComponentName: function() {
     return batchUtils.getRollingRestartComponentName(this.get('serviceName'));
   }.property('serviceName'),
-  rollingRestartActionName : function() {
+
+  rollingRestartActionName: function() {
     var label = null;
     var componentName = this.get('rollingRestartSlaveComponentName');
     if (componentName) {
@@ -349,6 +342,7 @@ App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin
       }, bodyMessage);
     }
   },
+
   rollingRestartStaleConfigSlaveComponents: function (componentName) {
     batchUtils.launchHostComponentRollingRestart(componentName.context, this.get('service.displayName'), this.get('service.passiveState') === "ON", true);
   },
@@ -358,31 +352,25 @@ App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin
    * the array into sections of 5 for displaying on the page
    * (will only display rows with 5 items)
    */
-  constructGraphObjects: function(graphNames) {
-    var result = [], graphObjects = [], chunkSize = this.get('chunkSize');
-    var self = this;
-    var serviceName = this.get('controller.content.serviceName');
-    var stackService = App.StackService.find().findProperty('serviceName', serviceName);
+  constructGraphObjects: function (graphNames) {
+    var self = this,
+        stackService = App.StackService.find(this.get('controller.content.serviceName'));
 
     if (!graphNames && !stackService.get('isServiceWithWidgets')) {
-      self.get('serviceMetricGraphs').clear();
-      self.set('isServiceMetricLoaded', false);
+      this.get('serviceMetricGraphs').clear();
+      this.set('isServiceMetricLoaded', false);
       return;
     }
 
-    // load time range for current service from server
-    self.getUserPref(self.get('persistKey')).complete(function () {
-      var index = self.get('currentTimeRangeIndex');
+      // load time range (currentTimeRangeIndex) for the current service from the server
+    this.getUserPref(self.get('persistKey')).complete(function () {
+      var result = [], graphObjects = [], chunkSize = self.get('chunkSize');
       if (graphNames) {
-        graphNames.forEach(function(graphName) {
-          graphObjects.push(App["ChartServiceMetrics" + graphName].extend({
-            setCurrentTimeIndex: function () {
-              this.set('currentTimeIndex', this.get('parentView.currentTimeRangeIndex'));
-            }.observes('parentView.currentTimeRangeIndex')
-          }));
+        graphNames.forEach(function (graphName) {
+          graphObjects.push(App["ChartServiceMetrics" + graphName].extend());
         });
       }
-      while(graphObjects.length) {
+      while (graphObjects.length) {
         result.push(graphObjects.splice(0, chunkSize));
       }
       self.set('serviceMetricGraphs', result);
@@ -394,6 +382,11 @@ App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin
    * Contains graphs for this particular service
    */
   serviceMetricGraphs: [],
+
+  /**
+   * @type {boolean}
+   * @default false
+   */
   isServiceMetricLoaded: false,
 
   /**
@@ -409,7 +402,7 @@ App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin
   },
 
   getUserPrefErrorCallback: function (request) {
-    if (request.status == 404) {
+    if (request.status === 404) {
       this.postUserPref(this.get('persistKey'), 0);
       this.set('currentTimeRangeIndex', 0);
     }
@@ -543,7 +536,7 @@ App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin
     this._super();
     var svcName = this.get('controller.content.serviceName');
     this.set('service', this.getServiceModel(svcName));
-    var isMetricsSupported = svcName != 'STORM' || App.get('isStormMetricsSupported');
+    var isMetricsSupported = svcName !== 'STORM' || App.get('isStormMetricsSupported');
 
     this.get('controller').getActiveWidgetLayout();
     if (App.get('supports.customizedWidgetLayout')) {
@@ -551,11 +544,35 @@ App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin
     }
 
     if (svcName && isMetricsSupported) {
-      var allServices =  require('data/service_graph_config');
+      var allServices = require('data/service_graph_config');
       this.constructGraphObjects(allServices[svcName.toLowerCase()]);
     }
-    // adjust the summary table height
+    this.adjustSummaryHeight();
+    this.makeSortable();
+    this.addWidgetTooltip();
+    App.loadTimer.finish('Service Summary Page');
+  },
+
+  addWidgetTooltip: function() {
+    Em.run.later(this, function () {
+      App.tooltip($("[rel='add-widget-tooltip']"));
+      // enable description show up on hover
+      $('.thumbnail').hoverIntent(function() {
+        if ($(this).is(':hover')) {
+          $(this).find('.hidden-description').delay(1000).fadeIn(200).end();
+        }
+      }, function() {
+        $(this).find('.hidden-description').stop().hide().end();
+      });
+    }, 1000);
+  },
+
+  /**
+   * adjust the summary table height
+   */
+  adjustSummaryHeight: function() {
     var summaryTable = document.getElementById('summary-info');
+
     if (summaryTable) {
       var rows = $(summaryTable).find('tr');
       if (rows != null && rows.length > 0) {
@@ -567,19 +584,6 @@ App.MainServiceInfoSummaryView = Em.View.extend(App.UserPref, App.TimeRangeMixin
         }
       }
     }
-    this.makeSortable();
-    Em.run.later(this, function () {
-      App.tooltip($("[rel='add-widget-tooltip']"));
-      // enable description show up on hover
-      $('.thumbnail').hoverIntent(function() {
-        if ($(this).is(':hover')) {
-          $(this).find('.hidden-description').delay(1000).fadeIn(200).end();
-        }
-      }, function() {
-        $(this).find('.hidden-description').stop().hide().end();
-      });
-    }, 1000);
-    App.loadTimer.finish('Service Summary Page');
   },
 
   willDestroyElement: function() {
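
Note on the getServiceModel() change above: the per-service switch is replaced by a table lookup on App.Service.extendedModel. A minimal sketch of the idea, assuming the map holds service-name -> extended-model-class-name entries (the entries below are inferred from the removed switch, not copied from the source):

    App.Service.extendedModel = {
      HDFS: 'HDFSService',     // assumed entries -- inferred, not verbatim
      HBASE: 'HBaseService',
      FLUME: 'FlumeService',
      STORM: 'StormService'
    };

    // Adding a service becomes a one-line data change instead of a new case:
    var extended = App.Service.extendedModel['HBASE'];          // 'HBaseService'
    var model = extended ? App[extended].find().objectAt(0)
                         : App.Service.find('HBASE');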

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd4c232/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js b/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js
index 945718f..4406bb0 100644
--- a/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js
+++ b/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js
@@ -952,7 +952,10 @@ describe('App.ReassignMasterWizardStep4Controller', function () {
     it('reassign host does not match current', function () {
       controller.set('content.reassignHosts.source', 'host2');
       controller.startNameNode();
-      expect(controller.updateComponent.calledWith('NAMENODE', ['host1'], 'HDFS', 'Start')).to.be.true;
+      expect(controller.updateComponent.getCall(0).args[0]).to.be.equal('NAMENODE');
+      expect(controller.updateComponent.getCall(0).args[1][0]).to.be.equal('host1');
+      expect(controller.updateComponent.getCall(0).args[2]).to.be.equal('HDFS');
+      expect(controller.updateComponent.getCall(0).args[3]).to.be.equal('Start');
     });
 
     it('reassign host matches current', function () {
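
Note on the assertion rewrite above: a single calledWith(...) check compares all four arguments at once and fails with only a boolean, giving no hint which operand diverged (the host array is compared by deep equality). Checking getCall(0).args element by element localizes the failure:

    var args = controller.updateComponent.getCall(0).args;
    expect(args[0]).to.be.equal('NAMENODE');  // component name
    expect(args[1][0]).to.be.equal('host1');  // first target host
    expect(args[2]).to.be.equal('HDFS');      // service
    expect(args[3]).to.be.equal('Start');     // action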

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd4c232/ambari-web/test/views/common/chart/linear_time_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/common/chart/linear_time_test.js b/ambari-web/test/views/common/chart/linear_time_test.js
index c547249..0484fd1 100644
--- a/ambari-web/test/views/common/chart/linear_time_test.js
+++ b/ambari-web/test/views/common/chart/linear_time_test.js
@@ -302,7 +302,8 @@ describe('App.ChartLinearTimeView', function () {
           parentView: Em.Object.create({
             currentTimeRangeIndex: 2
           })
-        })
+        }),
+        timeUnitSecondsSetter: Em.K
       });
     });
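
Note on the added timeUnitSecondsSetter: Em.K line: Em.K is Ember's shared no-op function, and overriding a hook or observer with it is the stock way to keep create() from running side effects in a unit test. The pattern in isolation:

    var chartView = App.ChartLinearTimeView.create({
      parentView: Em.Object.create({currentTimeRangeIndex: 2}),
      // Em.K === function () {}; the real observer body never runs here.
      timeUnitSecondsSetter: Em.K
    });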
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd4c232/ambari-web/test/views/main/admin/stack_upgrade/upgrade_wizard_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_wizard_view_test.js b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_wizard_view_test.js
index 6349dfe..5630a8c 100644
--- a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_wizard_view_test.js
+++ b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_wizard_view_test.js
@@ -74,11 +74,6 @@ describe('App.upgradeWizardView', function () {
   });
 
   describe("#activeGroup", function () {
-    after(function () {
-      view.reopen({
-        activeGroup: Em.Object.create()
-      });
-    });
     it("status is updated", function () {
       view.reopen({
         activeStatuses: ['IN_PROGRESS'],
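
Note on the removed after() hook: the cleanup appears unnecessary because each remaining test reopens the properties it uses. Where cross-test leakage is a real risk, the safer pattern is a fresh instance per test (a sketch, assuming the suite's existing view factory):

    beforeEach(function () {
      // Fresh instance per test; nothing to restore in after().
      view = App.upgradeWizardView.create({});
    });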

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd4c232/ambari-web/test/views/main/service/info/component_list_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/service/info/component_list_view_test.js b/ambari-web/test/views/main/service/info/component_list_view_test.js
new file mode 100644
index 0000000..1537f2b
--- /dev/null
+++ b/ambari-web/test/views/main/service/info/component_list_view_test.js
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+var App = require('app');
+require('views/main/service/info/menu');
+
+
+describe('App.SummaryMasterComponentsView', function () {
+  var view;
+
+  beforeEach(function () {
+    view = App.SummaryMasterComponentsView.create({
+      controller: Em.Object.create()
+    });
+  });
+
+  describe("#mastersCompWillChange", function() {
+
+    beforeEach(function() {
+      sinon.stub(view, 'removeTooltips');
+    });
+    afterEach(function() {
+      view.removeTooltips.restore();
+    });
+
+    it("removeTooltips should be called", function() {
+      view.mastersCompWillChange();
+      expect(view.removeTooltips.calledOnce).to.be.true;
+    });
+  });
+
+  describe("#mastersCompDidChange", function() {
+
+    beforeEach(function() {
+      sinon.stub(view, 'attachTooltip');
+    });
+    afterEach(function() {
+      view.attachTooltip.restore();
+    });
+
+    it("attachTooltip should be called", function() {
+      view.mastersCompDidChange();
+      expect(view.attachTooltip.calledOnce).to.be.true;
+    });
+  });
+
+  describe("#didInsertElement", function() {
+
+    beforeEach(function() {
+      sinon.stub(view, 'attachTooltip');
+    });
+    afterEach(function() {
+      view.attachTooltip.restore();
+    });
+
+    it("attachTooltip should be called", function() {
+      view.didInsertElement();
+      expect(view.attachTooltip.calledOnce).to.be.true;
+    });
+  });
+
+  describe("#willDestroyElement", function() {
+    var mock = {tooltip: Em.K};
+
+    beforeEach(function() {
+      sinon.spy(mock, 'tooltip');
+      sinon.stub(window, '$').returns(mock);
+    });
+    afterEach(function() {
+      mock.tooltip.restore();
+      window.$.restore();
+    });
+
+    it("tooltip should be called", function() {
+      view.willDestroyElement();
+      expect(mock.tooltip.calledWith('destroy')).to.be.true;
+    });
+  });
+
+  describe("#removeTooltips", function() {
+    var mock = {
+      tooltip: Em.K
+    };
+
+    beforeEach(function() {
+      sinon.spy(mock, 'tooltip');
+      sinon.stub(window, '$').returns(mock);
+    });
+    afterEach(function() {
+      mock.tooltip.restore();
+      window.$.restore();
+    });
+
+    it("tooltip should be called", function() {
+      mock.length = 1;
+      view.removeTooltips();
+      expect(mock.tooltip.calledWith('destroy')).to.be.true;
+    });
+
+    it("tooltip should not be called", function() {
+      mock.length = 0;
+      view.removeTooltips();
+      expect(mock.tooltip.called).to.be.false;
+    });
+  });
+
+  describe("#attachTooltip", function() {
+    var mock = {
+      tooltip: Em.K
+    };
+
+    beforeEach(function() {
+      sinon.stub(App, 'tooltip');
+      sinon.stub(window, '$').returns(mock);
+    });
+    afterEach(function() {
+      App.tooltip.restore();
+      window.$.restore();
+    });
+
+    it("tooltip should be called", function() {
+      mock.length = 1;
+      view.attachTooltip();
+      expect(App.tooltip.calledOnce).to.be.true;
+    });
+
+    it("tooltip should not be called", function() {
+      mock.length = 0;
+      view.attachTooltip();
+      expect(App.tooltip.called).to.be.false;
+    });
+  });
+
+
+
+
+});
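
Note on the new suite above: most of its tests share one pattern -- stub the global jQuery function so every $(...) call returns a hand-rolled mock, assert against the mock, then restore the global. Reduced to its core (the mock shape is illustrative):

    var mock = {tooltip: Em.K, length: 1};

    beforeEach(function () {
      sinon.spy(mock, 'tooltip');
      sinon.stub(window, '$').returns(mock);  // all $(...) calls hit the mock
    });
    afterEach(function () {
      mock.tooltip.restore();
      window.$.restore();  // never leave a global stubbed across suites
    });

    it('destroys tooltips on teardown', function () {
      view.willDestroyElement();
      expect(mock.tooltip.calledWith('destroy')).to.be.true;
    });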

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd4c232/ambari-web/test/views/main/service/info/config_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/service/info/config_test.js b/ambari-web/test/views/main/service/info/config_test.js
index 4e369ea..fb66243 100644
--- a/ambari-web/test/views/main/service/info/config_test.js
+++ b/ambari-web/test/views/main/service/info/config_test.js
@@ -19,6 +19,7 @@
 
 var App = require('app');
 require('views/main/service/info/configs');
+var batchUtils = require('utils/batch_scheduled_requests');
 
 describe('App.MainServiceInfoConfigsView', function () {
 
@@ -26,7 +27,11 @@ describe('App.MainServiceInfoConfigsView', function () {
 
   beforeEach(function () {
     view = App.MainServiceInfoConfigsView.create({
-      controller: Em.Object.create()
+      controller: Em.Object.create({
+        loadStep: Em.K,
+        clearStep: Em.K,
+        content: Em.Object.create()
+      })
     });
   });
 
@@ -101,4 +106,96 @@ describe('App.MainServiceInfoConfigsView', function () {
     });
   });
 
+  describe("#didInsertElement()", function() {
+    var mock = {
+      isLoading: function () {
+        return {
+          done: function (callback) {
+            callback();
+          }
+        }
+      }
+    };
+
+    beforeEach(function() {
+      sinon.stub(App.router, 'get').returns(mock);
+      sinon.stub(view.get('controller'), 'loadStep');
+      sinon.stub(view, 'resetConfigTabSelection');
+      view.didInsertElement();
+    });
+    afterEach(function() {
+      App.router.get.restore();
+      view.get('controller').loadStep.restore();
+      view.resetConfigTabSelection.restore();
+    });
+
+    it("loadStep should be called", function() {
+      expect(view.get('controller').loadStep.calledOnce).to.be.true;
+    });
+
+    it("resetConfigTabSelection should be called", function() {
+      expect(view.resetConfigTabSelection.calledOnce).to.be.true;
+    });
+  });
+
+  describe("#willDestroyElement()", function() {
+
+    beforeEach(function() {
+      sinon.stub(view.get('controller'), 'clearStep');
+    });
+    afterEach(function() {
+      view.get('controller').clearStep.restore();
+    });
+
+    it("resetConfigTabSelection should be called", function() {
+      view.willDestroyElement();
+      expect(view.get('controller').clearStep.calledOnce).to.be.true;
+    });
+  });
+
+  describe("#rollingRestartSlaveComponentName", function() {
+
+    beforeEach(function() {
+      sinon.stub(batchUtils, 'getRollingRestartComponentName', function(input) {
+        return input;
+      });
+    });
+    afterEach(function() {
+      batchUtils.getRollingRestartComponentName.restore();
+    });
+
+    it("should return service name", function() {
+      view.set('controller.content.serviceName', 'S1');
+      view.propertyDidChange('rollingRestartSlaveComponentName');
+      expect(view.get('rollingRestartSlaveComponentName')).to.equal('S1');
+    });
+  });
+
+  describe("#rollingRestartActionName", function() {
+
+    beforeEach(function() {
+      sinon.stub(App.format, 'role', function(input) {
+        return input;
+      });
+    });
+    afterEach(function() {
+      App.format.role.restore();
+    });
+
+    it("should return action name", function() {
+      view.reopen({
+        rollingRestartSlaveComponentName: 'C1'
+      });
+      view.propertyDidChange('rollingRestartActionName');
+      expect(view.get('rollingRestartActionName')).to.equal(Em.I18n.t('rollingrestart.dialog.title').format('C1'));
+    });
+
+    it("should return empty", function() {
+      view.reopen({
+        rollingRestartSlaveComponentName: null
+      });
+      view.propertyDidChange('rollingRestartActionName');
+      expect(view.get('rollingRestartActionName')).to.be.empty;
+    });
+  });
 });
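
Note on the computed-property tests above: Ember caches computed values, so after stubbing a dependency the tests call propertyDidChange(name) to invalidate the cache before the next get(). The technique in outline:

    sinon.stub(App.format, 'role', function (input) { return input; });

    view.reopen({rollingRestartSlaveComponentName: 'C1'});
    // Invalidate the cached computed value so the next get() recomputes
    // through the stubbed dependency.
    view.propertyDidChange('rollingRestartActionName');
    expect(view.get('rollingRestartActionName'))
      .to.equal(Em.I18n.t('rollingrestart.dialog.title').format('C1'));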

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd4c232/ambari-web/test/views/main/service/info/menu_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/service/info/menu_test.js b/ambari-web/test/views/main/service/info/menu_test.js
new file mode 100644
index 0000000..5caa5b0
--- /dev/null
+++ b/ambari-web/test/views/main/service/info/menu_test.js
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+var App = require('app');
+require('views/main/service/info/menu');
+
+
+describe('App.MainServiceInfoMenuView', function () {
+  var view;
+
+  beforeEach(function () {
+    view = App.MainServiceInfoMenuView.create({
+      controller: Em.Object.create()
+    });
+  });
+
+  describe("#content", function() {
+
+    it("heatmapTab and configTab are false", function() {
+      view.setProperties({
+        configTab: false,
+        heatmapTab: false
+      });
+      view.propertyDidChange('content');
+      expect(view.get('content').mapProperty('id')).to.eql(['summary-service-tab']);
+    });
+
+    it("heatmapTab - false, configTab - true", function() {
+      view.setProperties({
+        configTab: true,
+        heatmapTab: false
+      });
+      view.propertyDidChange('content');
+      expect(view.get('content').mapProperty('id')).to.eql(['summary-service-tab', 'configs-service-tab']);
+    });
+
+    it("heatmapTab - true, configTab - false", function() {
+      view.setProperties({
+        configTab: false,
+        heatmapTab: true
+      });
+      view.propertyDidChange('content');
+      expect(view.get('content').mapProperty('id')).to.eql(['summary-service-tab', 'heatmap-service-tab']);
+    });
+
+    it("heatmapTab - true, configTab - true", function() {
+      view.setProperties({
+        configTab: true,
+        heatmapTab: true
+      });
+      view.propertyDidChange('content');
+      expect(view.get('content').mapProperty('id')).to.eql(['summary-service-tab', 'heatmap-service-tab', 'configs-service-tab']);
+    });
+  });
+
+  describe("#activateView()", function() {
+    it("_childViews should be active", function() {
+      view.set('_childViews', [
+        Em.Object.create({active: '', content: {routing: 'login'}})
+      ]);
+      view.activateView();
+      expect(view.get('_childViews')[0].get('active')).to.equal('active');
+    });
+  });
+
+  describe("#deactivateChildViews()", function() {
+    it("_childViews should be deactivated", function() {
+      view.set('_childViews', [
+        Em.Object.create({active: 'active'}),
+        Em.Object.create({active: 'active'})
+      ]);
+      view.deactivateChildViews();
+      expect(view.get('_childViews')[0].get('active')).to.be.empty;
+      expect(view.get('_childViews')[1].get('active')).to.be.empty;
+    });
+  });
+});
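
Note on the expected id arrays above: they pin the tab order as summary, then heatmap, then configs. A sketch of the computed property the expectations imply (reconstructed from the test data, not copied from the view source):

    content: function () {
      var tabs = [{id: 'summary-service-tab'}];
      if (this.get('heatmapTab')) {
        tabs.push({id: 'heatmap-service-tab'});
      }
      if (this.get('configTab')) {
        tabs.push({id: 'configs-service-tab'});
      }
      return tabs;
    }.property('configTab', 'heatmapTab')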

http://git-wip-us.apache.org/repos/asf/ambari/blob/1bd4c232/ambari-web/test/views/main/service/info/summary_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/service/info/summary_test.js b/ambari-web/test/views/main/service/info/summary_test.js
index 5e24490..6a84bb6 100644
--- a/ambari-web/test/views/main/service/info/summary_test.js
+++ b/ambari-web/test/views/main/service/info/summary_test.js
@@ -31,7 +31,8 @@ describe('App.MainServiceInfoSummaryView', function() {
         serviceName: 'HDFS',
         hostComponents: []
       }),
-      getActiveWidgetLayout: Em.K
+      getActiveWidgetLayout: Em.K,
+      loadWidgetLayouts: Em.K
     }),
     alertsController: Em.Object.create(),
     service: Em.Object.create()
@@ -196,270 +197,17 @@ describe('App.MainServiceInfoSummaryView', function() {
 
   });
 
-  describe('#didInsertElement', function () {
-
-    var cases = [
-      {
-        serviceName: 'STORM',
-        isStormMetricsSupported: false,
-        isConstructGraphObjectsCalled: false,
-        title: 'Storm, metrics not supported'
-      },
-      {
-        serviceName: 'STORM',
-        isStormMetricsSupported: true,
-        isConstructGraphObjectsCalled: true,
-        title: 'Storm, metrics supported'
-      },
-      {
-        serviceName: 'HDFS',
-        isConstructGraphObjectsCalled: true,
-        title: 'not Storm'
-      }
-    ];
-
-    beforeEach(function () {
-      sinon.stub(view, 'constructGraphObjects', Em.K);
-      this.mock = sinon.stub(App, 'get');
-    });
-
-    afterEach(function () {
-      view.constructGraphObjects.restore();
-      this.mock.restore();
-    });
-
-    cases.forEach(function (item) {
-      it(item.title, function () {
-        view.set('controller.content.serviceName', item.serviceName);
-        this.mock.withArgs('isStormMetricsSupported').returns(item.isStormMetricsSupported);
-        view.didInsertElement();
-        expect(view.constructGraphObjects.calledOnce).to.equal(item.isConstructGraphObjectsCalled);
-      });
-    });
-
-  });
-
-  describe.skip('#setTimeRange', function () {
-
-    var cases = [
-      {
-        currentTimeRangeIndex: 0,
-        isServiceMetricLoaded: false,
-        graphIds: [],
-        title: 'no event passed'
-      },
-      {
-        event: {},
-        currentTimeRangeIndex: 0,
-        isServiceMetricLoaded: false,
-        graphIds: [],
-        title: 'no event context passed'
-      },
-      {
-        event: {
-          context: {
-            index: 1
-          }
-        },
-        currentTimeRangeIndex: 1,
-        isServiceMetricLoaded: false,
-        graphIds: [],
-        title: 'no service name set'
-      },
-      {
-        event: {
-          context: {
-            index: 2
-          }
-        },
-        serviceName: 'HDFS',
-        currentTimeRangeIndex: 2,
-        isServiceMetricLoaded: true,
-        graphIds: [
-          [
-            'service-metrics-hdfs-space-utilization', 'service-metrics-hdfs-file-operations',
-            'service-metrics-hdfs-block-status', 'service-metrics-hdfs-io', 'service-metrics-hdfs-rpc'
-          ],
-          [
-            'service-metrics-hdfs-gc', 'service-metrics-hdfs-jvm-heap', 'service-metrics-hdfs-jvm-threads'
-          ]
-        ]
-      },
-      {
-        event: {
-          context: {
-            index: 3
-          }
-        },
-        serviceName: 'YARN',
-        currentTimeRangeIndex: 3,
-        isServiceMetricLoaded: true,
-        graphIds: [
-          [
-            'service-metrics-yarn-queue-allocated', 'service-metrics-yarn-queue-memory-resource',
-            'service-metrics-yarn-queue-allocated-container', 'service-metrics-yarn-node-manager-statuses',
-            'service-metrics-yarn-apps-current-states'
-          ],
-          [
-            'service-metrics-yarn-apps-finished-states', 'service-metrics-yarn-rpc', 'service-metrics-yarn-gc',
-            'service-metrics-yarn-jvm-threads', 'service-metrics-yarn-jvm-heap'
-          ]
-        ]
-      },
-      {
-        event: {
-          context: {
-            index: 4
-          }
-        },
-        serviceName: 'HBASE',
-        currentTimeRangeIndex: 4,
-        isServiceMetricLoaded: true,
-        graphIds: [
-          [
-            'service-metrics-hbase-cluster-requests', 'service-metrics-hbase-regionserver-rw-requests',
-            'service-metrics-hbase-regionserver-regions', 'service-metrics-hbase-regionserver-queuesize',
-            'service-metrics-hbase-hlog-split-time'
-          ],
-          [
-            'service-metrics-hbase-hlog-split-size'
-          ]
-        ]
-      },
-      {
-        event: {
-          context: {
-            index: 5
-          }
-        },
-        serviceName: 'AMBARI_METRICS',
-        currentTimeRangeIndex: 5,
-        isServiceMetricLoaded: true,
-        graphIds: [
-          [
-            'service-metrics-ambari-metrics-master-average-load',
-            'service-metrics-ambari-metrics-region-server-store-files',
-            'service-metrics-ambari-metrics-region-server-regions',
-            'service-metrics-ambari-metrics-region-server-requests',
-            'service-metrics-ambari-metrics-region-server-block-cache-hit-percent'
-          ],
-          [
-            'service-metrics-ambari-metrics-region-server-compaction-queue-size'
-          ]
-        ]
-      },
-      {
-        event: {
-          context: {
-            index: 6
-          }
-        },
-        serviceName: 'FLUME',
-        currentTimeRangeIndex: 6,
-        isServiceMetricLoaded: true,
-        graphIds: [
-          [
-            'service-metrics-flume-channel-size-mma', 'service-metrics-flume-channel-size-sum',
-            'service-metrics-flume-incoming_mma', 'service-metrics-flume-incoming_sum',
-            'service-metrics-flume-outgoing_mma'
-          ],
-          [
-            'service-metrics-flume-outgoing_sum'
-          ]
-        ]
-      },
-      {
-        event: {
-          context: {
-            index: 7
-          }
-        },
-        serviceName: 'STORM',
-        currentTimeRangeIndex: 7,
-        isServiceMetricLoaded: true,
-        graphIds: [
-          [
-            'service-metrics-storm-supervisor-allocated', 'service-metrics-storm-executors',
-            'service-metrics-storm-topologies', 'service-metrics-storm-tasks'
-          ]
-        ]
-      },
-      {
-        event: {
-          context: {
-            index: 8
-          }
-        },
-        serviceName: 'KAFKA',
-        chunkSize: 4,
-        currentTimeRangeIndex: 8,
-        isServiceMetricLoaded: true,
-        graphIds: [
-          [
-            'service-metrics-kafka-broker-topic-metrics', 'service-metrics-kafka-controller-metrics',
-            'service-metrics-kafka-controler-status-metrics', 'service-metrics-kafka-replica-manager-metrics'
-          ],
-          [
-            'service-metrics-kafka-log-metrics', 'service-metrics-kafka-replica-fetcher-metrics'
-          ]
-        ]
-      }
-    ];
-
-    beforeEach(function () {
-      sinon.stub(view, 'postUserPref', Em.K);
-      view.setProperties({
-        chunkSize: 5,
-        currentTimeRangeIndex: 0,
-        isServiceMetricLoaded: false,
-        serviceMetricGraphs: []
-      });
-    });
-
-    afterEach(function () {
-      view.postUserPref.restore();
-    });
-
-    cases.forEach(function (item) {
-      it(item.serviceName || item.title, function () {
-        view.set('chunkSize', Em.isNone(item.chunkSize) ? 5 : item.chunkSize);
-        view.set('service.serviceName', item.serviceName);
-        view.setTimeRange(item.event);
-        var graphIndices = [],
-          graphIds = view.get('serviceMetricGraphs').map(function (graphs) {
-          return graphs.map(function (graph) {
-            var graphView = graph.create();
-            graphIndices.push(graphView.get('currentTimeIndex'));
-            return graphView.get('id');
-          });
-        });
-        expect(view.get('currentTimeRangeIndex')).to.equal(item.currentTimeRangeIndex);
-        expect(view.get('isServiceMetricLoaded')).to.equal(item.isServiceMetricLoaded);
-        if (item.event && item.event.context && item.serviceName) {
-          expect(graphIndices.uniq()).to.eql([item.currentTimeRangeIndex]);
-        }
-        expect(graphIds).to.eql(item.graphIds);
-      });
-    });
-
-  });
-
   describe("#restartAllStaleConfigComponents", function () {
 
     describe('trigger restartAllServiceHostComponents', function () {
 
       beforeEach(function () {
-        view = App.MainServiceInfoSummaryView.create({
-          controller: Em.Object.create({
-            content: {
-              serviceName: "HDFS"
-            },
-            getActiveWidgetLayout: Em.K
-          }),
-          service: Em.Object.create({
-            displayName: 'HDFS'
-          })
-        });
+        view.set('controller.content', Em.Object.create({
+          serviceName: "HDFS"
+        }));
+        view.set('service', Em.Object.create({
+          displayName: 'HDFS'
+        }));
         sinon.stub(batchUtils, "restartAllServiceHostComponents", Em.K);
       });
 
@@ -479,25 +227,20 @@ describe('App.MainServiceInfoSummaryView', function() {
       var mainServiceItemController;
 
       beforeEach(function () {
-        view = App.MainServiceInfoSummaryView.create({
-          controller: Em.Object.create({
-            content: {
-              serviceName: "HDFS",
-              hostComponents: [{
-                componentName: 'NAMENODE',
-                workStatus: 'STARTED'
-              }],
-              restartRequiredHostsAndComponents: {
-                "host1": ['NameNode'],
-                "host2": ['DataNode', 'ZooKeeper']
-              }
-            },
-            getActiveWidgetLayout: Em.K
-          }),
-          service: Em.Object.create({
-            displayName: 'HDFS'
-          })
-        });
+        view.set('controller.content', Em.Object.create({
+          serviceName: "HDFS",
+          hostComponents: [{
+            componentName: 'NAMENODE',
+            workStatus: 'STARTED'
+          }],
+          restartRequiredHostsAndComponents: {
+            "host1": ['NameNode'],
+            "host2": ['DataNode', 'ZooKeeper']
+          }
+        }));
+        view.set('service', Em.Object.create({
+          displayName: 'HDFS'
+        }));
         mainServiceItemController = App.MainServiceItemController.create({});
         sinon.stub(mainServiceItemController, 'checkNnLastCheckpointTime', function() {
           return true;
@@ -580,4 +323,555 @@ describe('App.MainServiceInfoSummaryView', function() {
     });
   });
 
+  describe("#clientsHostText", function() {
+
+    it("no installed clients", function() {
+      view.set('controller.content.installedClients', []);
+      view.propertyDidChange('clientsHostText');
+      expect(view.get('clientsHostText')).to.be.empty;
+    });
+
+    it("has many clients", function() {
+      view.set('controller.content.installedClients', [1]);
+      view.reopen({
+        hasManyClients: true
+      });
+      view.propertyDidChange('clientsHostText');
+      expect(view.get('clientsHostText')).to.be.equal(Em.I18n.t('services.service.summary.viewHosts'));
+    });
+
+    it("otherwise", function() {
+      view.set('controller.content.installedClients', [1]);
+      view.reopen({
+        hasManyClients: false
+      });
+      view.propertyDidChange('clientsHostText');
+      expect(view.get('clientsHostText')).to.be.equal(Em.I18n.t('services.service.summary.viewHost'));
+    });
+  });
+
+  describe("#historyServerUI", function() {
+
+    it("singleNodeInstall is true", function() {
+      App.set('singleNodeInstall', true);
+      App.set('singleNodeAlias', 'alias');
+      view.propertyDidChange('historyServerUI');
+      expect(view.get('historyServerUI')).to.equal("http://alias:19888");
+    });
+
+    it("singleNodeInstall is false", function () {
+      App.set('singleNodeInstall', false);
+      view.set('controller.content', Em.Object.create({
+        hostComponents: [
+          Em.Object.create({
+            isMaster: true,
+            host: Em.Object.create({
+              publicHostName: 'host1'
+            })
+          })
+        ]
+      }));
+      view.propertyDidChange('historyServerUI');
+      expect(view.get('historyServerUI')).to.equal("http://host1:19888");
+    });
+  });
+
+  describe("#serversHost", function() {
+
+    it("should return empty object", function() {
+      view.set('controller.content', Em.Object.create({
+        id: 'S1',
+        hostComponents: []
+      }));
+      view.propertyDidChange('serversHost');
+      expect(view.get('serversHost')).to.be.empty;
+    });
+
+    it("should return server object", function() {
+      view.set('controller.content', Em.Object.create({
+        id: 'ZOOKEEPER',
+        hostComponents: [
+          Em.Object.create({
+            isMaster: true
+          })
+        ]
+      }));
+      view.propertyDidChange('serversHost');
+      expect(view.get('serversHost')).to.eql(Em.Object.create({
+        isMaster: true
+      }));
+    });
+  });
+
+  describe("#updateComponentList()", function() {
+
+    it("add components to empty source", function() {
+      var source = [],
+          data = [{id: 1}];
+      view.updateComponentList(source, data);
+      expect(source.mapProperty('id')).to.eql([1]);
+    });
+
+    it("add components to exist source", function() {
+      var source = [{id: 1}],
+        data = [{id: 1}, {id: 2}];
+      view.updateComponentList(source, data);
+      expect(source.mapProperty('id')).to.eql([1, 2]);
+    });
+
+    it("remove components from exist source", function() {
+      var source = [{id: 1}, {id: 2}],
+        data = [{id: 1}];
+      view.updateComponentList(source, data);
+      expect(source.mapProperty('id')).to.eql([1]);
+    });
+  });
+
+  describe("#componentNameView", function () {
+    var componentNameView;
+
+    beforeEach(function () {
+      componentNameView = view.get('componentNameView').create();
+    });
+
+    describe("#displayName", function () {
+
+      it("component is MYSQL_SERVER", function () {
+        componentNameView.set('comp', Em.Object.create({
+          componentName: 'MYSQL_SERVER'
+        }));
+        componentNameView.propertyDidChange('displayName');
+        expect(componentNameView.get('displayName')).to.equal(Em.I18n.t('services.hive.databaseComponent'));
+      });
+
+      it("any component", function () {
+        componentNameView.set('comp', Em.Object.create({
+          componentName: 'C1',
+          displayName: 'c1'
+        }));
+        componentNameView.propertyDidChange('displayName');
+        expect(componentNameView.get('displayName')).to.equal('c1');
+      });
+    });
+  });
+
+
+  describe("#getServiceModel()", function() {
+
+    beforeEach(function() {
+      sinon.stub(App.Service, 'find').returns({serviceName: 'S1'});
+      sinon.stub(App.HDFSService, 'find').returns([{serviceName: 'HDFS'}]);
+    });
+    afterEach(function() {
+      App.Service.find.restore();
+      App.HDFSService.find.restore();
+    });
+
+    it("HDFS service", function() {
+      expect(view.getServiceModel('HDFS')).to.eql({serviceName: 'HDFS'});
+    });
+
+    it("Simple model service", function() {
+      expect(view.getServiceModel('S1')).to.eql({serviceName: 'S1'});
+    });
+  });
+
+  describe("#updateComponentInformation()", function () {
+    it("should count hosts and components", function () {
+      view.set('controller.content.restartRequiredHostsAndComponents', {
+        'host1': ['c1', 'c2']
+      });
+      view.updateComponentInformation();
+      expect(view.get('componentsCount')).to.equal(2);
+      expect(view.get('hostsCount')).to.equal(1);
+    });
+  });
+
+  describe("#rollingRestartSlaveComponentName ", function() {
+
+    beforeEach(function() {
+      sinon.stub(batchUtils, 'getRollingRestartComponentName').returns('C1');
+    });
+    afterEach(function() {
+      batchUtils.getRollingRestartComponentName.restore();
+    });
+
+    it("should returns component name", function() {
+      view.set('serviceName', 'S1');
+      view.propertyDidChange('rollingRestartSlaveComponentName');
+      expect(view.get('rollingRestartSlaveComponentName')).to.equal('C1');
+    });
+  });
+
+  describe("#rollingRestartActionName ", function() {
+
+    beforeEach(function() {
+      sinon.stub(App.format, 'role').returns('C1');
+    });
+    afterEach(function() {
+      App.format.role.restore();
+    });
+
+    it("rollingRestartSlaveComponentName is set", function() {
+      view.reopen({
+        rollingRestartSlaveComponentName: 'C1'
+      });
+      view.propertyDidChange('rollingRestartActionName');
+      expect(view.get('rollingRestartActionName')).to.equal(Em.I18n.t('rollingrestart.dialog.title').format('C1'));
+    });
+
+    it("rollingRestartSlaveComponentName is null", function() {
+      view.reopen({
+        rollingRestartSlaveComponentName: null
+      });
+      view.propertyDidChange('rollingRestartActionName');
+      expect(view.get('rollingRestartActionName')).to.be.null;
+    });
+  });
+
+  describe("#rollingRestartStaleConfigSlaveComponents() ", function() {
+
+    beforeEach(function() {
+      sinon.stub(batchUtils, 'launchHostComponentRollingRestart');
+    });
+    afterEach(function() {
+      batchUtils.launchHostComponentRollingRestart.restore();
+    });
+
+    it("launchHostComponentRollingRestart should be called", function() {
+      view.get('service').setProperties({
+        displayName: 's1',
+        passiveState: 'ON'
+      });
+      view.rollingRestartStaleConfigSlaveComponents({context: 'C1'});
+      expect(batchUtils.launchHostComponentRollingRestart.calledWith(
+        'C1', 's1', true, true
+      )).to.be.true;
+    });
+  });
+
+  describe("#constructGraphObjects()", function() {
+    var mock = Em.Object.create({
+      isServiceWithWidgets: false
+    });
+
+    beforeEach(function() {
+      sinon.stub(App.StackService, 'find').returns(mock);
+      sinon.stub(view, 'getUserPref').returns({
+        complete: function(callback){callback();}
+      })
+    });
+    afterEach(function() {
+      App.StackService.find.restore();
+      view.getUserPref.restore();
+    });
+
+    it("metrics not loaded", function() {
+      mock.set('isServiceWithWidgets', false);
+      view.constructGraphObjects(null);
+      expect(view.get('isServiceMetricLoaded')).to.be.false;
+      expect(view.getUserPref.called).to.be.false;
+    });
+
+    it("metrics loaded", function() {
+      App.ChartServiceMetricsG1 = Em.Object.extend();
+      mock.set('isServiceWithWidgets', true);
+      view.constructGraphObjects(['G1']);
+      expect(view.get('isServiceMetricLoaded')).to.be.true;
+      expect(view.getUserPref.calledOnce).to.be.true;
+      expect(view.get('serviceMetricGraphs')).to.not.be.empty;
+    });
+  });
+
+  describe("#getUserPrefSuccessCallback()", function() {
+
+    it("currentTimeRangeIndex should be set", function() {
+      view.getUserPrefSuccessCallback(1);
+      expect(view.get('currentTimeRangeIndex')).to.equal(1);
+    });
+  });
+
+  describe("#getUserPrefErrorCallback()", function() {
+
+    beforeEach(function() {
+      sinon.stub(view, 'postUserPref');
+    });
+    afterEach(function() {
+      view.postUserPref.restore();
+    });
+
+    it("request.status = 404", function() {
+      view.getUserPrefErrorCallback({status: 404});
+      expect(view.get('currentTimeRangeIndex')).to.equal(0);
+      expect(view.postUserPref.calledOnce).to.be.true;
+    });
+
+    it("request.status = 403", function() {
+      view.getUserPrefErrorCallback({status: 403});
+      expect(view.postUserPref.called).to.be.false;
+    });
+  });
+
+  describe("#widgetActions", function() {
+
+    beforeEach(function() {
+      this.mock = sinon.stub(App, 'isAuthorized');
+      view.setProperties({
+        staticWidgetLayoutActions: [{id: 1}],
+        staticAdminPrivelegeWidgetActions: [{id: 2}],
+        staticGeneralWidgetActions: [{id: 3}]
+      });
+    });
+    afterEach(function() {
+      this.mock.restore();
+    });
+
+    it("not authorized", function() {
+      this.mock.returns(false);
+      view.propertyDidChange('widgetActions');
+      expect(view.get('widgetActions').mapProperty('id')).to.eql([3]);
+    });
+
+    it("is authorized", function() {
+      this.mock.returns(true);
+      App.supports.customizedWidgetLayout = true;
+      view.propertyDidChange('widgetActions');
+      expect(view.get('widgetActions').mapProperty('id')).to.eql([1, 2, 3]);
+    });
+  });
+
+  describe("#doWidgetAction()", function() {
+
+    beforeEach(function() {
+      view.set('controller.action1', Em.K);
+      sinon.stub(view.get('controller'), 'action1');
+    });
+    afterEach(function() {
+      view.get('controller').action1.restore();
+    });
+
+    it("action exist", function() {
+      view.doWidgetAction({context: 'action1'});
+      expect(view.get('controller').action1.calledOnce).to.be.true;
+    });
+  });
+
+  describe("#setTimeRange", function() {
+
+    it("range = 0", function() {
+      var widget = Em.Object.create({
+        widgetType: 'GRAPH',
+        properties: {
+          time_range: '0'
+        }
+      });
+      view.set('controller.widgets', [widget]);
+      view.setTimeRange({context: {value: '0'}});
+      expect(widget.get('properties')['time_range']).to.equal('0')
+    });
+
+    it("range = 1", function() {
+      var widget = Em.Object.create({
+        widgetType: 'GRAPH',
+        properties: {
+          time_range: 0
+        }
+      });
+      view.set('controller.widgets', [widget]);
+      view.setTimeRange({context: {value: '1'}});
+      expect(widget.get('properties')['time_range']).to.equal('1')
+    });
+  });
+
+  describe("#makeSortable()", function() {
+    var mock = {
+      on: function(arg1, arg2, callback) {
+        callback();
+      },
+      off: Em.K,
+      sortable: function() {
+        return {
+          disableSelection: Em.K
+        }
+      }
+    };
+
+    beforeEach(function() {
+      sinon.stub(window, '$').returns(mock);
+      sinon.spy(mock, 'on');
+      sinon.spy(mock, 'off');
+      sinon.spy(mock, 'sortable');
+      view.makeSortable();
+    });
+    afterEach(function() {
+      window.$.restore();
+      mock.on.restore();
+      mock.off.restore();
+      mock.sortable.restore();
+    });
+
+    it("on() should be called", function() {
+      expect(mock.on.calledWith('DOMNodeInserted', '#widget_layout')).to.be.true;
+    });
+
+    it("sortable() should be called", function() {
+      expect(mock.sortable.calledOnce).to.be.true;
+    });
+
+    it("off() should be called", function() {
+      expect(mock.off.calledWith('DOMNodeInserted', '#widget_layout')).to.be.true;
+    });
+  });
+
+  describe('#didInsertElement', function () {
+
+    beforeEach(function () {
+      sinon.stub(view, 'constructGraphObjects', Em.K);
+      this.mock = sinon.stub(App, 'get');
+      sinon.stub(view, 'getServiceModel');
+      sinon.stub(view.get('controller'), 'getActiveWidgetLayout');
+      sinon.stub(view.get('controller'), 'loadWidgetLayouts');
+      sinon.stub(view, 'adjustSummaryHeight');
+      sinon.stub(view, 'makeSortable');
+      sinon.stub(view, 'addWidgetTooltip');
+
+    });
+
+    afterEach(function () {
+      view.constructGraphObjects.restore();
+      this.mock.restore();
+      view.getServiceModel.restore();
+      view.get('controller').getActiveWidgetLayout.restore();
+      view.get('controller').loadWidgetLayouts.restore();
+      view.adjustSummaryHeight.restore();
+      view.makeSortable.restore();
+      view.addWidgetTooltip.restore();
+    });
+
+    it("getServiceModel should be called", function() {
+      view.didInsertElement();
+      expect(view.getServiceModel.calledOnce).to.be.true;
+    });
+    it("adjustSummaryHeight should be called", function() {
+      view.didInsertElement();
+      expect(view.adjustSummaryHeight.calledOnce).to.be.true;
+    });
+    it("addWidgetTooltip should be called", function() {
+      view.didInsertElement();
+      expect(view.addWidgetTooltip.calledOnce).to.be.true;
+    });
+    it("makeSortable should be called", function() {
+      view.didInsertElement();
+      expect(view.makeSortable.calledOnce).to.be.true;
+    });
+    it("getActiveWidgetLayout should be called", function() {
+      view.didInsertElement();
+      expect(view.get('controller').getActiveWidgetLayout.calledOnce).to.be.true;
+    });
+
+    describe("serviceName is null, metrics not supported, widgets not supported", function() {
+      beforeEach(function () {
+        view.set('controller.content.serviceName', null);
+        this.mock.returns(false);
+        view.didInsertElement();
+      });
+
+      it("loadWidgetLayouts should not be called", function() {
+        expect(view.get('controller').loadWidgetLayouts.called).to.be.false;
+      });
+      it("constructGraphObjects should not be called", function() {
+        expect(view.constructGraphObjects.called).to.be.false;
+      });
+    });
+
+    describe("serviceName is set, metrics is supported, widgets is supported", function() {
+      beforeEach(function () {
+        view.set('controller.content.serviceName', 'S1');
+        this.mock.returns(true);
+        view.didInsertElement();
+      });
+
+      it("loadWidgetLayouts should be called", function() {
+        expect(view.get('controller').loadWidgetLayouts.calledOnce).to.be.true;
+      });
+      it("constructGraphObjects should be called", function() {
+        expect(view.constructGraphObjects.calledOnce).to.be.true;
+      });
+    });
+  });
+
+  describe("#addWidgetTooltip()", function() {
+    var mock = {
+      hoverIntent: Em.K
+    };
+
+    beforeEach(function() {
+      sinon.stub(Em.run, 'later', function(arg1, callback) {
+        callback();
+      });
+      sinon.stub(App, 'tooltip');
+      sinon.stub(window, '$').returns(mock);
+      sinon.spy(mock, 'hoverIntent');
+      view.addWidgetTooltip();
+    });
+    afterEach(function() {
+      Em.run.later.restore();
+      App.tooltip.restore();
+      window.$.restore();
+      mock.hoverIntent.restore();
+    });
+
+    it("Em.run.later should be called", function() {
+      expect(Em.run.later.calledOnce).to.be.true;
+    });
+    it("App.tooltip should be called", function() {
+      expect(App.tooltip.calledOnce).to.be.true;
+    });
+    it("hoverIntent should be called", function() {
+      expect(mock.hoverIntent.calledOnce).to.be.true;
+    });
+  });
+
+  describe("#adjustSummaryHeight()", function() {
+    var jQueryMock = {
+      find: Em.K,
+      attr: Em.K
+    };
+
+    beforeEach(function() {
+      sinon.stub(window, '$').returns(jQueryMock);
+      this.mockFind = sinon.stub(jQueryMock, 'find');
+      sinon.spy(jQueryMock, 'attr');
+      this.mockGetElementById = sinon.stub(document, 'getElementById');
+    });
+    afterEach(function() {
+      this.mockGetElementById.restore();
+      window.$.restore();
+      this.mockFind.restore();
+      jQueryMock.attr.restore();
+    });
+
+    it("summary-info not in DOM", function() {
+      this.mockGetElementById.returns(null);
+      view.adjustSummaryHeight();
+      expect(jQueryMock.find.called).to.be.false;
+    });
+
+    it("summary-info has no rows", function() {
+      this.mockGetElementById.returns({});
+      this.mockFind.returns(null);
+      view.adjustSummaryHeight();
+      expect(jQueryMock.find.calledOnce).to.be.true;
+      expect(jQueryMock.attr.called).to.be.false;
+    });
+
+    it("summary-info has rows", function() {
+      this.mockGetElementById.returns({
+        clientHeight: 10
+      });
+      this.mockFind.returns([{}]);
+      view.adjustSummaryHeight();
+      expect(jQueryMock.attr.calledWith('style', "height:20px;")).to.be.true;
+    });
+  });
 });
\ No newline at end of file
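
Note on the constructGraphObjects() tests above: the production code hands control to getUserPref(...).complete(callback), so the suite substitutes a stub whose complete() runs the callback synchronously, letting assertions follow the call inline instead of waiting on AJAX:

    sinon.stub(view, 'getUserPref').returns({
      // Synchronous stand-in for the jqXHR: invoke the callback at once.
      complete: function (callback) { callback(); }
    });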


[17/33] ambari git commit: AMBARI-14912: Add upgrade support for Setting feature (Ajit Kumar via smnaha)

Posted by nc...@apache.org.
AMBARI-14912: Add upgrade support for Setting feature (Ajit Kumar via smnaha)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7d06e8bf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7d06e8bf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7d06e8bf

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 7d06e8bf68abe725620c0eb60cce16959901c747
Parents: a91890a
Author: Nahappan Somasundaram <ns...@hortonworks.com>
Authored: Wed Feb 17 09:06:39 2016 -0800
Committer: Nahappan Somasundaram <ns...@hortonworks.com>
Committed: Wed Feb 17 09:06:39 2016 -0800

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog240.java       | 76 +++++++++++++---
 .../server/upgrade/UpgradeCatalog240Test.java   | 92 ++++++++++++++------
 2 files changed, 132 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7d06e8bf/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
index 2ea326a..d97962f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog240.java
@@ -18,6 +18,7 @@
 
 package org.apache.ambari.server.upgrade;
 
+import com.google.common.collect.Lists;
 import com.google.gson.JsonArray;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
@@ -30,6 +31,8 @@ import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.dao.DaoUtils;
+import org.apache.ambari.server.orm.dao.PermissionDAO;
+import org.apache.ambari.server.orm.dao.ResourceTypeDAO;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
 import org.apache.ambari.server.orm.entities.PermissionEntity;
 import org.apache.ambari.server.state.Cluster;
@@ -37,7 +40,9 @@ import org.apache.ambari.server.state.Clusters;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -58,12 +63,19 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   @Inject
   DaoUtils daoUtils;
 
+  @Inject
+  PermissionDAO permissionDAO;
+
+  @Inject
+  ResourceTypeDAO resourceTypeDAO;
+
   /**
    * Logger.
    */
   private static final Logger LOG = LoggerFactory.getLogger(UpgradeCatalog240.class);
 
-
+  private static final String ID = "id";
+  private static final String SETTING_TABLE = "setting";
 
 
   // ----- Constructors ------------------------------------------------------
@@ -76,7 +88,7 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   @Inject
   public UpgradeCatalog240(Injector injector) {
     super(injector);
-    this.injector = injector;
+    injector.injectMembers(this);
   }
 
   // ----- UpgradeCatalog ----------------------------------------------------
@@ -103,6 +115,7 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
   @Override
   protected void executeDDLUpdates() throws AmbariException, SQLException {
     updateAdminPermissionTable();
+    createSettingTable();
   }
 
   @Override
@@ -115,7 +128,39 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
     addNewConfigurationsFromXml();
     updateAlerts();
     setRoleSortOrder();
+    addSettingPermission();
+  }
+
+  private void createSettingTable() throws SQLException {
+    List<DBAccessor.DBColumnInfo> columns = new ArrayList<>();
 
+    //  Add setting table
+    LOG.info("Creating " + SETTING_TABLE + " table");
+
+    columns.add(new DBAccessor.DBColumnInfo(ID, Long.class, null, null, false));
+    columns.add(new DBAccessor.DBColumnInfo("name", String.class, 255, null, false));
+    columns.add(new DBAccessor.DBColumnInfo("setting_type", String.class, 255, null, false));
+    columns.add(new DBAccessor.DBColumnInfo("content", String.class, 3000, null, false));
+    columns.add(new DBAccessor.DBColumnInfo("updated_by", String.class, 255, "_db", false));
+    columns.add(new DBAccessor.DBColumnInfo("update_timestamp", Long.class, null, null, false));
+    dbAccessor.createTable(SETTING_TABLE, columns, ID);
+    addSequence("setting_id_seq", 0L, false);
+  }
+
+  protected void addSettingPermission() throws SQLException {
+    String administratorPermissionId =
+            permissionDAO.findPermissionByNameAndType("AMBARI.ADMINISTRATOR", resourceTypeDAO.findByName("AMBARI")).getId().toString();
+    String selectRoleSql = "select * from roleauthorization where authorization_id = 'AMBARI.MANAGE_SETTINGS'";
+    if (executeAndCheckEmptyResult(selectRoleSql)) {
+      dbAccessor.insertRow("roleauthorization", new String[]{"authorization_id", "authorization_name"},
+              new String[]{"'AMBARI.MANAGE_SETTINGS'", "'Manage settings'"}, false);
+    }
+
+    String selectPermissionSql = "select * from permission_roleauthorization where authorization_id = 'AMBARI.MANAGE_SETTINGS'";
+    if (executeAndCheckEmptyResult(selectPermissionSql)) {
+      dbAccessor.insertRow("permission_roleauthorization", new String[]{"permission_id", "authorization_id"},
+              new String[]{administratorPermissionId, "'AMBARI.MANAGE_SETTINGS'"}, false);
+    }
   }
 
   protected void updateAlerts() {
@@ -147,21 +192,21 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
 
       Map<AlertDefinitionEntity, List<String>> alertDefinitionParams = new HashMap<>();
       checkedPutToMap(alertDefinitionParams, namenodeLastCheckpointAlertDefinitionEntity,
-              new ArrayList<String>(Arrays.asList("connection.timeout", "checkpoint.time.warning.threshold", "checkpoint.time.critical.threshold")));
+              Lists.newArrayList("connection.timeout", "checkpoint.time.warning.threshold", "checkpoint.time.critical.threshold"));
       checkedPutToMap(alertDefinitionParams, namenodeHAHealthAlertDefinitionEntity,
-              new ArrayList<String>(Arrays.asList("connection.timeout")));
+              Lists.newArrayList("connection.timeout"));
       checkedPutToMap(alertDefinitionParams, nodemanagerHealthAlertDefinitionEntity,
-              new ArrayList<String>(Arrays.asList("connection.timeout")));
+              Lists.newArrayList("connection.timeout"));
       checkedPutToMap(alertDefinitionParams, nodemanagerHealthSummaryAlertDefinitionEntity,
-              new ArrayList<String>(Arrays.asList("connection.timeout")));
+              Lists.newArrayList("connection.timeout"));
       checkedPutToMap(alertDefinitionParams, hiveMetastoreProcessAlertDefinitionEntity,
-              new ArrayList<String>(Arrays.asList("default.smoke.user", "default.smoke.principal", "default.smoke.keytab")));
+              Lists.newArrayList("default.smoke.user", "default.smoke.principal", "default.smoke.keytab"));
       checkedPutToMap(alertDefinitionParams, hiveServerProcessAlertDefinitionEntity,
-              new ArrayList<String>(Arrays.asList("default.smoke.user", "default.smoke.principal", "default.smoke.keytab")));
+              Lists.newArrayList("default.smoke.user", "default.smoke.principal", "default.smoke.keytab"));
       checkedPutToMap(alertDefinitionParams, hiveWebhcatServerStatusAlertDefinitionEntity,
-              new ArrayList<String>(Arrays.asList("default.smoke.user", "connection.timeout")));
+              Lists.newArrayList("default.smoke.user", "connection.timeout"));
       checkedPutToMap(alertDefinitionParams, flumeAgentStatusAlertDefinitionEntity,
-              new ArrayList<String>(Arrays.asList("run.directory")));
+              Lists.newArrayList("run.directory"));
 
       for(Map.Entry<AlertDefinitionEntity, List<String>> entry : alertDefinitionParams.entrySet()){
         AlertDefinitionEntity alertDefinition = entry.getKey();
@@ -186,6 +231,17 @@ public class UpgradeCatalog240 extends AbstractUpgradeCatalog {
     }
   }
 
+  private boolean executeAndCheckEmptyResult(String sql) throws SQLException {
+    try(Statement statement = dbAccessor.getConnection().createStatement();
+        ResultSet resultSet = statement.executeQuery(sql)) {
+      if (resultSet != null && resultSet.next()) {
+        return false;
+      } else {
+        return true;
+      }
+    }
+  }
+
   protected String addParam(String source, List<String> params) {
     JsonObject sourceJson = new JsonParser().parse(source).getAsJsonObject();
     JsonArray parametersJson = sourceJson.getAsJsonArray("parameters");
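
Note on the upgrade code above: addSettingPermission() is written to be re-runnable -- each insert is guarded by a SELECT via executeAndCheckEmptyResult(), so replaying the catalog against an already-upgraded database is a no-op. The guard's if/else also collapses to a single expression (an equivalent form, not a change to the patch):

    private boolean executeAndCheckEmptyResult(String sql) throws SQLException {
      // try-with-resources closes the statement and result set on all paths;
      // "empty" means the query produced no first row.
      try (Statement statement = dbAccessor.getConnection().createStatement();
           ResultSet resultSet = statement.executeQuery(sql)) {
        return resultSet == null || !resultSet.next();
      }
    }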

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d06e8bf/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
index 608a348..a145253 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog240Test.java
@@ -19,91 +19,115 @@
 package org.apache.ambari.server.upgrade;
 
 
+import com.google.inject.Binder;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
+import com.google.inject.Module;
 import com.google.inject.Provider;
-import com.google.inject.persist.PersistService;
 import junit.framework.Assert;
+import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
 import org.apache.ambari.server.orm.dao.StackDAO;
-import org.apache.ambari.server.orm.entities.StackEntity;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
+import org.apache.ambari.server.state.stack.OsFamily;
+import org.easymock.EasyMock;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 import javax.persistence.EntityManager;
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
+import static org.easymock.EasyMock.anyObject;
 import static org.easymock.EasyMock.capture;
 import static org.easymock.EasyMock.createMockBuilder;
 import static org.easymock.EasyMock.createNiceMock;
 import static org.easymock.EasyMock.createStrictMock;
 import static org.easymock.EasyMock.eq;
 import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.expectLastCall;
 import static org.easymock.EasyMock.newCapture;
 import static org.easymock.EasyMock.replay;
 import static org.easymock.EasyMock.reset;
 import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
 
 public class UpgradeCatalog240Test {
-  private Injector injector;
+  private static Injector injector;
   private Provider<EntityManager> entityManagerProvider = createStrictMock(Provider.class);
   private EntityManager entityManager = createNiceMock(EntityManager.class);
-  private UpgradeCatalogHelper upgradeCatalogHelper;
-  private StackEntity desiredStackEntity;
 
 
+  @BeforeClass
+  public static void classSetUp() {
+    injector = Guice.createInjector(new InMemoryDefaultTestModule());
+    injector.getInstance(GuiceJpaInitializer.class);
+  }
 
   @Before
   public void init() {
     reset(entityManagerProvider);
     expect(entityManagerProvider.get()).andReturn(entityManager).anyTimes();
     replay(entityManagerProvider);
-    injector = Guice.createInjector(new InMemoryDefaultTestModule());
-    injector.getInstance(GuiceJpaInitializer.class);
 
-    upgradeCatalogHelper = injector.getInstance(UpgradeCatalogHelper.class);
+    injector.getInstance(UpgradeCatalogHelper.class);
     // inject AmbariMetaInfo to ensure that stacks get populated in the DB
     injector.getInstance(AmbariMetaInfo.class);
     // load the stack entity
     StackDAO stackDAO = injector.getInstance(StackDAO.class);
-    desiredStackEntity = stackDAO.find("HDP", "2.2.0");
+    stackDAO.find("HDP", "2.2.0");
   }
 
   @After
   public void tearDown() {
-    injector.getInstance(PersistService.class).stop();
   }
 
   @Test
-  public void testExecuteDDLUpdates() throws Exception {
-    UpgradeCatalog240 upgradeCatalog240 = injector.getInstance(UpgradeCatalog240.class);
-
+  public void testExecuteDDLUpdates() throws SQLException, AmbariException {
     Capture<DBAccessor.DBColumnInfo> capturedColumnInfo = newCapture();
+    final DBAccessor dbAccessor = createStrictMock(DBAccessor.class);
+    Configuration configuration = createNiceMock(Configuration.class);
+    Connection connection = createNiceMock(Connection.class);
+    Statement statement = createNiceMock(Statement.class);
+    ResultSet resultSet = createNiceMock(ResultSet.class);
+    Capture<List<DBAccessor.DBColumnInfo>> capturedSettingColumns = EasyMock.newCapture();
 
-    DBAccessor dbAccessor = createStrictMock(DBAccessor.class);
     dbAccessor.addColumn(eq("adminpermission"), capture(capturedColumnInfo));
-    expectLastCall().once();
-
-    Field field = AbstractUpgradeCatalog.class.getDeclaredField("dbAccessor");
-    field.set(upgradeCatalog240, dbAccessor);
+    dbAccessor.createTable(eq("setting"), capture(capturedSettingColumns), eq("id"));
+    expect(configuration.getDatabaseUrl()).andReturn(Configuration.JDBC_IN_MEMORY_URL).anyTimes();
+    expect(dbAccessor.getConnection()).andReturn(connection);
+    expect(connection.createStatement()).andReturn(statement);
+    expect(statement.executeQuery(anyObject(String.class))).andReturn(resultSet);
 
     replay(dbAccessor);
-
+    Module module = new Module() {
+      @Override
+      public void configure(Binder binder) {
+        binder.bind(DBAccessor.class).toInstance(dbAccessor);
+        binder.bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+        binder.bind(EntityManager.class).toInstance(entityManager);
+      }
+    };
+
+    Injector injector = Guice.createInjector(module);
+    UpgradeCatalog240 upgradeCatalog240 = injector.getInstance(UpgradeCatalog240.class);
     upgradeCatalog240.executeDDLUpdates();
 
-    verify(dbAccessor);
-
     DBAccessor.DBColumnInfo columnInfo = capturedColumnInfo.getValue();
     Assert.assertNotNull(columnInfo);
     Assert.assertEquals(UpgradeCatalog240.SORT_ORDER_COL, columnInfo.getName());
@@ -111,12 +135,29 @@ public class UpgradeCatalog240Test {
     Assert.assertEquals(Short.class, columnInfo.getType());
     Assert.assertEquals(1, columnInfo.getDefaultValue());
     Assert.assertEquals(false, columnInfo.isNullable());
+
+    Map<String, Class> expectedCaptures = new HashMap<>();
+    expectedCaptures.put("id", Long.class);
+    expectedCaptures.put("name", String.class);
+    expectedCaptures.put("setting_type", String.class);
+    expectedCaptures.put("content", String.class);
+    expectedCaptures.put("updated_by", String.class);
+    expectedCaptures.put("update_timestamp", Long.class);
+
+    Map<String, Class> actualCaptures = new HashMap<>();
+    for(DBAccessor.DBColumnInfo settingColumnInfo : capturedSettingColumns.getValue()) {
+      actualCaptures.put(settingColumnInfo.getName(), settingColumnInfo.getType());
+    }
+    assertEquals(expectedCaptures, actualCaptures);
+
+    verify(dbAccessor);
   }
 
   @Test
   public void testExecuteDMLUpdates() throws Exception {
     Method addNewConfigurationsFromXml = AbstractUpgradeCatalog.class.getDeclaredMethod("addNewConfigurationsFromXml");
     Method updateAlerts = UpgradeCatalog240.class.getDeclaredMethod("updateAlerts");
+    Method addSettingPermission = UpgradeCatalog240.class.getDeclaredMethod("addSettingPermission");
 
     Capture<String> capturedStatements = newCapture(CaptureType.ALL);
 
@@ -126,15 +167,15 @@ public class UpgradeCatalog240Test {
     UpgradeCatalog240 upgradeCatalog240 = createMockBuilder(UpgradeCatalog240.class)
             .addMockedMethod(addNewConfigurationsFromXml)
             .addMockedMethod(updateAlerts)
+            .addMockedMethod(addSettingPermission)
             .createMock();
 
     Field field = AbstractUpgradeCatalog.class.getDeclaredField("dbAccessor");
     field.set(upgradeCatalog240, dbAccessor);
 
     upgradeCatalog240.addNewConfigurationsFromXml();
-    expectLastCall().once();
     upgradeCatalog240.updateAlerts();
-    expectLastCall().once();
+    upgradeCatalog240.addSettingPermission();
 
     replay(upgradeCatalog240, dbAccessor);
 
@@ -171,11 +212,10 @@ public class UpgradeCatalog240Test {
 
     UpgradeCatalog240 upgradeCatalog240 = new UpgradeCatalog240(injector);
     String inputSource = "{\"path\":\"test_path\",\"type\":\"SCRIPT\",\"parameters\":[{\"name\":\"connection.timeout\",\"display_name\":\"Connection Timeout\",\"value\":5.0,\"type\":\"NUMERIC\",\"description\":\"The maximum time before this alert is considered to be CRITICAL\",\"units\":\"seconds\",\"threshold\":\"CRITICAL\"}]}";
-    List<String> params = new ArrayList<String>(Arrays.asList("connection.timeout", "checkpoint.time.warning.threshold", "checkpoint.time.critical.threshold"));
+    List<String> params = new ArrayList<>(Arrays.asList("connection.timeout", "checkpoint.time.warning.threshold", "checkpoint.time.critical.threshold"));
     String expectedSource = "{\"path\":\"test_path\",\"type\":\"SCRIPT\",\"parameters\":[{\"name\":\"connection.timeout\",\"display_name\":\"Connection Timeout\",\"value\":5.0,\"type\":\"NUMERIC\",\"description\":\"The maximum time before this alert is considered to be CRITICAL\",\"units\":\"seconds\",\"threshold\":\"CRITICAL\"},{\"name\":\"checkpoint.time.warning.threshold\",\"display_name\":\"Checkpoint Warning\",\"value\":2.0,\"type\":\"PERCENT\",\"description\":\"The percentage of the last checkpoint time greater than the interval in order to trigger a warning alert.\",\"units\":\"%\",\"threshold\":\"WARNING\"},{\"name\":\"checkpoint.time.critical.threshold\",\"display_name\":\"Checkpoint Critical\",\"value\":2.0,\"type\":\"PERCENT\",\"description\":\"The percentage of the last checkpoint time greater than the interval in order to trigger a critical alert.\",\"units\":\"%\",\"threshold\":\"CRITICAL\"}]}";
 
     String result = upgradeCatalog240.addParam(inputSource, params);
     Assert.assertEquals(result, expectedSource);
   }
-
 }


[27/33] ambari git commit: AMBARI-15063 : Metrics monitor fails on restart - Commit 2 (avijayan)

Posted by nc...@apache.org.
AMBARI-15063 : Metrics monitor fails on restart - Commit 2 (avijayan)

Root cause: with "ps ax -o pid" the pipeline prints only the PID column, so the trailing "grep resource_monitoring" can never match and the script always treated a running monitor as stopped. Dropping "-o pid" keeps the full command line available to grep.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8999aee3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8999aee3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8999aee3

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 8999aee37b2f4129304f6e93b7928d0bafd68f25
Parents: df2dbe1
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Wed Feb 17 15:20:10 2016 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Wed Feb 17 15:20:10 2016 -0800

----------------------------------------------------------------------
 .../conf/unix/ambari-metrics-monitor                           | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8999aee3/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor b/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
index aa4ae02..7464c55 100644
--- a/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
+++ b/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
@@ -131,7 +131,7 @@ case "$1" in
     echo "Checking for previously running Metric Monitor..."
     if [ -f ${PIDFILE} ]; then
       PID=`cat ${PIDFILE}`
-      if [ -z "`ps ax -o pid | grep -w ${PID} | grep resource_monitoring`" ]; then
+      if [ -z "`ps ax | grep -w ${PID} | grep resource_monitoring`" ]; then
         echo "${PIDFILE} found with no process. Removing ${PID}..."
         rm -f ${PIDFILE}
       else
@@ -152,7 +152,7 @@ case "$1" in
     sleep 2
 
     echo "Verifying ${METRIC_MONITOR} process status..."
-    if [ -z "`ps ax -o pid | grep -w ${PID} | grep resource_monitoring`" ]; then
+    if [ -z "`ps ax | grep -w ${PID} | grep resource_monitoring`" ]; then
       if [ -s ${OUTFILE} ]; then
         echo "ERROR: ${METRIC_MONITOR} start failed. For more details, see ${OUTFILE}:"
         echo "===================="
@@ -173,7 +173,7 @@ case "$1" in
     if [ -f ${PIDFILE} ]; then
       PID=`cat ${PIDFILE}`
       echo "Found ${METRIC_MONITOR} PID: $PID"
-      if [ -z "`ps ax -o pid | grep -w ${PID} | grep resource_monitoring`" ]; then
+      if [ -z "`ps ax | grep -w ${PID} | grep resource_monitoring`" ]; then
         echo "${METRIC_MONITOR} not running. Stale PID File at: $PIDFILE"
         retcode=2
       else


[07/33] ambari git commit: AMBARI-14999. Multi-host option for Templated Dashboards. (Prajwal Rao via yusaku)

Posted by nc...@apache.org.
AMBARI-14999. Multi-host option for Templated Dashboards. (Prajwal Rao via yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1e510f59
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1e510f59
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1e510f59

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 1e510f590f9caa9ef7fb4e71e9badd54cb36b5c0
Parents: f41fccf
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Tue Feb 16 12:15:43 2016 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Tue Feb 16 12:15:43 2016 -0800

----------------------------------------------------------------------
 ambari-metrics/ambari-metrics-grafana/README.md |  22 ++++++++-
 .../ambari-metrics/datasource.js                |  49 +++++++++++++++----
 .../screenshots/21-multi-templating.png         | Bin 0 -> 92034 bytes
 3 files changed, 60 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1e510f59/ambari-metrics/ambari-metrics-grafana/README.md
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-grafana/README.md b/ambari-metrics/ambari-metrics-grafana/README.md
index 7eb84a7..af2b5cb 100644
--- a/ambari-metrics/ambari-metrics-grafana/README.md
+++ b/ambari-metrics/ambari-metrics-grafana/README.md
@@ -33,10 +33,12 @@ Use **ambari-metrics** to visualize metrics exposed via AMS in Grafana.
  - [Save Dashboard](#savedash)
  - [Time Ranges](#timerange)
  - [Edit Panel/Graph](#editpanel)
+ - [Templated Dashboards](#templating)
+    - [Multi Host Templated Dashboards](#multi-templating)
 
 
 ----------
-![enter image description here](screenshots/full-dashboard.png)
+![Full Dashboard](screenshots/full-dashboard.png)
 
 ----------
 <a name="installg"></a>
@@ -257,7 +259,23 @@ http://GRAFANA_HOST:3000
 > 10. When you now add a graph, and select your component and metric, the plotted graph will show you metrics for the selected hostname from the dropdown.
 > 11. The legend on the graph will also now update with the selected host.
 
-**Templalted dashboards do support multiple metrics in a single graph.** 
+**Templated dashboards do support multiple metrics in a single graph.** 
 
 
 ![Templating](screenshots/20-templating.png)
+
+---
+
+<a name="multi-templating"></a>
+### Multi Host Templated Dashboards
+
+**Templated dashboards now have the ability to filter graphs based on a single host or multiple hosts.**
+
+> 1. Once you've created your templated dashboard, you can edit it again by clicking on the "cog" at the top and selecting "Templating".
+> 2. Click on "Edit" for your templating variable.
+> 3. To be able to select multiple hosts, set multi-value selection to "enable" and leave multi-format set to "glob".
+> 4. To have an option for all hosts, set All Value to "*" and All format to "wildcard".
+> 5. Hit Update, close the templating variable options, and you should now be able to select multiple hosts from the dropdown (or all hosts at once).
+
+
+![Multi Host Templating](screenshots/21-multi-templating.png)

http://git-wip-us.apache.org/repos/asf/ambari/blob/1e510f59/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js b/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js
index e89ad0e..ca7ea65 100644
--- a/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js
+++ b/ambari-metrics/ambari-metrics-grafana/ambari-metrics/datasource.js
@@ -38,6 +38,7 @@ define([
         }
         var allMetrics = [];
         var appIds = [];
+        //We get a list of components and their associated metrics.
         AmbariMetricsDatasource.prototype.initMetricAppidMapping = function () {
           backendSrv.get(this.url + '/ws/v1/timeline/metrics/metadata')
             .then(function (items) {
@@ -73,7 +74,7 @@ define([
           }
 
           options.url = this.url + options.url;
-          options.inspect = {type: 'discovery'};
+          options.inspect = {type: 'ambarimetrics'};
 
           return backendSrv.datasourceRequest(options);
         };
@@ -82,6 +83,7 @@ define([
          * AMS Datasource  Query
          */
         AmbariMetricsDatasource.prototype.query = function (options) {
+
           var emptyData = function (metric) {
             return {
               data: {
@@ -99,6 +101,7 @@ define([
               }
               var series = [];
               var metricData = res.metrics[0].metrics;
+              // Added hostname to legend for templated dashboards.
               var hostLegend = res.metrics[0].hostname ? ' on ' + res.metrics[0].hostname : '';
               var timeSeries = {};
               if (target.hosts === undefined || target.hosts.trim() === "") {
@@ -122,7 +125,6 @@ define([
             };
 
           };
-
           var getHostAppIdData = function(target) {
             var precision = target.shouldAddPrecision ? '&precision=' + target.precision : '';
             var rate = target.shouldComputeRate ? '._rate._' : '._';
@@ -132,14 +134,16 @@ define([
                 getMetricsData(target)
             );
           };
+          //Check if it's a templated dashboard.
+          var templatedHost = (_.isEmpty(templateSrv.variables)) ? "" : templateSrv.variables[0].options.filter(function(host)
+              { return host.selected; }).map(function(hostName) { return hostName.value; });
 
           var getServiceAppIdData = function(target) {
-            var templatedHost = (_.isEmpty(templateSrv.variables)) ? "" : templateSrv.variables[0].options.filter(function(host)
-              { return host.selected; }).map(function(hostName) { return hostName.value; });
+            var tHost = (_.isEmpty(templateSrv.variables)) ? templatedHost : target.templatedHost;
             var precision = target.shouldAddPrecision ? '&precision=' + target.precision : '';
             var rate = target.shouldComputeRate ? '._rate._' : '._';
             return backendSrv.get(self.url + '/ws/v1/timeline/metrics?metricNames=' + target.metric + rate
-              + target.aggregator + '&hostname=' + templatedHost + '&appId=' + target.app + '&startTime=' + from +
+              + target.aggregator + '&hostname=' + tHost + '&appId=' + target.app + '&startTime=' + from +
               '&endTime=' + to + precision).then(
               getMetricsData(target)
             );
@@ -148,15 +152,43 @@ define([
           // Time Ranges
           var from = Math.floor(options.range.from.valueOf() / 1000);
           var to = Math.floor(options.range.to.valueOf() / 1000);
-          var metricsPromises = _.map(options.targets, function(target) {
+
+          var metricsPromises = [];
+          if (!_.isEmpty(templateSrv.variables)) {
+            if (!_.isEmpty(_.find(templatedHost, function (o) { return o === "*"; }))) {
+              var allHost = templateSrv.variables[0].options.filter(function(all) {
+                return all.text !== "All"; }).map(function(hostName) { return hostName.value; });
+              _.forEach(allHost, function(processHost) {
+              metricsPromises.push(_.map(options.targets, function(target) {
+                target.templatedHost = processHost;
+                console.debug('target app=' + target.app + ',' +
+                  'target metric=' + target.metric + ' on host=' + target.templatedHost);
+                return getServiceAppIdData(target);
+              }));
+            });
+            } else {
+              _.forEach(templatedHost, function(processHost) {
+              metricsPromises.push(_.map(options.targets, function(target) {
+                target.templatedHost = processHost;
+                console.debug('target app=' + target.app + ',' +
+                  'target metric=' + target.metric + ' on host=' + target.templatedHost);
+                return getServiceAppIdData(target);
+              }));
+            });
+            }
+
+            metricsPromises = _.flatten(metricsPromises);
+          } else {
+            metricsPromises = _.map(options.targets, function(target) {
               console.debug('target app=' + target.app + ',' +
-                'target metric=' + target.metric + ' on host=' + target.hosts);
+                'target metric=' + target.metric + ' on host=' + target.tempHost);
               if (!!target.hosts) {
                 return getHostAppIdData(target);
               } else {
                 return getServiceAppIdData(target);
               }
             });
+          }
 
           return $q.all(metricsPromises).then(function(metricsDataArray) {
             var data = _.map(metricsDataArray, function(metricsData) {
@@ -288,8 +320,7 @@ define([
           ]);
           return aggregatorsPromise;
         };
-
         return AmbariMetricsDatasource;
       });
     }
-);
\ No newline at end of file
+);
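
Condensed, the multi-host fan-out added above amounts to the following sketch (simplified for illustration; templateSrv, options, _ and getServiceAppIdData are the names the patch itself uses, but the control flow here is a paraphrase, not the committed code):

    // Hosts currently selected in the templating dropdown.
    var selected = templateSrv.variables[0].options
      .filter(function (host) { return host.selected; })
      .map(function (host) { return host.value; });

    // "*" (the All value) expands to every concrete host, skipping the
    // synthetic "All" entry itself.
    var hosts = selected.indexOf("*") !== -1
      ? templateSrv.variables[0].options
          .filter(function (opt) { return opt.text !== "All"; })
          .map(function (opt) { return opt.value; })
      : selected;

    // One AMS request per host/target pair, flattened into a single list
    // of promises for $q.all().
    var metricsPromises = _.flatten(hosts.map(function (host) {
      return options.targets.map(function (target) {
        target.templatedHost = host;        // read back by getServiceAppIdData
        return getServiceAppIdData(target);
      });
    }));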

http://git-wip-us.apache.org/repos/asf/ambari/blob/1e510f59/ambari-metrics/ambari-metrics-grafana/screenshots/21-multi-templating.png
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-grafana/screenshots/21-multi-templating.png b/ambari-metrics/ambari-metrics-grafana/screenshots/21-multi-templating.png
new file mode 100644
index 0000000..9855302
Binary files /dev/null and b/ambari-metrics/ambari-metrics-grafana/screenshots/21-multi-templating.png differ


[08/33] ambari git commit: AMBARI-14986 Combo Search: Create auto suggest for Service Component filters (Joe Wang via rzang)

Posted by nc...@apache.org.
AMBARI-14986 Combo Search: Create auto suggest for Service Component filters (Joe Wang via rzang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/21fd70c9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/21fd70c9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/21fd70c9

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 21fd70c9bf911d56a91cad514a033fbff3c5bcb4
Parents: 1e510f5
Author: Richard Zang <rz...@apache.org>
Authored: Tue Feb 16 14:27:47 2016 -0800
Committer: Richard Zang <rz...@apache.org>
Committed: Tue Feb 16 14:27:47 2016 -0800

----------------------------------------------------------------------
 ambari-web/app/controllers/main/host/combo_search_box.js | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/21fd70c9/ambari-web/app/controllers/main/host/combo_search_box.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/host/combo_search_box.js b/ambari-web/app/controllers/main/host/combo_search_box.js
index 1c2a87f..d835e97 100644
--- a/ambari-web/app/controllers/main/host/combo_search_box.js
+++ b/ambari-web/app/controllers/main/host/combo_search_box.js
@@ -82,13 +82,10 @@ App.MainHostComboSearchBoxController = Em.Controller.extend({
           ]);
           break;
         case 'service':
-          callback(App.Service.find().toArray().mapProperty('serviceName'));
+          callback(App.Service.find().toArray().mapProperty('serviceName'), {preserveOrder: true});
           break;
         case 'component':
-          callback(App.MasterComponent.find().toArray().mapProperty('componentName')
-              .concat(App.SlaveComponent.find().toArray().mapProperty('componentName'))
-              .concat(App.ClientComponent.find().toArray().mapProperty('componentName'))
-            ,{preserveOrder: true});
+          callback(App.HostComponent.find().toArray().mapProperty('componentName').uniq(), {preserveOrder: true});
           break;
         case 'state':
           callback([


[16/33] ambari git commit: AMBARI-15070. Graph scale behaviour is incorrect if time range is switched before graph data API call is complete (alexantonenko)

Posted by nc...@apache.org.
AMBARI-15070. Graph scale behaviour is incorrect if time range is switched before graph data API call is complete (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a91890a8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a91890a8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a91890a8

Branch: refs/heads/branch-dev-patch-upgrade
Commit: a91890a819e77638d5cb371e49a9a5d0fdebf4d1
Parents: 506bb8d
Author: Alex Antonenko <hi...@gmail.com>
Authored: Wed Feb 17 15:20:14 2016 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Wed Feb 17 17:46:45 2016 +0200

----------------------------------------------------------------------
 .../app/mixins/common/widgets/widget_mixin.js   | 62 +++++++++++----
 ambari-web/app/utils/ajax/ajax.js               | 12 +++
 .../app/views/common/chart/linear_time.js       | 84 +++++++++++++++-----
 .../main/admin/stack_upgrade/versions_view.js   |  6 +-
 .../test/mixins/common/widget_mixin_test.js     | 18 ++++-
 ambari-web/test/utils/ajax/ajax_test.js         | 32 ++++++++
 .../test/views/common/chart/linear_time_test.js | 10 ++-
 7 files changed, 179 insertions(+), 45 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a91890a8/ambari-web/app/mixins/common/widgets/widget_mixin.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/widgets/widget_mixin.js b/ambari-web/app/mixins/common/widgets/widget_mixin.js
index 6d65c33..13e55f2 100644
--- a/ambari-web/app/mixins/common/widgets/widget_mixin.js
+++ b/ambari-web/app/mixins/common/widgets/widget_mixin.js
@@ -120,9 +120,18 @@ App.WidgetMixin = Ember.Mixin.create({
           startCallName: 'getHostComponentMetrics',
           successCallback: this.getHostComponentMetricsSuccessCallback,
           errorCallback: this.getMetricsErrorCallback,
-          completeCallback: function () {
+          completeCallback: function (xhr) {
             requestCounter--;
             if (requestCounter === 0) this.onMetricsLoaded();
+            if (this.get('graphView')) {
+              var graph = this.get('childViews') && this.get('childViews').findProperty('runningRequests');
+              if (graph) {
+                var requestsArrayName = graph.get('isPopup') ? 'runningPopupRequests' : 'runningRequests';
+                graph.set(requestsArrayName, graph.get(requestsArrayName).reject(function (item) {
+                  return item === xhr;
+                }));
+              }
+            }
           }
         });
       } else {
@@ -132,9 +141,18 @@ App.WidgetMixin = Ember.Mixin.create({
           startCallName: 'getServiceComponentMetrics',
           successCallback: this.getMetricsSuccessCallback,
           errorCallback: this.getMetricsErrorCallback,
-          completeCallback: function () {
+          completeCallback: function (xhr) {
             requestCounter--;
             if (requestCounter === 0) this.onMetricsLoaded();
+            if (this.get('graphView')) {
+              var graph = this.get('childViews') && this.get('childViews').findProperty('runningRequests');
+              if (graph) {
+                var requestsArrayName = graph.get('isPopup') ? 'runningPopupRequests' : 'runningRequests';
+                graph.set(requestsArrayName, graph.get(requestsArrayName).reject(function (item) {
+                  return item === xhr;
+                }));
+              }
+            }
           }
         });
       }
@@ -192,7 +210,7 @@ App.WidgetMixin = Ember.Mixin.create({
    * @returns {$.ajax}
    */
   getServiceComponentMetrics: function (request) {
-    return App.ajax.send({
+    var xhr = App.ajax.send({
       name: 'widgets.serviceComponent.metrics.get',
       sender: this,
       data: {
@@ -201,6 +219,14 @@ App.WidgetMixin = Ember.Mixin.create({
         metricPaths: this.prepareMetricPaths(request.metric_paths)
       }
     });
+    if (this.get('graphView')) {
+      var graph = this.get('childViews') && this.get('childViews').findProperty('runningRequests');
+      if (graph) {
+        var requestsArrayName = graph.get('isPopup') ? 'runningPopupRequests' : 'runningRequests';
+        graph.get(requestsArrayName).push(xhr);
+      }
+    }
+    return xhr;
   },
 
   /**
@@ -230,15 +256,21 @@ App.WidgetMixin = Ember.Mixin.create({
     var metricPaths = this.prepareMetricPaths(request.metric_paths);
 
     if (metricPaths.length) {
-      return App.ajax.send({
-        name: 'widgets.hostComponent.metrics.get',
-        sender: this,
-        data: {
-          componentName: request.component_name,
-          metricPaths: this.prepareMetricPaths(request.metric_paths),
-          hostComponentCriteria: this.computeHostComponentCriteria(request)
-        }
-      });
+      var xhr = App.ajax.send({
+          name: 'widgets.hostComponent.metrics.get',
+          sender: this,
+          data: {
+            componentName: request.component_name,
+            metricPaths: this.prepareMetricPaths(request.metric_paths),
+            hostComponentCriteria: this.computeHostComponentCriteria(request)
+          }
+        }),
+        graph = this.get('graphView') && this.get('childViews') && this.get('childViews').findProperty('runningRequests');
+      if (graph) {
+        var requestsArrayName = graph.get('isPopup') ? 'runningPopupRequests' : 'runningRequests';
+        graph.get(requestsArrayName).push(xhr);
+      }
+      return xhr;
     }
     return jQuery.Deferred().reject().promise();
   },
@@ -300,7 +332,7 @@ App.WidgetMixin = Ember.Mixin.create({
    * @param {string} errorThrown
    */
   getMetricsErrorCallback: function (xhr, textStatus, errorThrown) {
-    if (this.get('graphView')) {
+    if (this.get('graphView') && !xhr.isForcedAbort) {
       var graph = this.get('childViews') && this.get('childViews').findProperty('_showMessage');
       if (graph) {
         if (xhr.readyState == 4 && xhr.status) {
@@ -778,9 +810,9 @@ App.WidgetLoadAggregator = Em.Object.create({
                 subRequest.errorCallback.call(subRequest.context, xhr, textStatus, errorThrown);
               }
             }, this);
-          }).always(function () {
+          }).always(function (xhr) {
               _request.subRequests.forEach(function (subRequest) {
-                subRequest.completeCallback.call(subRequest.context);
+                subRequest.completeCallback.call(subRequest.context, xhr);
               }, this);
             });
       })(bulks[id]);
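
The completeCallback bodies in the two branches above are identical; as a possible follow-up (not part of this commit), the bookkeeping could live in one shared helper on the mixin, sketched here with the mixin's own property names:

    // Forget a settled request on the owning graph view, if there is one.
    removeSettledRequest: function (xhr) {
      if (!this.get('graphView')) return;
      var graph = this.get('childViews') && this.get('childViews').findProperty('runningRequests');
      if (graph) {
        var requestsArrayName = graph.get('isPopup') ? 'runningPopupRequests' : 'runningRequests';
        graph.set(requestsArrayName, graph.get(requestsArrayName).reject(function (item) {
          return item === xhr;
        }));
      }
    },

Each completeCallback would then only decrement requestCounter and call this.removeSettledRequest(xhr).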

http://git-wip-us.apache.org/repos/asf/ambari/blob/a91890a8/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index f2174f3..3ccd01c 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -3054,6 +3054,18 @@ var ajax = Em.Object.extend({
    */
   defaultErrorKDCHandler: function(opt, msg) {
     return App.showInvalidKDCPopup(opt, msg);
+  },
+
+  /**
+   * Abort all requests stored in the certain array
+   * @param requestsArray
+   */
+  abortRequests: function (requestsArray) {
+    requestsArray.forEach(function (xhr) {
+      xhr.isForcedAbort = true;
+      xhr.abort();
+    });
+    requestsArray.clear();
   }
 
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a91890a8/ambari-web/app/views/common/chart/linear_time.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/chart/linear_time.js b/ambari-web/app/views/common/chart/linear_time.js
index c7dd3f9..064bec3 100644
--- a/ambari-web/app/views/common/chart/linear_time.js
+++ b/ambari-web/app/views/common/chart/linear_time.js
@@ -167,6 +167,20 @@ App.ChartLinearTimeView = Ember.View.extend(App.ExportMetricsMixin, {
    */
   seriesTemplate: null,
 
+  /**
+   * Incomplete metrics requests
+   * @type {array}
+   * @default []
+   */
+  runningRequests: [],
+
+  /**
+   * Incomplete metrics requests for detailed view
+   * @type {array}
+   * @default []
+   */
+  runningPopupRequests: [],
+
   _containerSelector: Em.computed.format('#{0}-container', 'id'),
 
   _popupSelector: Em.computed.concat('', '_containerSelector', 'popupSuffix'),
@@ -295,6 +309,7 @@ App.ChartLinearTimeView = Ember.View.extend(App.ExportMetricsMixin, {
     this.$("[rel='ZoomInTooltip']").tooltip('destroy');
     $(this.get('_containerSelector') + ' li.line').off();
     $(this.get('_popupSelector') + ' li.line').off();
+    App.ajax.abortRequests(this.get('runningRequests'));
   },
 
   registerGraph: function () {
@@ -307,16 +322,26 @@ App.ChartLinearTimeView = Ember.View.extend(App.ExportMetricsMixin, {
   },
 
   loadData: function () {
-    if (this.get('loadGroup') && !this.get('isPopup')) {
+    var self = this,
+      isPopup = this.get('isPopup');
+    if (this.get('loadGroup') && !isPopup) {
       return App.ChartLinearTimeView.LoadAggregator.add(this, this.get('loadGroup'));
     } else {
-      return App.ajax.send({
-        name: this.get('ajaxIndex'),
-        sender: this,
-        data: this.getDataForAjaxRequest(),
-        success: 'loadDataSuccessCallback',
-        error: 'loadDataErrorCallback'
-      });
+      var requestsArrayName = isPopup ? 'runningPopupRequests' : 'runningRequests',
+        request = App.ajax.send({
+          name: this.get('ajaxIndex'),
+          sender: this,
+          data: this.getDataForAjaxRequest(),
+          success: 'loadDataSuccessCallback',
+          error: 'loadDataErrorCallback',
+          callback: function () {
+            self.set(requestsArrayName, self.get(requestsArrayName).reject(function (item) {
+              return item === request;
+            }));
+          }
+        });
+      this.get(requestsArrayName).push(request);
+      return request;
     }
   },
 
@@ -356,15 +381,17 @@ App.ChartLinearTimeView = Ember.View.extend(App.ExportMetricsMixin, {
   },
 
   loadDataErrorCallback: function (xhr, textStatus, errorThrown) {
-    this.set('isReady', true);
-    if (xhr.readyState == 4 && xhr.status) {
-      textStatus = xhr.status + " " + textStatus;
+    if (!xhr.isForcedAbort) {
+      this.set('isReady', true);
+      if (xhr.readyState == 4 && xhr.status) {
+        textStatus = xhr.status + " " + textStatus;
+      }
+      this._showMessage('warn', this.t('graphs.error.title'), this.t('graphs.error.message').format(textStatus, errorThrown));
+      this.setProperties({
+        hasData: false,
+        isExportButtonHidden: true
+      });
     }
-    this._showMessage('warn', this.t('graphs.error.title'), this.t('graphs.error.message').format(textStatus, errorThrown));
-    this.setProperties({
-      hasData: false,
-      isExportButtonHidden: true
-    });
   },
 
   /**
@@ -999,6 +1026,7 @@ App.ChartLinearTimeView = Ember.View.extend(App.ExportMetricsMixin, {
           customDurationFormatted: targetView.get('customDurationFormatted'),
           isPopup: false
         });
+        App.ajax.abortRequests(this.get('graph.runningPopupRequests'));
         this._super();
       },
 
@@ -1034,6 +1062,7 @@ App.ChartLinearTimeView = Ember.View.extend(App.ExportMetricsMixin, {
         this.set('childViews.firstObject.currentTimeRangeIndex', index);
         this.set('currentTimeIndex', index);
         self.set('currentTimeIndex', index);
+        App.ajax.abortRequests(this.get('graph.runningPopupRequests'));
       },
       currentTimeIndex: self.get('currentTimeIndex'),
 
@@ -1082,6 +1111,9 @@ App.ChartLinearTimeView = Ember.View.extend(App.ExportMetricsMixin, {
       customEndTime: customEndTime,
       customDurationFormatted: customDurationFormatted
     });
+    if (index !== 8 || targetView.get('customStartTime') && targetView.get('customEndTime')) {
+      App.ajax.abortRequests(this.get('runningRequests'));
+    }
   }.observes('parentView.parentView.currentTimeRangeIndex', 'parentView.currentTimeRangeIndex', 'parentView.parentView.customStartTime', 'parentView.customStartTime', 'parentView.parentView.customEndTime', 'parentView.customEndTime'),
   timeUnitSeconds: 3600,
   timeUnitSecondsSetter: function () {
@@ -1408,25 +1440,33 @@ App.ChartLinearTimeView.LoadAggregator = Em.Object.create({
       (function (_request) {
         var fields = self.formatRequestData(_request);
         var hostName = (_request.context.get('content')) ? _request.context.get('content.hostName') : "";
-
-        App.ajax.send({
+        var xhr = App.ajax.send({
           name: _request.name,
           sender: _request.context,
           data: {
             fields: fields,
             hostName: hostName
           }
-        }).done(function (response) {
+        });
+
+        xhr.done(function (response) {
           console.time('==== runRequestsDone');
           _request.subRequests.forEach(function (subRequest) {
             subRequest.context._refreshGraph.call(subRequest.context, response);
           }, this);
           console.timeEnd('==== runRequestsDone');
         }).fail(function (jqXHR, textStatus, errorThrown) {
-          _request.subRequests.forEach(function (subRequest) {
-            subRequest.context.loadDataErrorCallback.call(subRequest.context, jqXHR, textStatus, errorThrown);
-          }, this);
+          if (!jqXHR.isForcedAbort) {
+            _request.subRequests.forEach(function (subRequest) {
+              subRequest.context.loadDataErrorCallback.call(subRequest.context, jqXHR, textStatus, errorThrown);
+            }, this);
+          }
+        }).always(function () {
+          _request.context.set('runningRequests', _request.context.get('runningRequests').reject(function (item) {
+            return item === xhr;
+          }));
         });
+        _request.context.get('runningRequests').push(xhr);
       })(bulks[id]);
     }
   },
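
Taken together, widget_mixin.js, ajax.js and linear_time.js now share one request-tracking pattern. A condensed sketch, using the names from the diffs with a generic Ember view standing in for the chart:

    // 1. Track the request while it is in flight.
    var xhr = App.ajax.send({ /* name, sender, data, callbacks */ });
    view.get('runningRequests').push(xhr);

    // 2. Forget it once it settles, whether it succeeded, failed or was
    //    aborted.
    xhr.always(function () {
      view.set('runningRequests', view.get('runningRequests').reject(function (item) {
        return item === xhr;
      }));
    });

    // 3. On teardown or a time-range switch, abort whatever is still
    //    pending. abortRequests() sets isForcedAbort, which the error
    //    callbacks check so that a deliberate abort is not reported to
    //    the user as a load failure.
    App.ajax.abortRequests(view.get('runningRequests'));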

http://git-wip-us.apache.org/repos/asf/ambari/blob/a91890a8/ambari-web/app/views/main/admin/stack_upgrade/versions_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/versions_view.js b/ambari-web/app/views/main/admin/stack_upgrade/versions_view.js
index d535300..3649513 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/versions_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/versions_view.js
@@ -199,12 +199,8 @@ App.MainAdminStackVersionsView = Em.View.extend({
    * stop polling upgrade state
    */
   willDestroyElement: function () {
-    var runningCheckRequests = this.get('controller.runningCheckRequests');
     window.clearTimeout(this.get('updateTimer'));
-    runningCheckRequests.forEach(function (request) {
-      request.abort();
-    });
-    runningCheckRequests.clear();
+    App.ajax.abortRequests(this.get('controller.runningCheckRequests'));
   },
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/a91890a8/ambari-web/test/mixins/common/widget_mixin_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mixins/common/widget_mixin_test.js b/ambari-web/test/mixins/common/widget_mixin_test.js
index e32026a..c70150d 100644
--- a/ambari-web/test/mixins/common/widget_mixin_test.js
+++ b/ambari-web/test/mixins/common/widget_mixin_test.js
@@ -394,6 +394,7 @@ describe('App.WidgetMixin', function () {
       cases = [
         {
           graphView: null,
+          isForcedAbort: false,
           metricsLength: 1,
           showMessageCallCount: 0,
           isExportButtonHidden: false,
@@ -401,6 +402,7 @@ describe('App.WidgetMixin', function () {
         },
         {
           graphView: {},
+          isForcedAbort: false,
           metricsLength: 1,
           showMessageCallCount: 0,
           isExportButtonHidden: false,
@@ -409,6 +411,7 @@ describe('App.WidgetMixin', function () {
         {
           graphView: {},
           childViews: [],
+          isForcedAbort: false,
           metricsLength: 1,
           showMessageCallCount: 0,
           isExportButtonHidden: false,
@@ -417,6 +420,7 @@ describe('App.WidgetMixin', function () {
         {
           graphView: {},
           childViews: [Em.Object.create({})],
+          isForcedAbort: false,
           metricsLength: 1,
           showMessageCallCount: 0,
           isExportButtonHidden: false,
@@ -425,10 +429,20 @@ describe('App.WidgetMixin', function () {
         {
           graphView: {},
           childViews: [Em.Object.create({}), view],
+          isForcedAbort: false,
           metricsLength: 0,
           showMessageCallCount: 1,
           isExportButtonHidden: true,
           title: 'graph view is available'
+        },
+        {
+          graphView: {},
+          childViews: [Em.Object.create({}), view],
+          isForcedAbort: true,
+          metricsLength: 1,
+          showMessageCallCount: 0,
+          isExportButtonHidden: false,
+          title: 'request is aborted'
         }
       ],
       messageCases = [
@@ -471,7 +485,9 @@ describe('App.WidgetMixin', function () {
             graphView: item.graphView,
             childViews: item.childViews
           });
-          obj.getMetricsErrorCallback({});
+          obj.getMetricsErrorCallback({
+            isForcedAbort: item.isForcedAbort
+          });
         });
 
         it('metrics array', function () {

http://git-wip-us.apache.org/repos/asf/ambari/blob/a91890a8/ambari-web/test/utils/ajax/ajax_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/ajax/ajax_test.js b/ambari-web/test/utils/ajax/ajax_test.js
index b0749df..747ccff 100644
--- a/ambari-web/test/utils/ajax/ajax_test.js
+++ b/ambari-web/test/utils/ajax/ajax_test.js
@@ -162,4 +162,36 @@ describe('App.ajax', function() {
       });
     });
   });
+  
+  describe('#abortRequests', function () {
+
+    var xhr = {
+        abort: Em.K
+      },
+      requests;
+
+    beforeEach(function () {
+      sinon.spy(xhr, 'abort');
+      xhr.isForcedAbort = false;
+      requests = [xhr, xhr];
+      App.ajax.abortRequests(requests);
+    });
+
+    afterEach(function () {
+      xhr.abort.restore();
+    });
+
+    it('should abort all requests', function () {
+      expect(xhr.abort.calledTwice).to.be.true;
+    });
+
+    it('should mark request as aborted', function () {
+      expect(xhr.isForcedAbort).to.be.true;
+    });
+
+    it('should clear requests array', function () {
+      expect(requests).to.have.length(0);
+    });
+    
+  });
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/a91890a8/ambari-web/test/views/common/chart/linear_time_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/common/chart/linear_time_test.js b/ambari-web/test/views/common/chart/linear_time_test.js
index 7d76520..c547249 100644
--- a/ambari-web/test/views/common/chart/linear_time_test.js
+++ b/ambari-web/test/views/common/chart/linear_time_test.js
@@ -543,7 +543,8 @@ describe('App.ChartLinearTimeView.LoadAggregator', function () {
       sinon.stub(App.ajax, 'send', function(){
         return {
           done: Em.K,
-          fail: Em.K
+          fail: Em.K,
+          always: Em.K
         }
       });
     });
@@ -552,7 +553,12 @@ describe('App.ChartLinearTimeView.LoadAggregator', function () {
       aggregator.formatRequestData.restore();
     });
     it("valid request is sent", function () {
-      var context = Em.Object.create({content: {hostName: 'host1'}});
+      var context = Em.Object.create({
+        content: {
+          hostName: 'host1'
+        },
+        runningRequests: []
+      });
       var requests = {
         'r1': {
           name: 'r1',


[06/33] ambari git commit: AMBARI-15022: Add Custom action to sync Standby with Master (goutamtadi via jaoki)

Posted by nc...@apache.org.
AMBARI-15022: Add Custom action to sync Standby with Master (goutamtadi via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f41fccfc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f41fccfc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f41fccfc

Branch: refs/heads/branch-dev-patch-upgrade
Commit: f41fccfc829919546fcb8e2a79a36a4b477affa6
Parents: f18601c
Author: Jun Aoki <ja...@apache.org>
Authored: Tue Feb 16 11:33:30 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Tue Feb 16 11:33:30 2016 -0800

----------------------------------------------------------------------
 .../common-services/HAWQ/2.0.0/metainfo.xml     | 14 ++++--
 .../HAWQ/2.0.0/package/scripts/hawqmaster.py    |  2 +-
 .../HAWQ/2.0.0/package/scripts/hawqsegment.py   |  2 +-
 .../HAWQ/2.0.0/package/scripts/hawqstandby.py   | 10 ++++-
 .../hawq/activateStandby/step3_controller.js    |  2 +-
 ambari-web/app/controllers/main/service/item.js | 46 ++++++++++----------
 ambari-web/app/messages.js                      |  5 ++-
 ambari-web/app/models/host_component.js         | 27 ++++++++----
 ambari-web/app/utils/helper.js                  | 14 ++++++
 ambari-web/app/views/main/service/item.js       | 41 +++++++++--------
 10 files changed, 104 insertions(+), 59 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f41fccfc/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml
index 7a71604..fd145f4 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml
@@ -39,7 +39,7 @@
           </commandScript>
           <customCommands>
             <customCommand>
-              <name>IMMEDIATE_STOP_CLUSTER</name>
+              <name>IMMEDIATE_STOP_HAWQ_SERVICE</name>
               <commandScript>
                 <script>scripts/hawqmaster.py</script>
                 <scriptType>PYTHON</scriptType>
@@ -78,7 +78,15 @@
           </commandScript>
           <customCommands>
             <customCommand>
-              <name>ACTIVATE_STANDBY</name>
+              <name>ACTIVATE_HAWQ_STANDBY</name>
+              <commandScript>
+                <script>scripts/hawqstandby.py</script>
+                <scriptType>PYTHON</scriptType>
+                <timeout>1200</timeout>
+              </commandScript>
+            </customCommand>
+            <customCommand>
+              <name>RESYNC_HAWQ_STANDBY</name>
               <commandScript>
                 <script>scripts/hawqstandby.py</script>
                 <scriptType>PYTHON</scriptType>
@@ -110,7 +118,7 @@
           </commandScript>
           <customCommands>
             <customCommand>
-              <name>IMMEDIATE_STOP</name>
+              <name>IMMEDIATE_STOP_HAWQ_SEGMENT</name>
               <commandScript>
                 <script>scripts/hawqsegment.py</script>
                 <scriptType>PYTHON</scriptType>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f41fccfc/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py
index 8c7b0b5..d2f9ad0 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py
@@ -51,7 +51,7 @@ class HawqMaster(Script):
     from hawqstatus import get_pid_file
     check_process_status(get_pid_file())
 
-  def immediate_stop_cluster(self, env):
+  def immediate_stop_hawq_service(self, env):
     master_helper.stop(hawq_constants.IMMEDIATE, hawq_constants.CLUSTER)
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/f41fccfc/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqsegment.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqsegment.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqsegment.py
index 6bc9802..0a597b6 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqsegment.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqsegment.py
@@ -73,7 +73,7 @@ class HawqSegment(Script):
     from hawqstatus import get_pid_file
     check_process_status(get_pid_file())
 
-  def immediate_stop(self, env):
+  def immediate_stop_hawq_segment(self, env):
     self.stop(env, mode=hawq_constants.IMMEDIATE)
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f41fccfc/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqstandby.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqstandby.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqstandby.py
index 7da7f6d..0f52b9e 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqstandby.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqstandby.py
@@ -18,6 +18,7 @@ limitations under the License.
 """
 from resource_management import Script
 from resource_management.libraries.functions.check_process_status import check_process_status
+from resource_management.core.logger import Logger
 
 import master_helper
 import common
@@ -51,10 +52,15 @@ class HawqStandby(Script):
     from hawqstatus import get_pid_file
     check_process_status(get_pid_file())
 
-  def activate_standby(self, env):
+  def activate_hawq_standby(self, env):
     import utils
     utils.exec_hawq_operation(hawq_constants.ACTIVATE, "{0} -a -M {1} -v".format(hawq_constants.STANDBY, hawq_constants.FAST))
-
+  def resync_hawq_standby(self, env):
+    import params
+    import utils
+    Logger.info("Re-synchronizing HAWQ Standby...")
+    utils.exec_hawq_operation(hawq_constants.INIT, "{0} -n -a -v -M {1}".format(hawq_constants.STANDBY, hawq_constants.FAST))
+    Logger.info("HAWQ Standby host {0} Re-Sync successful".format(params.hostname))
 
 if __name__ == "__main__":
     HawqStandby().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/f41fccfc/ambari-web/app/controllers/main/admin/highAvailability/hawq/activateStandby/step3_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/highAvailability/hawq/activateStandby/step3_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/hawq/activateStandby/step3_controller.js
index 55da44e..7dbf992 100644
--- a/ambari-web/app/controllers/main/admin/highAvailability/hawq/activateStandby/step3_controller.js
+++ b/ambari-web/app/controllers/main/admin/highAvailability/hawq/activateStandby/step3_controller.js
@@ -25,7 +25,7 @@ App.ActivateHawqStandbyWizardStep3Controller = App.HighAvailabilityProgressPageC
 
   clusterDeployState: 'ACTIVATE_HAWQ_STANDBY',
 
-  hawqActivateStandbyCustomCommand: "ACTIVATE_STANDBY",
+  hawqActivateStandbyCustomCommand: "ACTIVATE_HAWQ_STANDBY",
 
   hawqServiceName: "HAWQ",
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f41fccfc/ambari-web/app/controllers/main/service/item.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/item.js b/ambari-web/app/controllers/main/service/item.js
index a127e92..e26a884 100644
--- a/ambari-web/app/controllers/main/service/item.js
+++ b/ambari-web/app/controllers/main/service/item.js
@@ -524,29 +524,6 @@ App.MainServiceItemController = Em.Controller.extend(App.SupportClientConfigsDow
   },
 
   /**
-   * On click handler for hawq stop cluster command from items menu
-   */
-
-  immediateStopHawqCluster: function(context) {
-    var controller = this;
-    return App.showConfirmationPopup(function() {
-      App.ajax.send({
-        name: 'service.item.executeCustomCommand',
-        sender: controller,
-        data: {
-          command: context.command,
-          context: Em.I18n.t('services.service.actions.run.immediateStopHawqCluster.context'),
-          hosts: App.Service.find(context.service).get('hostComponents').findProperty('componentName', 'HAWQMASTER').get('hostName'),
-          serviceName: context.service,
-          componentName: context.component
-        },
-        success : 'executeCustomCommandSuccessCallback',
-        error : 'executeCustomCommandErrorCallback'
-      });
-    });
-  },
-
-  /**
    * On click handler for rebalance Hdfs command from items menu
    */
   rebalanceHdfsNodes: function () {
@@ -925,6 +902,29 @@ App.MainServiceItemController = Em.Controller.extend(App.SupportClientConfigsDow
   },
 
   /**
+   * On click handler for custom hawq command from items menu
+   * @param context
+   */
+  executeHawqCustomCommand: function(context) {
+    var controller = this;
+    return App.showConfirmationPopup(function() {
+      App.ajax.send({
+        name : 'service.item.executeCustomCommand',
+        sender: controller,
+        data : {
+          command : context.command,
+          context : context.label,
+          hosts : App.Service.find(context.service).get('hostComponents').findProperty('componentName', context.component).get('hostName'),
+          serviceName : context.service,
+          componentName : context.component
+        },
+        success : 'executeCustomCommandSuccessCallback',
+        error : 'executeCustomCommandErrorCallback'
+      });
+    });
+  },
+
+  /**
    * On click handler for custom command from items menu
    * @param context
    */
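
For illustration, a hypothetical invocation of the new handler (the field names follow the HostComponentActionMap entries later in this commit; the glue that assembles the context object is assumed):

    var context = {
      command: 'RESYNC_HAWQ_STANDBY',
      label: Em.I18n.t('services.service.actions.run.resyncHawqStandby.label'),
      service: 'HAWQ',
      component: 'HAWQSTANDBY'
    };
    // Shows a confirmation popup, then POSTs the custom command against
    // the host that runs the HAWQSTANDBY component.
    controller.executeHawqCustomCommand(context);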

http://git-wip-us.apache.org/repos/asf/ambari/blob/f41fccfc/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index 08ff073..8186088 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -1684,9 +1684,12 @@ Em.I18n.translations = {
   'services.service.actions.run.stopLdapKnox.title':'Stop Demo LDAP Knox Gateway',
   'services.service.actions.run.stopLdapKnox.context':'Stop Demo LDAP',
   'services.service.actions.run.startStopLdapKnox.error': 'Error during remote command: ',
-  'services.service.actions.run.immediateStopHawqCluster.context':'Stop HAWQ Cluster (Immediate Mode)',
+  'services.service.actions.run.immediateStopHawqService.context':'Stop HAWQ Service (Immediate Mode)',
+  'services.service.actions.run.immediateStopHawqService.label':'Stop HAWQ Service (Immediate Mode)',
   'services.service.actions.run.immediateStopHawqSegment.label':'Stop (Immediate Mode)',
   'services.service.actions.run.immediateStopHawqSegment.context':'Stop HAWQ Segment (Immediate Mode)',
+  'services.service.actions.run.resyncHawqStandby.context':'Re-Sync HAWQ Standby',
+  'services.service.actions.run.resyncHawqStandby.label':'Re-Synchronize HAWQ Standby',
   'services.service.actions.manage_configuration_groups.short':'Manage Config Groups',
   'services.service.actions.serviceActions':'Service Actions',
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f41fccfc/ambari-web/app/models/host_component.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/host_component.js b/ambari-web/app/models/host_component.js
index 4782190..d3d4ab9 100644
--- a/ambari-web/app/models/host_component.js
+++ b/ambari-web/app/models/host_component.js
@@ -271,6 +271,7 @@ App.HostComponentActionMap = {
     var RA = ctx.get('controller.content.hostComponents').findProperty('componentName', 'RANGER_ADMIN');
     var HM = ctx.get('controller.content.hostComponents').findProperty('componentName', 'HAWQMASTER');
     var HS = ctx.get('controller.content.hostComponents').findProperty('componentName', 'HAWQSTANDBY');
+    var HMComponent = App.MasterComponent.find('HAWQMASTER');
     return {
       RESTART_ALL: {
         action: 'restartAllHostComponents',
@@ -374,20 +375,29 @@ App.HostComponentActionMap = {
         hasSubmenu: ctx.get('controller.isSeveralClients'),
         submenuOptions: ctx.get('controller.clientComponents')
       },
-      IMMEDIATE_STOP_CLUSTER: {
-        action: 'immediateStopHawqCluster',
-        customCommand: 'IMMEDIATE_STOP_CLUSTER',
-        context: Em.I18n.t('services.service.actions.run.immediateStopHawqCluster.context'),
-        label: Em.I18n.t('services.service.actions.run.immediateStopHawqCluster.context'),
+      IMMEDIATE_STOP_HAWQ_SERVICE: {
+        action: 'executeHawqCustomCommand',
+        customCommand: 'IMMEDIATE_STOP_HAWQ_SERVICE',
+        context: Em.I18n.t('services.service.actions.run.immediateStopHawqService.context'),
+        label: Em.I18n.t('services.service.actions.run.immediateStopHawqService.label'),
         cssClass: 'icon-stop',
         disabled: !HM || HM.get('workStatus') != App.HostComponentStatus.started
       },
-      IMMEDIATE_STOP: {
-        customCommand: 'IMMEDIATE_STOP',
+      IMMEDIATE_STOP_HAWQ_SEGMENT: {
+        customCommand: 'IMMEDIATE_STOP_HAWQ_SEGMENT',
         context: Em.I18n.t('services.service.actions.run.immediateStopHawqSegment.context'),
         label: Em.I18n.t('services.service.actions.run.immediateStopHawqSegment.label'),
         cssClass: 'icon-stop'
       },
+      RESYNC_HAWQ_STANDBY: {
+        action: 'executeHawqCustomCommand',
+        customCommand: 'RESYNC_HAWQ_STANDBY',
+        context: Em.I18n.t('services.service.actions.run.resyncHawqStandby.context'),
+        label: Em.I18n.t('services.service.actions.run.resyncHawqStandby.label'),
+        cssClass: 'icon-refresh',
+        isHidden: App.get('isSingleNode') || !HS,
+        disabled: !((!!HMComponent && HMComponent.get('startedCount') === 1) && (!!HS && HS.get('workStatus') === App.HostComponentStatus.started))
+      },
       MASTER_CUSTOM_COMMAND: {
         action: 'executeCustomCommand',
         cssClass: 'icon-play-circle',
@@ -407,9 +417,10 @@ App.HostComponentActionMap = {
         isHidden: App.get('isSingleNode') || HS,
         disabled: false
       },
-      TOGGLE_ACTIVATE_HAWQ_STANDBY: {
+      ACTIVATE_HAWQ_STANDBY: {
         action: 'activateHawqStandby',
         label: Em.I18n.t('admin.activateHawqStandby.button.enable'),
+        context: Em.I18n.t('admin.activateHawqStandby.button.enable'),
         cssClass: 'icon-arrow-up',
         isHidden: App.get('isSingleNode') || !HS,
         disabled: false

http://git-wip-us.apache.org/repos/asf/ambari/blob/f41fccfc/ambari-web/app/utils/helper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/helper.js b/ambari-web/app/utils/helper.js
index a1d4b30..8f4985f 100644
--- a/ambari-web/app/utils/helper.js
+++ b/ambari-web/app/utils/helper.js
@@ -659,6 +659,20 @@ App.format = {
     if (result === ' Refreshqueues ResourceManager') {
       result = Em.I18n.t('services.service.actions.run.yarnRefreshQueues.title');
     }
+    // HAWQ custom commands on the background Ops page.
+    if (result === ' Resync Hawq Standby HAWQ Standby Master') {
+      result = Em.I18n.t('services.service.actions.run.resyncHawqStandby.label');
+    }
+    if (result === ' Immediate Stop Hawq Service HAWQ Master') {
+      result = Em.I18n.t('services.service.actions.run.immediateStopHawqService.label');
+    }
+    if (result === ' Immediate Stop Hawq Segment HAWQ Segment') {
+      result = Em.I18n.t('services.service.actions.run.immediateStopHawqSegment.label');
+    }
+    if (result === ' Activate Hawq Standby HAWQ Standby Master') {
+      result = Em.I18n.t('admin.activateHawqStandby.button.enable');
+    }
+    // <--- End HAWQ custom commands --->
     return result;
   },
 

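The string keys above are the raw command details Ambari composes for the background Operations page (custom command name plus component display name); they are matched verbatim and swapped for the localized labels added in messages.js. A sketch of the effect, assuming the enclosing formatter is App.format.commandDetail (the function these hunks patch):

    // Illustrative only; the input string and translation key are from the diff above.
    App.format.commandDetail(' Resync Hawq Standby HAWQ Standby Master');
    // => Em.I18n.t('services.service.actions.run.resyncHawqStandby.label')
    // => 'Re-Synchronize HAWQ Standby'
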
http://git-wip-us.apache.org/repos/asf/ambari/blob/f41fccfc/ambari-web/app/views/main/service/item.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/item.js b/ambari-web/app/views/main/service/item.js
index d5d65e4..e74a588 100644
--- a/ambari-web/app/views/main/service/item.js
+++ b/ambari-web/app/views/main/service/item.js
@@ -35,9 +35,9 @@ App.MainServiceItemView = Em.View.extend({
     'RESOURCEMANAGER': ['DECOMMISSION', 'REFRESHQUEUES'],
     'HBASE_MASTER': ['DECOMMISSION'],
     'KNOX_GATEWAY': ['STARTDEMOLDAP','STOPDEMOLDAP'],
-    'HAWQMASTER': ['IMMEDIATE_STOP_CLUSTER'],
-    'HAWQSTANDBY': ['ACTIVATE_STANDBY'],
-    'HAWQSEGMENT': ['IMMEDIATE_STOP']
+    'HAWQMASTER': ['IMMEDIATE_STOP_HAWQ_SERVICE'],
+    'HAWQSEGMENT': ['IMMEDIATE_STOP_HAWQ_SEGMENT'],
+    'HAWQSTANDBY' : ['RESYNC_HAWQ_STANDBY','ACTIVATE_HAWQ_STANDBY']
   },
 
    addActionMap: function() {
@@ -172,7 +172,6 @@ App.MainServiceItemView = Em.View.extend({
             break;
           case 'HAWQ':
             options.push(actionMap.TOGGLE_ADD_HAWQ_STANDBY);
-            options.push(actionMap.TOGGLE_ACTIVATE_HAWQ_STANDBY);
             break;
         }
       }
@@ -198,21 +197,25 @@ App.MainServiceItemView = Em.View.extend({
         });
       }
 
-      var hawqMasterComponent = App.StackServiceComponent.find().findProperty('componentName','HAWQMASTER');
-      if (serviceName === 'HAWQ' && hawqMasterComponent) {
-        var hawqMasterCustomCommands = hawqMasterComponent.get('customCommands');
-        customCommandToStopCluster = 'IMMEDIATE_STOP_CLUSTER';
-        if (hawqMasterCustomCommands && hawqMasterCustomCommands.contains(customCommandToStopCluster)) {
-          options.push(self.createOption(actionMap.IMMEDIATE_STOP_CLUSTER, {
-            label: Em.I18n.t('services.service.actions.run.immediateStopHawqCluster.context'),
-            context: {
-              label: Em.I18n.t('services.service.actions.run.immediateStopHawqCluster.context'),
-              service: hawqMasterComponent.get('serviceName'),
-              component: hawqMasterComponent.get('componentName'),
-              command: customCommandToStopCluster
-            }
-          }));
-        }
+      /**
+       * Display all custom commands of the HAWQ Master and Standby on the Service page.
+       */
+      if (serviceName === 'HAWQ') {
+        var hawqMasterComponent = App.StackServiceComponent.find().findProperty('componentName', 'HAWQMASTER');
+        var hawqStandbyComponent = App.StackServiceComponent.find().findProperty('componentName', 'HAWQSTANDBY');
+        var components = [hawqMasterComponent, hawqStandbyComponent];
+        components.forEach(function(component){
+          component.get('customCommands').forEach(function(command){
+            options.push(self.createOption(actionMap[command], {
+              context: {
+                label: actionMap[command].context,
+                service: component.get('serviceName'),
+                component: component.get('componentName'),
+                command: command
+              }
+            }));
+          });
+        });
       }
 
       self.addActionMap().filterProperty('service', serviceName).forEach(function(item) {


[21/33] ambari git commit: AMBARI-15036. Return privilege information with results from GroupResourceProvider (rlevas)

Posted by nc...@apache.org.
AMBARI-15036. Return privilege information with results from GroupResourceProvider (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5a9bb715
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5a9bb715
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5a9bb715

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 5a9bb715811f93104cc593215049a861c13d45d0
Parents: c282a0e
Author: Robert Levas <rl...@hortonworks.com>
Authored: Wed Feb 17 14:14:31 2016 -0500
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Wed Feb 17 14:14:39 2016 -0500

----------------------------------------------------------------------
 .../api/resources/GroupResourceDefinition.java  |   1 +
 .../resources/ResourceInstanceFactoryImpl.java  |   4 +
 .../api/services/GroupPrivilegeService.java     |  76 ++++
 .../server/api/services/GroupService.java       |  11 +
 .../internal/DefaultProviderModule.java         |   2 +
 .../GroupPrivilegeResourceProvider.java         | 237 ++++++++++++
 .../ambari/server/controller/spi/Resource.java  |   2 +
 .../api/services/GroupPrivilegeServiceTest.java | 109 ++++++
 .../GroupPrivilegeResourceProviderTest.java     | 362 +++++++++++++++++++
 9 files changed, 804 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5a9bb715/ambari-server/src/main/java/org/apache/ambari/server/api/resources/GroupResourceDefinition.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/GroupResourceDefinition.java b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/GroupResourceDefinition.java
index 783e04b..57e5e20 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/GroupResourceDefinition.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/GroupResourceDefinition.java
@@ -44,6 +44,7 @@ public class GroupResourceDefinition extends BaseResourceDefinition {
   public Set<SubResourceDefinition> getSubResourceDefinitions() {
     final Set<SubResourceDefinition> subResourceDefinitions = new HashSet<SubResourceDefinition>();
     subResourceDefinitions.add(new SubResourceDefinition(Resource.Type.Member));
+    subResourceDefinitions.add(new SubResourceDefinition(Resource.Type.GroupPrivilege));
     return subResourceDefinitions;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/5a9bb715/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
index 4c12094..b0160b9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
@@ -297,6 +297,10 @@ public class ResourceInstanceFactoryImpl implements ResourceInstanceFactory {
         resourceDefinition = new PrivilegeResourceDefinition(Resource.Type.UserPrivilege);
         break;
 
+      case GroupPrivilege:
+        resourceDefinition = new PrivilegeResourceDefinition(Resource.Type.GroupPrivilege);
+        break;
+
       case ViewPermission:
         resourceDefinition = new ViewPermissionResourceDefinition();
         break;

http://git-wip-us.apache.org/repos/asf/ambari/blob/5a9bb715/ambari-server/src/main/java/org/apache/ambari/server/api/services/GroupPrivilegeService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/GroupPrivilegeService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/GroupPrivilegeService.java
new file mode 100644
index 0000000..290d488
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/GroupPrivilegeService.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services;
+
+import org.apache.ambari.server.api.resources.ResourceInstance;
+import org.apache.ambari.server.controller.spi.Resource;
+
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ *  Service responsible for group privilege resource requests.
+ */
+public class GroupPrivilegeService extends PrivilegeService {
+
+  private final String groupName;
+
+  public GroupPrivilegeService(String groupName) {
+    this.groupName = groupName;
+  }
+
+  // ----- PrivilegeService --------------------------------------------------
+
+  @Override
+  public Response createPrivilege(String body, HttpHeaders headers, UriInfo ui) {
+    return Response.status(HttpServletResponse.SC_NOT_IMPLEMENTED).build();
+  }
+
+  @Override
+  public Response updatePrivilege(String body, HttpHeaders headers, UriInfo ui, String privilegeId) {
+    return Response.status(HttpServletResponse.SC_NOT_IMPLEMENTED).build();
+  }
+
+  @Override
+  public Response updatePrivileges(String body, HttpHeaders headers, UriInfo ui) {
+    return Response.status(HttpServletResponse.SC_NOT_IMPLEMENTED).build();
+  }
+
+  @Override
+  public Response deletePrivilege(HttpHeaders headers, UriInfo ui, String privilegeId) {
+    return Response.status(HttpServletResponse.SC_NOT_IMPLEMENTED).build();
+  }
+
+  @Override
+  public Response deletePrivileges(String body, HttpHeaders headers, UriInfo ui) {
+    return Response.status(HttpServletResponse.SC_NOT_IMPLEMENTED).build();
+  }
+
+  @Override
+  protected ResourceInstance createPrivilegeResource(String privilegeId) {
+    final Map<Resource.Type, String> mapIds = new HashMap<Resource.Type, String>();
+    mapIds.put(Resource.Type.Group, groupName);
+    mapIds.put(Resource.Type.GroupPrivilege, privilegeId);
+    return createResource(Resource.Type.GroupPrivilege, mapIds);
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/5a9bb715/ambari-server/src/main/java/org/apache/ambari/server/api/services/GroupService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/GroupService.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/GroupService.java
index ee71719..7ecd87f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/GroupService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/GroupService.java
@@ -131,6 +131,17 @@ public class GroupService extends BaseService {
   }
 
   /**
+   * Gets the group privilege service
+   */
+  @Path("{groupName}/privileges")
+  public PrivilegeService getPrivilegeService(@Context javax.ws.rs.core.Request request,
+                                              @PathParam ("groupName") String groupName) {
+
+    return new GroupPrivilegeService(groupName);
+  }
+
+
+  /**
    * Create a group resource instance.
    *
    * @param groupName group name

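With the sub-resource registered and this service method in place, a group's privileges become readable as a nested resource; the write operations intentionally return 501 Not Implemented, as shown in GroupPrivilegeService above. An illustrative read, assuming Ambari's standard /api/v1 prefix and basic-auth credentials (both assumptions, not shown in this diff):

    // Sketch only: any HTTP client works; fetch is used here for brevity.
    fetch('http://ambari-host:8080/api/v1/groups/hadoop-admins/privileges', {
      headers: {
        'Authorization': 'Basic ' + btoa('admin:admin'),
        'X-Requested-By': 'ambari'
      }
    }).then(function (response) { return response.json(); })
      .then(function (json) { console.log(json.items); });
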
http://git-wip-us.apache.org/repos/asf/ambari/blob/5a9bb715/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
index da334b2..d1d3fe6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/DefaultProviderModule.java
@@ -84,6 +84,8 @@ public class DefaultProviderModule extends AbstractProviderModule {
         return new LdapSyncEventResourceProvider(managementController);
       case UserPrivilege:
         return new UserPrivilegeResourceProvider();
+      case GroupPrivilege:
+        return new GroupPrivilegeResourceProvider();
       case Alert:
         return new AlertResourceProvider(managementController);
       case AlertDefinition:

http://git-wip-us.apache.org/repos/asf/ambari/blob/5a9bb715/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/GroupPrivilegeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/GroupPrivilegeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/GroupPrivilegeResourceProvider.java
new file mode 100644
index 0000000..c853514
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/GroupPrivilegeResourceProvider.java
@@ -0,0 +1,237 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.controller.internal;
+
+import com.google.inject.Inject;
+import org.apache.ambari.server.StaticallyInject;
+import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
+import org.apache.ambari.server.controller.spi.NoSuchResourceException;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.SystemException;
+import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
+import org.apache.ambari.server.orm.dao.ClusterDAO;
+import org.apache.ambari.server.orm.dao.GroupDAO;
+import org.apache.ambari.server.orm.dao.ViewInstanceDAO;
+import org.apache.ambari.server.orm.entities.ClusterEntity;
+import org.apache.ambari.server.orm.entities.GroupEntity;
+import org.apache.ambari.server.orm.entities.PrincipalTypeEntity;
+import org.apache.ambari.server.orm.entities.PrivilegeEntity;
+import org.apache.ambari.server.orm.entities.ViewEntity;
+import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
+import org.apache.ambari.server.security.authorization.AuthorizationException;
+import org.apache.ambari.server.security.authorization.AuthorizationHelper;
+import org.apache.ambari.server.security.authorization.ResourceType;
+import org.apache.ambari.server.security.authorization.RoleAuthorization;
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Resource provider for group privilege resources.
+ */
+@StaticallyInject
+public class GroupPrivilegeResourceProvider extends ReadOnlyResourceProvider {
+
+  protected static final String PRIVILEGE_PRIVILEGE_ID_PROPERTY_ID = PrivilegeResourceProvider.PRIVILEGE_ID_PROPERTY_ID;
+  protected static final String PRIVILEGE_PERMISSION_NAME_PROPERTY_ID = PrivilegeResourceProvider.PERMISSION_NAME_PROPERTY_ID;
+  protected static final String PRIVILEGE_PERMISSION_LABEL_PROPERTY_ID = PrivilegeResourceProvider.PERMISSION_LABEL_PROPERTY_ID;
+  protected static final String PRIVILEGE_PRINCIPAL_NAME_PROPERTY_ID = PrivilegeResourceProvider.PRINCIPAL_NAME_PROPERTY_ID;
+  protected static final String PRIVILEGE_PRINCIPAL_TYPE_PROPERTY_ID = PrivilegeResourceProvider.PRINCIPAL_TYPE_PROPERTY_ID;
+  protected static final String PRIVILEGE_VIEW_NAME_PROPERTY_ID = ViewPrivilegeResourceProvider.PRIVILEGE_VIEW_NAME_PROPERTY_ID;
+  protected static final String PRIVILEGE_VIEW_VERSION_PROPERTY_ID = ViewPrivilegeResourceProvider.PRIVILEGE_VIEW_VERSION_PROPERTY_ID;
+  protected static final String PRIVILEGE_INSTANCE_NAME_PROPERTY_ID = ViewPrivilegeResourceProvider.PRIVILEGE_INSTANCE_NAME_PROPERTY_ID;
+  protected static final String PRIVILEGE_CLUSTER_NAME_PROPERTY_ID = ClusterPrivilegeResourceProvider.PRIVILEGE_CLUSTER_NAME_PROPERTY_ID;
+  protected static final String PRIVILEGE_TYPE_PROPERTY_ID = AmbariPrivilegeResourceProvider.PRIVILEGE_TYPE_PROPERTY_ID;
+  protected static final String PRIVILEGE_GROUP_NAME_PROPERTY_ID = "PrivilegeInfo/group_name";
+
+  /**
+   * Data access object used to obtain cluster entities.
+   */
+  @Inject
+  protected static ClusterDAO clusterDAO;
+
+  /**
+   * Data access object used to obtain group entities.
+   */
+  @Inject
+  protected static GroupDAO groupDAO;
+
+  /**
+   * Data access object used to obtain view instance entities.
+   */
+  @Inject
+  protected static ViewInstanceDAO viewInstanceDAO;
+
+  /**
+   * The property ids for a privilege resource.
+   */
+  private static Set<String> propertyIds = new HashSet<String>();
+
+  static {
+    propertyIds.add(PRIVILEGE_PRIVILEGE_ID_PROPERTY_ID);
+    propertyIds.add(PRIVILEGE_PERMISSION_NAME_PROPERTY_ID);
+    propertyIds.add(PRIVILEGE_PERMISSION_LABEL_PROPERTY_ID);
+    propertyIds.add(PRIVILEGE_PRINCIPAL_NAME_PROPERTY_ID);
+    propertyIds.add(PRIVILEGE_PRINCIPAL_TYPE_PROPERTY_ID);
+    propertyIds.add(PRIVILEGE_VIEW_NAME_PROPERTY_ID);
+    propertyIds.add(PRIVILEGE_VIEW_VERSION_PROPERTY_ID);
+    propertyIds.add(PRIVILEGE_INSTANCE_NAME_PROPERTY_ID);
+    propertyIds.add(PRIVILEGE_CLUSTER_NAME_PROPERTY_ID);
+    propertyIds.add(PRIVILEGE_TYPE_PROPERTY_ID);
+    propertyIds.add(PRIVILEGE_GROUP_NAME_PROPERTY_ID);
+  }
+
+  /**
+   * Static initialization.
+   *
+   * @param clusterDAO      the cluster data access object
+   * @param groupDAO        the group data access object
+   * @param viewInstanceDAO the view instance data access object
+   */
+  public static void init(ClusterDAO clusterDAO, GroupDAO groupDAO,
+                          ViewInstanceDAO viewInstanceDAO) {
+    GroupPrivilegeResourceProvider.clusterDAO = clusterDAO;
+    GroupPrivilegeResourceProvider.groupDAO = groupDAO;
+    GroupPrivilegeResourceProvider.viewInstanceDAO = viewInstanceDAO;
+  }
+
+  @SuppressWarnings("serial")
+  private static Set<String> pkPropertyIds = new HashSet<String>() {
+    {
+      add(PRIVILEGE_PRIVILEGE_ID_PROPERTY_ID);
+    }
+  };
+
+  /**
+   * The key property ids for a privilege resource.
+   */
+  private static Map<Resource.Type, String> keyPropertyIds = new HashMap<Resource.Type, String>();
+
+  static {
+    keyPropertyIds.put(Resource.Type.Group, PRIVILEGE_GROUP_NAME_PROPERTY_ID);
+    keyPropertyIds.put(Resource.Type.GroupPrivilege, PRIVILEGE_PRIVILEGE_ID_PROPERTY_ID);
+  }
+
+
+  /**
+   * Constructor.
+   */
+  public GroupPrivilegeResourceProvider() {
+    super(propertyIds, keyPropertyIds, null);
+
+    EnumSet<RoleAuthorization> requiredAuthorizations = EnumSet.of(RoleAuthorization.AMBARI_ASSIGN_ROLES);
+    setRequiredCreateAuthorizations(requiredAuthorizations);
+    setRequiredDeleteAuthorizations(requiredAuthorizations);
+    setRequiredGetAuthorizations(requiredAuthorizations);
+    setRequiredUpdateAuthorizations(requiredAuthorizations);
+  }
+
+  // ----- PrivilegeResourceProvider -----------------------------------------
+
+  @Override
+  protected Set<String> getPKPropertyIds() {
+    return pkPropertyIds;
+  }
+
+  @Override
+  public Set<Resource> getResources(Request request, Predicate predicate)
+      throws SystemException, UnsupportedPropertyException,
+      NoSuchResourceException, NoSuchParentResourceException {
+    final Set<Resource> resources = new HashSet<Resource>();
+    final Set<String> requestedIds = getRequestPropertyIds(request, predicate);
+
+    // Ensure that the authenticated user has authorization to get this information
+    if (!AuthorizationHelper.isAuthorized(ResourceType.AMBARI, null, RoleAuthorization.AMBARI_MANAGE_GROUPS)) {
+      throw new AuthorizationException();
+    }
+
+    for (Map<String, Object> propertyMap : getPropertyMaps(predicate)) {
+      final String groupName = (String) propertyMap.get(PRIVILEGE_GROUP_NAME_PROPERTY_ID);
+
+      if (groupName != null) {
+        GroupEntity groupEntity = groupDAO.findGroupByName(groupName);
+
+        if (groupEntity == null) {
+          throw new SystemException("Group " + groupName + " was not found");
+        }
+
+        final Set<PrivilegeEntity> privileges = groupEntity.getPrincipal().getPrivileges();
+        for (PrivilegeEntity privilegeEntity : privileges) {
+          resources.add(toResource(privilegeEntity, groupName, requestedIds));
+        }
+      }
+    }
+
+    return resources;
+  }
+
+  /**
+   * Translate the found data into a Resource
+   *
+   * @param privilegeEntity the privilege data
+   * @param groupName       the group name
+   * @param requestedIds    the relevant request ids
+   * @return a resource
+   */
+  protected Resource toResource(PrivilegeEntity privilegeEntity, Object groupName, Set<String> requestedIds) {
+    final ResourceImpl resource = new ResourceImpl(Resource.Type.GroupPrivilege);
+
+    setResourceProperty(resource, PRIVILEGE_GROUP_NAME_PROPERTY_ID, groupName, requestedIds);
+    setResourceProperty(resource, PRIVILEGE_PRIVILEGE_ID_PROPERTY_ID, privilegeEntity.getId(), requestedIds);
+    setResourceProperty(resource, PRIVILEGE_PERMISSION_NAME_PROPERTY_ID, privilegeEntity.getPermission().getPermissionName(), requestedIds);
+    setResourceProperty(resource, PRIVILEGE_PERMISSION_LABEL_PROPERTY_ID, privilegeEntity.getPermission().getPermissionLabel(), requestedIds);
+    setResourceProperty(resource, PRIVILEGE_PRINCIPAL_TYPE_PROPERTY_ID, privilegeEntity.getPrincipal().getPrincipalType().getName(), requestedIds);
+
+    final String principalTypeName = privilegeEntity.getPrincipal().getPrincipalType().getName();
+    if (principalTypeName.equals(PrincipalTypeEntity.GROUP_PRINCIPAL_TYPE_NAME)) {
+      final GroupEntity groupEntity = groupDAO.findGroupByPrincipal(privilegeEntity.getPrincipal());
+      setResourceProperty(resource, PRIVILEGE_PRINCIPAL_NAME_PROPERTY_ID, groupEntity.getGroupName(), requestedIds);
+    }
+
+    String typeName = privilegeEntity.getResource().getResourceType().getName();
+    ResourceType resourceType = ResourceType.translate(typeName);
+    if (resourceType != null) {
+      switch (resourceType) {
+        case AMBARI:
+          // there is nothing special to add for this case
+          break;
+        case CLUSTER:
+          final ClusterEntity clusterEntity = clusterDAO.findByResourceId(privilegeEntity.getResource().getId());
+          setResourceProperty(resource, PRIVILEGE_CLUSTER_NAME_PROPERTY_ID, clusterEntity.getClusterName(), requestedIds);
+          break;
+        case VIEW:
+          final ViewInstanceEntity viewInstanceEntity = viewInstanceDAO.findByResourceId(privilegeEntity.getResource().getId());
+          final ViewEntity viewEntity = viewInstanceEntity.getViewEntity();
+
+          setResourceProperty(resource, PRIVILEGE_VIEW_NAME_PROPERTY_ID, viewEntity.getCommonName(), requestedIds);
+          setResourceProperty(resource, PRIVILEGE_VIEW_VERSION_PROPERTY_ID, viewEntity.getVersion(), requestedIds);
+          setResourceProperty(resource, PRIVILEGE_INSTANCE_NAME_PROPERTY_ID, viewInstanceEntity.getName(), requestedIds);
+          break;
+      }
+
+      setResourceProperty(resource, PRIVILEGE_TYPE_PROPERTY_ID, resourceType.name(), requestedIds);
+    }
+
+    return resource;
+  }
+}
\ No newline at end of file

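Each privilege the provider emits carries the PrivilegeInfo/* property ids declared above. A sketch of one AMBARI-level item as a plain object; only PrivilegeInfo/group_name is spelled out in this diff, the remaining keys follow the Privilege resource-provider constants it references, and all values are invented for illustration:

    // Illustrative values only.
    var examplePrivilege = {
      'PrivilegeInfo/group_name': 'hadoop-admins',
      'PrivilegeInfo/privilege_id': 1,
      'PrivilegeInfo/permission_name': 'AMBARI.ADMINISTRATOR',
      'PrivilegeInfo/permission_label': 'Ambari Administrator',
      'PrivilegeInfo/principal_type': 'GROUP',
      'PrivilegeInfo/principal_name': 'hadoop-admins',
      'PrivilegeInfo/type': 'AMBARI'
    };
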
http://git-wip-us.apache.org/repos/asf/ambari/blob/5a9bb715/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
index f5fa5f1..db41f10 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
@@ -130,6 +130,7 @@ public interface Resource {
     StackLevelConfiguration,
     LdapSyncEvent,
     UserPrivilege,
+    GroupPrivilege,
     RepositoryVersion,
     CompatibleRepositoryVersion,
     ClusterStackVersion,
@@ -241,6 +242,7 @@ public interface Resource {
     public static final Type StackLevelConfiguration = InternalType.StackLevelConfiguration.getType();
     public static final Type LdapSyncEvent = InternalType.LdapSyncEvent.getType();
     public static final Type UserPrivilege = InternalType.UserPrivilege.getType();
+    public static final Type GroupPrivilege = InternalType.GroupPrivilege.getType();
     public static final Type RepositoryVersion = InternalType.RepositoryVersion.getType();
     public static final Type CompatibleRepositoryVersion = InternalType.CompatibleRepositoryVersion.getType();
     public static final Type ClusterStackVersion = InternalType.ClusterStackVersion.getType();

http://git-wip-us.apache.org/repos/asf/ambari/blob/5a9bb715/ambari-server/src/test/java/org/apache/ambari/server/api/services/GroupPrivilegeServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/GroupPrivilegeServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/GroupPrivilegeServiceTest.java
new file mode 100644
index 0000000..7d2188f
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/GroupPrivilegeServiceTest.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.api.services;
+
+import junit.framework.Assert;
+import org.apache.ambari.server.api.resources.ResourceInstance;
+import org.apache.ambari.server.api.services.parsers.RequestBodyParser;
+import org.apache.ambari.server.api.services.serializers.ResultSerializer;
+import org.apache.ambari.server.controller.spi.Resource.Type;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Unit tests for GroupPrivilegeService.
+ */
+public class GroupPrivilegeServiceTest extends BaseServiceTest {
+
+  public List<ServiceTestInvocation> getTestInvocations() throws Exception {
+    List<ServiceTestInvocation> listInvocations = new ArrayList<ServiceTestInvocation>();
+
+    GroupPrivilegeService groupPrivilegeService;
+    Method m;
+    Object[] args;
+
+    //getPrivilege
+    groupPrivilegeService = new TestGroupPrivilegeService();
+    m = groupPrivilegeService.getClass().getMethod("getPrivilege", HttpHeaders.class, UriInfo.class, String.class);
+    args = new Object[] {getHttpHeaders(), getUriInfo(), "id"};
+    listInvocations.add(new ServiceTestInvocation(Request.Type.GET, groupPrivilegeService, m, args, null));
+
+    //getPrivileges
+    groupPrivilegeService = new TestGroupPrivilegeService();
+    m = groupPrivilegeService.getClass().getMethod("getPrivileges", HttpHeaders.class, UriInfo.class);
+    args = new Object[] {getHttpHeaders(), getUriInfo()};
+    listInvocations.add(new ServiceTestInvocation(Request.Type.GET, groupPrivilegeService, m, args, null));
+
+    return listInvocations;
+  }
+
+  @Test
+  public void testDisabledMethods() {
+    final HttpHeaders headers = EasyMock.createNiceMock(HttpHeaders.class);
+    final UriInfo uriInfo = EasyMock.createNiceMock(UriInfo.class);
+    final GroupPrivilegeService service = new TestGroupPrivilegeService();
+
+    final List<Response> disabledMethods = new ArrayList<Response>();
+    disabledMethods.add(service.createPrivilege("test", headers, uriInfo));
+    disabledMethods.add(service.updatePrivilege("test", headers, uriInfo, "test"));
+    disabledMethods.add(service.updatePrivileges("test", headers, uriInfo));
+    disabledMethods.add(service.deletePrivilege(headers, uriInfo, "test"));
+    disabledMethods.add(service.deletePrivileges("test", headers, uriInfo));
+
+    for (Response response: disabledMethods) {
+      Assert.assertEquals(HttpServletResponse.SC_NOT_IMPLEMENTED, response.getStatus());
+    }
+  }
+
+  private class TestGroupPrivilegeService extends GroupPrivilegeService {
+
+    public TestGroupPrivilegeService() {
+      super("group");
+    }
+
+    @Override
+    protected ResourceInstance createResource(Type type, Map<Type, String> mapIds) {
+      return getTestResource();
+    }
+
+    @Override
+    RequestFactory getRequestFactory() {
+      return getTestRequestFactory();
+    }
+
+    @Override
+    protected RequestBodyParser getBodyParser() {
+      return getTestBodyParser();
+    }
+
+    @Override
+    protected ResultSerializer getResultSerializer() {
+      return getTestResultSerializer();
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/5a9bb715/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/GroupPrivilegeResourceProviderTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/GroupPrivilegeResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/GroupPrivilegeResourceProviderTest.java
new file mode 100644
index 0000000..205f467
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/GroupPrivilegeResourceProviderTest.java
@@ -0,0 +1,362 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.internal;
+
+import junit.framework.Assert;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.SystemException;
+import org.apache.ambari.server.controller.utilities.PredicateBuilder;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.orm.dao.ClusterDAO;
+import org.apache.ambari.server.orm.dao.GroupDAO;
+import org.apache.ambari.server.orm.dao.ViewInstanceDAO;
+import org.apache.ambari.server.orm.entities.ClusterEntity;
+import org.apache.ambari.server.orm.entities.MemberEntity;
+import org.apache.ambari.server.orm.entities.PermissionEntity;
+import org.apache.ambari.server.orm.entities.PrincipalEntity;
+import org.apache.ambari.server.orm.entities.PrincipalTypeEntity;
+import org.apache.ambari.server.orm.entities.PrivilegeEntity;
+import org.apache.ambari.server.orm.entities.ResourceEntity;
+import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
+import org.apache.ambari.server.orm.entities.GroupEntity;
+import org.apache.ambari.server.orm.entities.ViewEntity;
+import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
+import org.apache.ambari.server.security.TestAuthenticationFactory;
+import org.apache.ambari.server.security.authorization.AuthorizationException;
+import org.apache.ambari.server.security.authorization.ResourceType;
+import org.easymock.EasyMockSupport;
+import org.junit.Test;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.context.SecurityContextHolder;
+
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.expect;
+
+/**
+ * GroupPrivilegeResourceProvider tests.
+ */
+public class GroupPrivilegeResourceProviderTest extends EasyMockSupport {
+
+  @Test(expected = SystemException.class)
+  public void testCreateResources() throws Exception {
+    SecurityContextHolder.getContext().setAuthentication(TestAuthenticationFactory.createClusterAdministrator("user1", 2L));
+    GroupPrivilegeResourceProvider resourceProvider = new GroupPrivilegeResourceProvider();
+    resourceProvider.createResources(createNiceMock(Request.class));
+  }
+
+  @Test
+  public void testGetResources_Administrator() throws Exception {
+    getResourcesTest(TestAuthenticationFactory.createAdministrator("admin"), "Group1");
+  }
+
+  @Test(expected = AuthorizationException.class)
+  public void testGetResources_NonAdministrator() throws Exception {
+    getResourcesTest(TestAuthenticationFactory.createClusterAdministrator("user1", 2L), "Group1");
+  }
+  
+  @Test(expected = SystemException.class)
+  public void testUpdateResources() throws Exception {
+    SecurityContextHolder.getContext().setAuthentication(TestAuthenticationFactory.createClusterAdministrator("user1", 2L));
+    GroupPrivilegeResourceProvider resourceProvider = new GroupPrivilegeResourceProvider();
+    resourceProvider.updateResources(createNiceMock(Request.class), createNiceMock(Predicate.class));
+  }
+
+  @Test(expected = SystemException.class)
+  public void testDeleteResources() throws Exception {
+    SecurityContextHolder.getContext().setAuthentication(TestAuthenticationFactory.createClusterAdministrator("user1", 2L));
+    GroupPrivilegeResourceProvider resourceProvider = new GroupPrivilegeResourceProvider();
+    resourceProvider.deleteResources(createNiceMock(Predicate.class));
+  }
+
+  @Test
+  public void testToResource_AMBARI() {
+    PermissionEntity permissionEntity = createMock(PermissionEntity.class);
+    expect(permissionEntity.getPermissionName()).andReturn("ADMINISTRATOR").atLeastOnce();
+    expect(permissionEntity.getPermissionLabel()).andReturn("Administrator").atLeastOnce();
+
+    PrincipalTypeEntity principalTypeEntity = createMock(PrincipalTypeEntity.class);
+    expect(principalTypeEntity.getName()).andReturn("GROUP").atLeastOnce();
+
+    PrincipalEntity principalEntity = createMock(PrincipalEntity.class);
+    expect(principalEntity.getPrincipalType()).andReturn(principalTypeEntity).atLeastOnce();
+
+    ResourceTypeEntity resourceTypeEntity = createMock(ResourceTypeEntity.class);
+    expect(resourceTypeEntity.getName()).andReturn("AMBARI").atLeastOnce();
+
+    ResourceEntity resourceEntity = createMock(ResourceEntity.class);
+    expect(resourceEntity.getResourceType()).andReturn(resourceTypeEntity).atLeastOnce();
+
+    PrivilegeEntity privilegeEntity = createMock(PrivilegeEntity.class);
+    expect(privilegeEntity.getId()).andReturn(1).atLeastOnce();
+    expect(privilegeEntity.getPermission()).andReturn(permissionEntity).atLeastOnce();
+    expect(privilegeEntity.getPrincipal()).andReturn(principalEntity).atLeastOnce();
+    expect(privilegeEntity.getResource()).andReturn(resourceEntity).atLeastOnce();
+
+    GroupEntity groupEntity = createMock(GroupEntity.class);
+    expect(groupEntity.getGroupName()).andReturn("group1").atLeastOnce();
+
+    GroupDAO groupDAO = createMock(GroupDAO.class);
+    expect(groupDAO.findGroupByPrincipal(anyObject(PrincipalEntity.class))).andReturn(groupEntity).anyTimes();
+
+    ClusterDAO clusterDAO = createMock(ClusterDAO.class);
+    ViewInstanceDAO viewInstanceDAO = createMock(ViewInstanceDAO.class);
+
+    replayAll();
+
+    GroupPrivilegeResourceProvider.init(clusterDAO, groupDAO, viewInstanceDAO);
+    GroupPrivilegeResourceProvider provider = new GroupPrivilegeResourceProvider();
+    Resource resource = provider.toResource(privilegeEntity, "group1", provider.getPropertyIds());
+
+    Assert.assertEquals(ResourceType.AMBARI.name(), resource.getPropertyValue(GroupPrivilegeResourceProvider.PRIVILEGE_TYPE_PROPERTY_ID));
+
+    verifyAll();
+  }
+
+  @Test
+  public void testToResource_CLUSTER() {
+    PermissionEntity permissionEntity = createMock(PermissionEntity.class);
+    expect(permissionEntity.getPermissionName()).andReturn("CLUSTER.ADMINISTRATOR").atLeastOnce();
+    expect(permissionEntity.getPermissionLabel()).andReturn("Cluster Administrator").atLeastOnce();
+
+    PrincipalTypeEntity principalTypeEntity = createMock(PrincipalTypeEntity.class);
+    expect(principalTypeEntity.getName()).andReturn("GROUP").atLeastOnce();
+
+    PrincipalEntity principalEntity = createMock(PrincipalEntity.class);
+    expect(principalEntity.getPrincipalType()).andReturn(principalTypeEntity).atLeastOnce();
+
+    ClusterEntity clusterEntity = createMock(ClusterEntity.class);
+    expect(clusterEntity.getClusterName()).andReturn("TestCluster").atLeastOnce();
+
+    ResourceTypeEntity resourceTypeEntity = createMock(ResourceTypeEntity.class);
+    expect(resourceTypeEntity.getName()).andReturn("CLUSTER").atLeastOnce();
+
+    ResourceEntity resourceEntity = createMock(ResourceEntity.class);
+    expect(resourceEntity.getId()).andReturn(1L).atLeastOnce();
+    expect(resourceEntity.getResourceType()).andReturn(resourceTypeEntity).atLeastOnce();
+
+    PrivilegeEntity privilegeEntity = createMock(PrivilegeEntity.class);
+    expect(privilegeEntity.getId()).andReturn(1).atLeastOnce();
+    expect(privilegeEntity.getPermission()).andReturn(permissionEntity).atLeastOnce();
+    expect(privilegeEntity.getPrincipal()).andReturn(principalEntity).atLeastOnce();
+    expect(privilegeEntity.getResource()).andReturn(resourceEntity).atLeastOnce();
+
+    GroupEntity groupEntity = createMock(GroupEntity.class);
+    expect(groupEntity.getGroupName()).andReturn("group1").atLeastOnce();
+
+    ClusterDAO clusterDAO = createMock(ClusterDAO.class);
+    expect(clusterDAO.findByResourceId(1L)).andReturn(clusterEntity).atLeastOnce();
+
+    ViewInstanceDAO viewInstanceDAO = createMock(ViewInstanceDAO.class);
+
+    GroupDAO groupDAO = createMock(GroupDAO.class);
+    expect(groupDAO.findGroupByPrincipal(anyObject(PrincipalEntity.class))).andReturn(groupEntity).anyTimes();
+
+    replayAll();
+
+    GroupPrivilegeResourceProvider.init(clusterDAO, groupDAO, viewInstanceDAO);
+    GroupPrivilegeResourceProvider provider = new GroupPrivilegeResourceProvider();
+    Resource resource = provider.toResource(privilegeEntity, "group1", provider.getPropertyIds());
+
+    Assert.assertEquals("TestCluster", resource.getPropertyValue(ClusterPrivilegeResourceProvider.PRIVILEGE_CLUSTER_NAME_PROPERTY_ID));
+    Assert.assertEquals(ResourceType.CLUSTER.name(), resource.getPropertyValue(GroupPrivilegeResourceProvider.PRIVILEGE_TYPE_PROPERTY_ID));
+
+    verifyAll();
+  }
+
+  @Test
+  public void testToResource_VIEW() {
+    PermissionEntity permissionEntity = createMock(PermissionEntity.class);
+    expect(permissionEntity.getPermissionName()).andReturn("CLUSTER.ADMINISTRATOR").atLeastOnce();
+    expect(permissionEntity.getPermissionLabel()).andReturn("Cluster Administrator").atLeastOnce();
+
+    PrincipalTypeEntity principalTypeEntity = createMock(PrincipalTypeEntity.class);
+    expect(principalTypeEntity.getName()).andReturn("GROUP").atLeastOnce();
+
+    PrincipalEntity principalEntity = createMock(PrincipalEntity.class);
+    expect(principalEntity.getPrincipalType()).andReturn(principalTypeEntity).atLeastOnce();
+
+    ViewEntity viewEntity = createMock(ViewEntity.class);
+    expect(viewEntity.getCommonName()).andReturn("TestView").atLeastOnce();
+    expect(viewEntity.getVersion()).andReturn("1.2.3.4").atLeastOnce();
+
+    ViewInstanceEntity viewInstanceEntity = createMock(ViewInstanceEntity.class);
+    expect(viewInstanceEntity.getViewEntity()).andReturn(viewEntity).atLeastOnce();
+    expect(viewInstanceEntity.getName()).andReturn("Test View").atLeastOnce();
+
+    ResourceTypeEntity resourceTypeEntity = createMock(ResourceTypeEntity.class);
+    expect(resourceTypeEntity.getName()).andReturn("VIEW").atLeastOnce();
+
+    ResourceEntity resourceEntity = createMock(ResourceEntity.class);
+    expect(resourceEntity.getId()).andReturn(1L).atLeastOnce();
+    expect(resourceEntity.getResourceType()).andReturn(resourceTypeEntity).atLeastOnce();
+
+    PrivilegeEntity privilegeEntity = createMock(PrivilegeEntity.class);
+    expect(privilegeEntity.getId()).andReturn(1).atLeastOnce();
+    expect(privilegeEntity.getPermission()).andReturn(permissionEntity).atLeastOnce();
+    expect(privilegeEntity.getPrincipal()).andReturn(principalEntity).atLeastOnce();
+    expect(privilegeEntity.getResource()).andReturn(resourceEntity).atLeastOnce();
+
+    GroupEntity groupEntity = createMock(GroupEntity.class);
+    expect(groupEntity.getGroupName()).andReturn("group1").atLeastOnce();
+
+    ClusterDAO clusterDAO = createMock(ClusterDAO.class);
+    
+    ViewInstanceDAO viewInstanceDAO = createMock(ViewInstanceDAO.class);
+    expect(viewInstanceDAO.findByResourceId(1L)).andReturn(viewInstanceEntity).atLeastOnce();
+
+    GroupDAO groupDAO = createMock(GroupDAO.class);
+    expect(groupDAO.findGroupByPrincipal(anyObject(PrincipalEntity.class))).andReturn(groupEntity).anyTimes();
+
+    replayAll();
+
+    GroupPrivilegeResourceProvider.init(clusterDAO, groupDAO, viewInstanceDAO);
+    GroupPrivilegeResourceProvider provider = new GroupPrivilegeResourceProvider();
+    Resource resource = provider.toResource(privilegeEntity, "group1", provider.getPropertyIds());
+
+    Assert.assertEquals("Test View", resource.getPropertyValue(ViewPrivilegeResourceProvider.PRIVILEGE_INSTANCE_NAME_PROPERTY_ID));
+    Assert.assertEquals("TestView", resource.getPropertyValue(ViewPrivilegeResourceProvider.PRIVILEGE_VIEW_NAME_PROPERTY_ID));
+    Assert.assertEquals("1.2.3.4", resource.getPropertyValue(ViewPrivilegeResourceProvider.PRIVILEGE_VIEW_VERSION_PROPERTY_ID));
+    Assert.assertEquals(ResourceType.VIEW.name(), resource.getPropertyValue(GroupPrivilegeResourceProvider.PRIVILEGE_TYPE_PROPERTY_ID));
+
+    verifyAll();
+  }
+
+  @Test
+  public void testToResource_SpecificVIEW() {
+    PermissionEntity permissionEntity = createMock(PermissionEntity.class);
+    expect(permissionEntity.getPermissionName()).andReturn("CLUSTER.ADMINISTRATOR").atLeastOnce();
+    expect(permissionEntity.getPermissionLabel()).andReturn("Cluster Administrator").atLeastOnce();
+
+    PrincipalTypeEntity principalTypeEntity = createMock(PrincipalTypeEntity.class);
+    expect(principalTypeEntity.getName()).andReturn("GROUP").atLeastOnce();
+
+    PrincipalEntity principalEntity = createMock(PrincipalEntity.class);
+    expect(principalEntity.getPrincipalType()).andReturn(principalTypeEntity).atLeastOnce();
+
+    ViewEntity viewEntity = createMock(ViewEntity.class);
+    expect(viewEntity.getCommonName()).andReturn("TestView").atLeastOnce();
+    expect(viewEntity.getVersion()).andReturn("1.2.3.4").atLeastOnce();
+
+    ViewInstanceEntity viewInstanceEntity = createMock(ViewInstanceEntity.class);
+    expect(viewInstanceEntity.getViewEntity()).andReturn(viewEntity).atLeastOnce();
+    expect(viewInstanceEntity.getName()).andReturn("Test View").atLeastOnce();
+
+    ResourceTypeEntity resourceTypeEntity = createMock(ResourceTypeEntity.class);
+    expect(resourceTypeEntity.getName()).andReturn("TestView{1.2.3.4}").atLeastOnce();
+
+    ResourceEntity resourceEntity = createMock(ResourceEntity.class);
+    expect(resourceEntity.getId()).andReturn(1L).atLeastOnce();
+    expect(resourceEntity.getResourceType()).andReturn(resourceTypeEntity).atLeastOnce();
+
+    PrivilegeEntity privilegeEntity = createMock(PrivilegeEntity.class);
+    expect(privilegeEntity.getId()).andReturn(1).atLeastOnce();
+    expect(privilegeEntity.getPermission()).andReturn(permissionEntity).atLeastOnce();
+    expect(privilegeEntity.getPrincipal()).andReturn(principalEntity).atLeastOnce();
+    expect(privilegeEntity.getResource()).andReturn(resourceEntity).atLeastOnce();
+
+    GroupEntity groupEntity = createMock(GroupEntity.class);
+    expect(groupEntity.getGroupName()).andReturn("group1").atLeastOnce();
+
+    ClusterDAO clusterDAO = createMock(ClusterDAO.class);
+
+    ViewInstanceDAO viewInstanceDAO = createMock(ViewInstanceDAO.class);
+    expect(viewInstanceDAO.findByResourceId(1L)).andReturn(viewInstanceEntity).atLeastOnce();
+
+    GroupDAO groupDAO = createMock(GroupDAO.class);
+    expect(groupDAO.findGroupByPrincipal(anyObject(PrincipalEntity.class))).andReturn(groupEntity).anyTimes();
+
+    replayAll();
+
+    GroupPrivilegeResourceProvider.init(clusterDAO, groupDAO, viewInstanceDAO);
+    GroupPrivilegeResourceProvider provider = new GroupPrivilegeResourceProvider();
+    Resource resource = provider.toResource(privilegeEntity, "group1", provider.getPropertyIds());
+
+    Assert.assertEquals("Test View", resource.getPropertyValue(ViewPrivilegeResourceProvider.PRIVILEGE_INSTANCE_NAME_PROPERTY_ID));
+    Assert.assertEquals("TestView", resource.getPropertyValue(ViewPrivilegeResourceProvider.PRIVILEGE_VIEW_NAME_PROPERTY_ID));
+    Assert.assertEquals("1.2.3.4", resource.getPropertyValue(ViewPrivilegeResourceProvider.PRIVILEGE_VIEW_VERSION_PROPERTY_ID));
+    Assert.assertEquals(ResourceType.VIEW.name(), resource.getPropertyValue(GroupPrivilegeResourceProvider.PRIVILEGE_TYPE_PROPERTY_ID));
+
+    verifyAll();
+  }
+
+  private void getResourcesTest(Authentication authentication, String requestedGroupName) throws Exception {
+    final GroupPrivilegeResourceProvider resourceProvider = new GroupPrivilegeResourceProvider();
+    final GroupDAO groupDAO = createNiceMock(GroupDAO.class);
+    final ClusterDAO clusterDAO = createNiceMock(ClusterDAO.class);
+    final ViewInstanceDAO viewInstanceDAO = createNiceMock(ViewInstanceDAO.class);
+    final GroupEntity groupEntity = createNiceMock(GroupEntity.class);
+    final PrincipalEntity principalEntity = createNiceMock(PrincipalEntity.class);
+    final PrivilegeEntity privilegeEntity = createNiceMock(PrivilegeEntity.class);
+    final PermissionEntity permissionEntity = createNiceMock(PermissionEntity.class);
+    final PrincipalTypeEntity principalTypeEntity = createNiceMock(PrincipalTypeEntity.class);
+    final ResourceEntity resourceEntity = createNiceMock(ResourceEntity.class);
+    final ResourceTypeEntity resourceTypeEntity = createNiceMock(ResourceTypeEntity.class);
+
+    expect(groupDAO.findGroupByName(requestedGroupName)).andReturn(groupEntity).anyTimes();
+    expect(groupEntity.getPrincipal()).andReturn(principalEntity).anyTimes();
+    expect(groupEntity.getMemberEntities()).andReturn(Collections.<MemberEntity>emptySet()).anyTimes();
+    expect(privilegeEntity.getPermission()).andReturn(permissionEntity).anyTimes();
+    expect(privilegeEntity.getPrincipal()).andReturn(principalEntity).anyTimes();
+    expect(principalEntity.getPrincipalType()).andReturn(principalTypeEntity).anyTimes();
+    expect(principalTypeEntity.getName()).andReturn(PrincipalTypeEntity.GROUP_PRINCIPAL_TYPE_NAME).anyTimes();
+    expect(principalEntity.getPrivileges()).andReturn(new HashSet<PrivilegeEntity>() {
+      {
+        add(privilegeEntity);
+      }
+    }).anyTimes();
+    expect(groupDAO.findGroupByPrincipal(anyObject(PrincipalEntity.class))).andReturn(groupEntity).anyTimes();
+    expect(groupEntity.getGroupName()).andReturn(requestedGroupName).anyTimes();
+    expect(privilegeEntity.getResource()).andReturn(resourceEntity).anyTimes();
+    expect(resourceEntity.getResourceType()).andReturn(resourceTypeEntity).anyTimes();
+    expect(resourceTypeEntity.getName()).andReturn(ResourceType.AMBARI.name());
+
+    replayAll();
+
+    GroupPrivilegeResourceProvider.init(clusterDAO, groupDAO, viewInstanceDAO);
+
+    final Set<String> propertyIds = new HashSet<String>();
+    propertyIds.add(GroupPrivilegeResourceProvider.PRIVILEGE_GROUP_NAME_PROPERTY_ID);
+
+    final Predicate predicate = new PredicateBuilder()
+        .property(GroupPrivilegeResourceProvider.PRIVILEGE_GROUP_NAME_PROPERTY_ID)
+        .equals(requestedGroupName)
+        .toPredicate();
+    Request request = PropertyHelper.getReadRequest(propertyIds);
+
+    // Set the authenticated user for this test
+    SecurityContextHolder.getContext().setAuthentication(authentication);
+
+    Set<Resource> resources = resourceProvider.getResources(request, predicate);
+
+    Assert.assertEquals(1, resources.size());
+    for (Resource resource : resources) {
+      String groupName = (String) resource.getPropertyValue(GroupPrivilegeResourceProvider.PRIVILEGE_GROUP_NAME_PROPERTY_ID);
+      Assert.assertEquals(requestedGroupName, groupName);
+    }
+
+    verifyAll();
+  }
+
+}


[32/33] ambari git commit: AMBARI-15085. Enhanced graph widgets aren't updated after getting no data (alexantonenko)

Posted by nc...@apache.org.
AMBARI-15085. Enhanced graph widgets aren't updated after getting no data (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f9271491
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f9271491
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f9271491

Branch: refs/heads/branch-dev-patch-upgrade
Commit: f92714910c74336a6e797056173dc287185f5ed1
Parents: 1bd4c23
Author: Alex Antonenko <hi...@gmail.com>
Authored: Thu Feb 18 12:03:54 2016 +0200
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Thu Feb 18 13:14:49 2016 +0200

----------------------------------------------------------------------
 .../app/mixins/common/widgets/widget_mixin.js   |  8 +++--
 .../test/mixins/common/widget_mixin_test.js     | 35 +++++++++++++++-----
 2 files changed, 33 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f9271491/ambari-web/app/mixins/common/widgets/widget_mixin.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/widgets/widget_mixin.js b/ambari-web/app/mixins/common/widgets/widget_mixin.js
index 13e55f2..4d777e4 100644
--- a/ambari-web/app/mixins/common/widgets/widget_mixin.js
+++ b/ambari-web/app/mixins/common/widgets/widget_mixin.js
@@ -318,7 +318,9 @@ App.WidgetMixin = Ember.Mixin.create({
             graph.set('hasData', false);
             this.set('isExportButtonHidden', true);
             graph._showMessage('info', this.t('graphs.noData.title'), this.t('graphs.noDataAtTime.message'));
-            this.get('metrics').clear();
+            this.set('metrics', this.get('metrics').reject(function (item) {
+              return this.get('content.metrics').someProperty('name', item.name);
+            }, this));
           }
         }
       }, this);
@@ -341,7 +343,9 @@ App.WidgetMixin = Ember.Mixin.create({
         graph.set('hasData', false);
         this.set('isExportButtonHidden', true);
         graph._showMessage('warn', this.t('graphs.error.title'), this.t('graphs.error.message').format(textStatus, errorThrown));
-        this.get('metrics').clear();
+        this.set('metrics', this.get('metrics').reject(function (item) {
+          return this.get('content.metrics').someProperty('name', item.name);
+        }, this));
       }
     }
   },
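
Note on the hunks above: in both the no-data path and the error path, the widget now rejects only the collected metrics whose names match its own configured content.metrics, instead of clearing everything gathered so far. A minimal Python sketch of the same reject-by-name eviction (names and data shapes are illustrative, not the Ambari web API):

def evict_own_metrics(all_metrics, own_metric_names):
    # Keep every collected metric except the ones this widget owns.
    own = set(own_metric_names)
    return [m for m in all_metrics if m["name"] not in own]

metrics = [{"name": "n0", "data": [1, 2]}, {"name": "n1", "data": [3]}]
print(evict_own_metrics(metrics, ["n0"]))  # -> [{'name': 'n1', 'data': [3]}]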

http://git-wip-us.apache.org/repos/asf/ambari/blob/f9271491/ambari-web/test/mixins/common/widget_mixin_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mixins/common/widget_mixin_test.js b/ambari-web/test/mixins/common/widget_mixin_test.js
index c70150d..662f8b8 100644
--- a/ambari-web/test/mixins/common/widget_mixin_test.js
+++ b/ambari-web/test/mixins/common/widget_mixin_test.js
@@ -391,11 +391,19 @@ describe('App.WidgetMixin', function () {
       view = Em.Object.create({
         _showMessage: Em.K
       }),
+      metrics = [
+        {
+          name: 'n0'
+        },
+        {
+          name: 'n1'
+        }
+      ],
       cases = [
         {
           graphView: null,
           isForcedAbort: false,
-          metricsLength: 1,
+          metrics: metrics,
           showMessageCallCount: 0,
           isExportButtonHidden: false,
           title: 'no graph view'
@@ -403,7 +411,7 @@ describe('App.WidgetMixin', function () {
         {
           graphView: {},
           isForcedAbort: false,
-          metricsLength: 1,
+          metrics: metrics,
           showMessageCallCount: 0,
           isExportButtonHidden: false,
           title: 'no childViews property'
@@ -412,7 +420,7 @@ describe('App.WidgetMixin', function () {
           graphView: {},
           childViews: [],
           isForcedAbort: false,
-          metricsLength: 1,
+          metrics: metrics,
           showMessageCallCount: 0,
           isExportButtonHidden: false,
           title: 'no child views'
@@ -421,7 +429,7 @@ describe('App.WidgetMixin', function () {
           graphView: {},
           childViews: [Em.Object.create({})],
           isForcedAbort: false,
-          metricsLength: 1,
+          metrics: metrics,
           showMessageCallCount: 0,
           isExportButtonHidden: false,
           title: 'no view with _showMessage method'
@@ -430,7 +438,11 @@ describe('App.WidgetMixin', function () {
           graphView: {},
           childViews: [Em.Object.create({}), view],
           isForcedAbort: false,
-          metricsLength: 0,
+          metrics: [
+            {
+              name: 'n1'
+            }
+          ],
           showMessageCallCount: 1,
           isExportButtonHidden: true,
           title: 'graph view is available'
@@ -439,7 +451,7 @@ describe('App.WidgetMixin', function () {
           graphView: {},
           childViews: [Em.Object.create({}), view],
           isForcedAbort: true,
-          metricsLength: 1,
+          metrics: metrics,
           showMessageCallCount: 0,
           isExportButtonHidden: false,
           title: 'request is aborted'
@@ -480,7 +492,14 @@ describe('App.WidgetMixin', function () {
 
         beforeEach(function () {
           obj = Em.Object.create(App.WidgetMixin, {
-            metrics: [{}],
+            metrics: metrics,
+            content: {
+              metrics: [
+                {
+                  name: 'n0'
+                }
+              ]
+            },
             isExportButtonHidden: false,
             graphView: item.graphView,
             childViews: item.childViews
@@ -491,7 +510,7 @@ describe('App.WidgetMixin', function () {
         });
 
         it('metrics array', function () {
-          expect(obj.get('metrics')).to.have.length(item.metricsLength);
+          expect(obj.get('metrics')).to.eql(item.metrics);
         });
 
         it('error message', function () {


[20/33] ambari git commit: AMBARI-15020: HAWQ service check fails when PXF is present due to cmd syntax error (bhuvnesh2703 via jaoki)

Posted by nc...@apache.org.
AMBARI-15020: HAWQ service check fails when PXF is present due to cmd syntax error (bhuvnesh2703 via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c282a0ee
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c282a0ee
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c282a0ee

Branch: refs/heads/branch-dev-patch-upgrade
Commit: c282a0eec34809290be93a4c08f6686948ff896f
Parents: 4124ae0
Author: Jun Aoki <ja...@apache.org>
Authored: Wed Feb 17 11:12:57 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Wed Feb 17 11:12:57 2016 -0800

----------------------------------------------------------------------
 .../common-services/HAWQ/2.0.0/package/scripts/utils.py   | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c282a0ee/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/utils.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/utils.py
index 51a38d1..f40e982 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/utils.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/utils.py
@@ -79,12 +79,8 @@ def exec_ssh_cmd(hostname, cmd):
   """
   Runs the command on the remote host as gpadmin user
   """
-  import params
   # Only gpadmin should be allowed to run command via ssh, thus not exposing user as a parameter
-  if params.hostname != hostname:
-    cmd = "su - {0} -c 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null {1} \"{2} \" '".format(hawq_constants.hawq_user, hostname, cmd)
-  else:
-    cmd = "su - {0} -c \"{1}\"".format(hawq_constants.hawq_user, cmd)
+  cmd = "su - {0} -c \"ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null {1} \\\"{2} \\\" \"".format(hawq_constants.hawq_user, hostname, cmd)
   Logger.info("Command executed: {0}".format(cmd))
   process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
   (stdout, stderr) = process.communicate()
@@ -97,9 +93,9 @@ def exec_psql_cmd(command, host, port, db="template1", tuples_only=True):
   """
   src_cmd = "export PGPORT={0} && source {1}".format(port, hawq_constants.hawq_greenplum_path_file)
   if tuples_only:
-    cmd = src_cmd + " && psql -d {0} -c \\\"{1};\\\"".format(db, command)
+    cmd = src_cmd + " && psql -d {0} -c \\\\\\\"{1};\\\\\\\"".format(db, command)
   else:
-    cmd = src_cmd + " && psql -t -d {0} -c \\\"{1};\\\"".format(db, command)
+    cmd = src_cmd + " && psql -t -d {0} -c \\\\\\\"{1};\\\\\\\"".format(db, command)
   retcode, out, err = exec_ssh_cmd(host, cmd)
   if retcode:
     Logger.error("SQL command executed failed: {0}\nReturncode: {1}\nStdout: {2}\nStderr: {3}".format(cmd, retcode, out, err))
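
Note on the change above: collapsing the local/remote branch into one unconditional ssh means the psql string now crosses three shell evaluations (the local shell running su, su's login shell running ssh, and the remote shell running psql), which is why the escapes in exec_psql_cmd grow as they do. A rough Python sketch of composing the same nesting mechanically, so each layer is quoted exactly once (illustrative only, not the Ambari module; the Python 2 this code targets would use pipes.quote instead of shlex.quote):

import shlex

def build_remote_psql(user, host, db, sql):
    # Quote once per shell that will evaluate the string, innermost first.
    psql = "psql -d {0} -c {1}".format(db, shlex.quote(sql + ";"))
    remote = "ssh -o StrictHostKeyChecking=no {0} {1}".format(host, shlex.quote(psql))
    return "su - {0} -c {1}".format(user, shlex.quote(remote))

print(build_remote_psql("gpadmin", "seg1.example.com", "template1", "SELECT 1"))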


[03/33] ambari git commit: Updated version for the website. (yusaku)

Posted by nc...@apache.org.
Updated version for the website. (yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b900d03a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b900d03a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b900d03a

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b900d03a9a4bef2292f1ad0857edf6d73f7abb24
Parents: be1b6b9
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Tue Feb 16 10:38:56 2016 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Tue Feb 16 10:38:56 2016 -0800

----------------------------------------------------------------------
 docs/pom.xml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b900d03a/docs/pom.xml
----------------------------------------------------------------------
diff --git a/docs/pom.xml b/docs/pom.xml
index c3bb326..d6e04e3 100644
--- a/docs/pom.xml
+++ b/docs/pom.xml
@@ -28,7 +28,7 @@
     <modelVersion>4.0.0</modelVersion>
 
     <groupId>org.apache.ambari</groupId>
-    <version>2.2.0</version>
+    <version>2.2.1</version>
     <artifactId>ambari</artifactId>
     <packaging>pom</packaging>
 
@@ -42,7 +42,7 @@
         <package.pid.dir>/var/run/ambari</package.pid.dir>
         <package.release>1</package.release>
         <package.type>tar.gz</package.type>
-        <ambari.version>2.2.0</ambari.version>
+        <ambari.version>2.2.1</ambari.version>
         <final.name>${project.artifactId}-${ambari.version}</final.name>
     </properties>
 


[19/33] ambari git commit: AMBARI-15062. Combo Search: Create auto suggest for Component State filter (Joe Wang via rzang)

Posted by nc...@apache.org.
AMBARI-15062. Combo Search: Create auto suggest for Component State filter (Joe Wang via rzang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4124ae03
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4124ae03
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4124ae03

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 4124ae032646ca3f7bbea19288da9cba259933e5
Parents: d4b67b1
Author: Richard Zang <rz...@apache.org>
Authored: Wed Feb 17 11:00:59 2016 -0800
Committer: Richard Zang <rz...@apache.org>
Committed: Wed Feb 17 11:00:59 2016 -0800

----------------------------------------------------------------------
 ambari-web/app/controllers/main/host/combo_search_box.js | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4124ae03/ambari-web/app/controllers/main/host/combo_search_box.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/host/combo_search_box.js b/ambari-web/app/controllers/main/host/combo_search_box.js
index d835e97..f4cc50f 100644
--- a/ambari-web/app/controllers/main/host/combo_search_box.js
+++ b/ambari-web/app/controllers/main/host/combo_search_box.js
@@ -88,13 +88,7 @@ App.MainHostComboSearchBoxController = Em.Controller.extend({
           callback(App.HostComponent.find().toArray().mapProperty('componentName').uniq(), {preserveOrder: true});
           break;
         case 'state':
-          callback([
-            Em.I18n.t('common.started'),
-            Em.I18n.t('common.stopped'),
-            Em.I18n.t('hosts.host.stackVersions.status.install_failed'),
-            Em.I18n.t('hosts.host.decommissioning'),
-            Em.I18n.t('hosts.host.decommissioned')
-          ], {preserveOrder: true});
+          callback(App.HostComponentStatus.getStatusesList(), {preserveOrder: true});
           break;
       }
     }
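
Note on the change above: replacing the hard-coded, translated state strings with App.HostComponentStatus.getStatusesList() gives the auto-suggest a single source of truth, so any state added to the model surfaces in the filter without a second edit. A small Python sketch of the same derive-don't-duplicate idea (the enum and its values are hypothetical, not the Ambari model):

from enum import Enum

class HostComponentStatus(Enum):
    STARTED = "Started"
    STOPPED = "Stopped"
    INSTALL_FAILED = "Install Failed"

def state_suggestions():
    # Derive the autocomplete entries from the model itself.
    return [s.value for s in HostComponentStatus]

print(state_suggestions())  # ['Started', 'Stopped', 'Install Failed']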


[14/33] ambari git commit: AMBARI-15069 Add HDP 2.5 stack for Ambari 2.2.2 (dsen)

Posted by nc...@apache.org.
AMBARI-15069 Add HDP 2.5 stack for Ambari 2.2.2 (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b339542e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b339542e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b339542e

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b339542e04290725d3a171afb62e6233950e30b0
Parents: 03f804c
Author: Dmytro Sen <ds...@apache.org>
Authored: Wed Feb 17 16:46:55 2016 +0200
Committer: Dmytro Sen <ds...@apache.org>
Committed: Wed Feb 17 16:46:55 2016 +0200

----------------------------------------------------------------------
 .../main/resources/stacks/HDP/2.5/metainfo.xml  | 25 ++++++
 .../resources/stacks/HDP/2.5/repos/repoinfo.xml | 92 ++++++++++++++++++++
 .../HDP/2.5/services/ACCUMULO/metainfo.xml      | 26 ++++++
 .../stacks/HDP/2.5/services/ATLAS/metainfo.xml  | 26 ++++++
 .../stacks/HDP/2.5/services/FALCON/metainfo.xml | 26 ++++++
 .../stacks/HDP/2.5/services/FLUME/metainfo.xml  | 26 ++++++
 .../stacks/HDP/2.5/services/HBASE/metainfo.xml  | 26 ++++++
 .../stacks/HDP/2.5/services/HDFS/metainfo.xml   | 26 ++++++
 .../stacks/HDP/2.5/services/HIVE/metainfo.xml   | 26 ++++++
 .../stacks/HDP/2.5/services/KAFKA/metainfo.xml  | 26 ++++++
 .../HDP/2.5/services/KERBEROS/metainfo.xml      | 25 ++++++
 .../stacks/HDP/2.5/services/KNOX/metainfo.xml   | 26 ++++++
 .../stacks/HDP/2.5/services/MAHOUT/metainfo.xml | 26 ++++++
 .../stacks/HDP/2.5/services/OOZIE/metainfo.xml  | 26 ++++++
 .../stacks/HDP/2.5/services/PIG/metainfo.xml    | 26 ++++++
 .../stacks/HDP/2.5/services/RANGER/metainfo.xml | 29 ++++++
 .../HDP/2.5/services/RANGER_KMS/metainfo.xml    | 29 ++++++
 .../stacks/HDP/2.5/services/SLIDER/metainfo.xml | 26 ++++++
 .../stacks/HDP/2.5/services/SPARK/metainfo.xml  | 29 ++++++
 .../stacks/HDP/2.5/services/SQOOP/metainfo.xml  | 26 ++++++
 .../stacks/HDP/2.5/services/STORM/metainfo.xml  | 27 ++++++
 .../stacks/HDP/2.5/services/TEZ/metainfo.xml    | 26 ++++++
 .../stacks/HDP/2.5/services/YARN/metainfo.xml   | 27 ++++++
 .../HDP/2.5/services/ZOOKEEPER/metainfo.xml     | 26 ++++++
 .../stacks/HDP/2.5/services/stack_advisor.py    | 22 +++++
 ambari-server/src/test/python/TestSetupAgent.py |  2 +-
 .../stacks/2.0.6/common/test_stack_advisor.py   | 12 +--
 27 files changed, 728 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/metainfo.xml
new file mode 100644
index 0000000..df3c4ee
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/metainfo.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <versions>
+    <active>true</active>
+  </versions>
+  <extends>2.4</extends>
+  <minJdk>1.7</minJdk>
+  <maxJdk>1.8</maxJdk>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/repos/repoinfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/repos/repoinfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/repos/repoinfo.xml
new file mode 100644
index 0000000..acf1c07
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/repos/repoinfo.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<reposinfo>
+  <latest>http://s3.amazonaws.com/dev.hortonworks.com/HDP/hdp_urlinfo.json</latest>
+  <os family="redhat6">
+    <repo>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.5.0.0</baseurl>
+      <repoid>HDP-2.5</repoid>
+      <reponame>HDP</reponame>
+    </repo>
+    <repo>
+      <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6</baseurl>
+      <repoid>HDP-UTILS-1.1.0.20</repoid>
+      <reponame>HDP-UTILS</reponame>
+    </repo>
+  </os>
+  <os family="redhat7">
+    <repo>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos7/2.x/updates/2.5.0.0</baseurl>
+      <repoid>HDP-2.5</repoid>
+      <reponame>HDP</reponame>
+    </repo>
+    <repo>
+      <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos7</baseurl>
+      <repoid>HDP-UTILS-1.1.0.20</repoid>
+      <reponame>HDP-UTILS</reponame>
+    </repo>
+  </os>
+  <os family="suse11">
+    <repo>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11sp3/2.x/updates/2.5.0.0</baseurl>
+      <repoid>HDP-2.5</repoid>
+      <reponame>HDP</reponame>
+    </repo>
+    <repo>
+      <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/suse11sp3</baseurl>
+      <repoid>HDP-UTILS-1.1.0.20</repoid>
+      <reponame>HDP-UTILS</reponame>
+    </repo>
+  </os>
+  <os family="ubuntu12">
+    <repo>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/ubuntu12/2.x/updates/2.5.0.0</baseurl>
+      <repoid>HDP-2.5</repoid>
+      <reponame>HDP</reponame>
+    </repo>
+    <repo>
+      <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/ubuntu12</baseurl>
+      <repoid>HDP-UTILS-1.1.0.20</repoid>
+      <reponame>HDP-UTILS</reponame>
+    </repo>
+  </os>
+  <os family="debian7">
+    <repo>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/debian7/2.x/updates/2.5.0.0</baseurl>
+      <repoid>HDP-2.5</repoid>
+      <reponame>HDP</reponame>
+    </repo>
+    <repo>
+      <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/debian6</baseurl>
+      <repoid>HDP-UTILS-1.1.0.20</repoid>
+      <reponame>HDP-UTILS</reponame>
+    </repo>
+  </os>
+  <os family="ubuntu14">
+    <repo>
+      <baseurl>http://s3.amazonaws.com/dev.hortonworks.com/HDP/ubuntu14/2.x/updates/2.5.0.0</baseurl>
+      <repoid>HDP-2.5</repoid>
+      <reponame>HDP</reponame>
+    </repo>
+    <repo>
+      <baseurl>http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/ubuntu12</baseurl>
+      <repoid>HDP-UTILS-1.1.0.20</repoid>
+      <reponame>HDP-UTILS</reponame>
+    </repo>
+  </os>
+</reposinfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/ACCUMULO/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ACCUMULO/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ACCUMULO/metainfo.xml
new file mode 100644
index 0000000..ec81f3e
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ACCUMULO/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>ACCUMULO</name>
+      <version>1.7.0.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml
new file mode 100644
index 0000000..66aea9d
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ATLAS/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>ATLAS</name>
+      <version>0.5.0.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml
new file mode 100644
index 0000000..1998131
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FALCON/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>FALCON</name>
+      <version>0.6.1.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/FLUME/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/FLUME/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FLUME/metainfo.xml
new file mode 100644
index 0000000..33ceb43
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/FLUME/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>FLUME</name>
+      <version>1.5.2.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/HBASE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HBASE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HBASE/metainfo.xml
new file mode 100644
index 0000000..0feaa5e
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HBASE/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>HBASE</name>
+      <version>1.1.2.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HDFS/metainfo.xml
new file mode 100644
index 0000000..a3e4a64
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HDFS/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>HDFS</name>
+      <version>2.7.1.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/metainfo.xml
new file mode 100644
index 0000000..3b43cc0
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HIVE/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>HIVE</name>
+      <version>1.2.1.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/KAFKA/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/KAFKA/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/KAFKA/metainfo.xml
new file mode 100644
index 0000000..e722029
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/KAFKA/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>KAFKA</name>
+      <version>0.9.0.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/KERBEROS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/KERBEROS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/KERBEROS/metainfo.xml
new file mode 100644
index 0000000..1a931a3
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/KERBEROS/metainfo.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>KERBEROS</name>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml
new file mode 100644
index 0000000..f94e1d6
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/KNOX/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>KNOX</name>
+      <version>0.6.0.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/MAHOUT/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/MAHOUT/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/MAHOUT/metainfo.xml
new file mode 100644
index 0000000..9ed6957
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/MAHOUT/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>MAHOUT</name>
+      <version>0.9.0.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/OOZIE/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/OOZIE/metainfo.xml
new file mode 100644
index 0000000..64fdd23
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/OOZIE/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>OOZIE</name>
+      <version>4.2.0.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/PIG/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/PIG/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/PIG/metainfo.xml
new file mode 100644
index 0000000..9d056bb
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/PIG/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>PIG</name>
+      <version>0.15.0.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/RANGER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/RANGER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/RANGER/metainfo.xml
new file mode 100644
index 0000000..73a3788
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/RANGER/metainfo.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>RANGER</name>
+      <version>0.5.0.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/RANGER_KMS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/RANGER_KMS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/RANGER_KMS/metainfo.xml
new file mode 100644
index 0000000..02fb6aa
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/RANGER_KMS/metainfo.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>RANGER_KMS</name>
+      <version>0.5.0.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/SLIDER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SLIDER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SLIDER/metainfo.xml
new file mode 100644
index 0000000..6502bfe
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SLIDER/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>SLIDER</name>
+      <version>0.80.0.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml
new file mode 100644
index 0000000..107ca93
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SPARK/metainfo.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>SPARK</name>
+      <version>1.6.0.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/SQOOP/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SQOOP/metainfo.xml
new file mode 100644
index 0000000..eb67d63
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/SQOOP/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>SQOOP</name>
+      <version>1.4.6.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/STORM/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/STORM/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/STORM/metainfo.xml
new file mode 100644
index 0000000..a296bfa
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/STORM/metainfo.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>STORM</name>
+      <version>0.10.0.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/TEZ/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/TEZ/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/TEZ/metainfo.xml
new file mode 100644
index 0000000..30f0c25
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/TEZ/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>TEZ</name>
+      <version>0.7.0.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/metainfo.xml
new file mode 100644
index 0000000..4856d4a
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/YARN/metainfo.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>YARN</name>
+      <version>2.7.1.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml
new file mode 100644
index 0000000..de8ebd9
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZOOKEEPER/metainfo.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>ZOOKEEPER</name>
+      <version>3.4.6.2.5</version>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
new file mode 100644
index 0000000..9dd5cc7
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/stack_advisor.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env ambari-python-wrap
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+
+class HDP25StackAdvisor(HDP24StackAdvisor):
+  pass
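
Note on the file above: the subclass body is intentionally empty. Ambari evaluates the ancestor stacks' stack_advisor.py files first, so HDP24StackAdvisor is already in scope when this file is executed, and HDP 2.5 inherits every 2.4 recommendation until a method is overridden. A self-contained sketch of what a first override typically looks like (the stub base class and the property name are hypothetical, standing in for the real advisor chain):

class HDP24StackAdvisor(object):
  def recommendYARNConfigurations(self, configurations, clusterData, services, hosts):
    configurations.setdefault("yarn-site", {})["yarn.existing"] = "from-2.4"

class HDP25StackAdvisor(HDP24StackAdvisor):
  def recommendYARNConfigurations(self, configurations, clusterData, services, hosts):
    # Keep the 2.4 recommendations, then layer a 2.5-specific default on top.
    super(HDP25StackAdvisor, self).recommendYARNConfigurations(
        configurations, clusterData, services, hosts)
    configurations["yarn-site"]["yarn.hypothetical.new-in-2.5"] = "true"

conf = {}
HDP25StackAdvisor().recommendYARNConfigurations(conf, None, None, None)
print(conf)  # both the inherited and the new property are present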

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/test/python/TestSetupAgent.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestSetupAgent.py b/ambari-server/src/test/python/TestSetupAgent.py
index a366ddb..0b33e9f 100644
--- a/ambari-server/src/test/python/TestSetupAgent.py
+++ b/ambari-server/src/test/python/TestSetupAgent.py
@@ -307,7 +307,7 @@ class TestSetupAgent(TestCase):
   @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
   @patch.object(subprocess, 'Popen')
   def test_execOsCommand(self, Popen_mock):
-    self.assertFalse(setup_agent.execOsCommand("hostname -f") == None)
+    self.assertIsNotNone(setup_agent.execOsCommand("hostname -f"))
 
   @only_for_platform(PLATFORM_WINDOWS)
   @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))

http://git-wip-us.apache.org/repos/asf/ambari/blob/b339542e/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
index 4f059ba..7c578f2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
@@ -1820,7 +1820,7 @@ class TestHDP206StackAdvisor(TestCase):
     )
     recommendedDefaults = {"property1": "file:///grid/0/var/dir"}
     warn = self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo)
-    self.assertFalse(warn == None)
+    self.assertIsNotNone(warn)
     self.assertEquals({'message': 'It is not recommended to use root partition for property1', 'level': 'WARN'}, warn)
 
     # Set by user /var mountpoint, which is non-root , but not preferable - no warning
@@ -1831,7 +1831,7 @@ class TestHDP206StackAdvisor(TestCase):
         "mountpoint" : "/var"
       }
     )
-    self.assertTrue(self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo) == None)
+    self.assertIsNone(self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo))
 
   def test_validatorEnoughDiskSpace(self):
     reqiuredDiskSpace = 1048576
@@ -1847,7 +1847,7 @@ class TestHDP206StackAdvisor(TestCase):
       }
     ]}
     properties = {"property1": "file:///var/dir"}
-    self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace) == None)
+    self.assertIsNone(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace))
 
     # local FS, no enough space
     hostInfo = {"disk_info": [
@@ -1858,16 +1858,16 @@ class TestHDP206StackAdvisor(TestCase):
       }
     ]}
     warn = self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace)
-    self.assertTrue(warn != None)
+    self.assertIsNotNone(warn)
     self.assertEquals({'message': errorMsg, 'level': 'WARN'}, warn)
 
     # non-local FS, HDFS
     properties = {"property1": "hdfs://h1"}
-    self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace) == None)
+    self.assertIsNone(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace))
 
     # non-local FS, WASB
     properties = {"property1": "wasb://h1"}
-    self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace) == None)
+    self.assertIsNone(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, reqiuredDiskSpace))
 
   def test_round_to_n(self):
     self.assertEquals(self.stack_advisor_impl.round_to_n(0), 0)


[26/33] ambari git commit: AMBARI-15079. Preupload.py should pre-create hdfs directories (aonishuk)

Posted by nc...@apache.org.
AMBARI-15079. Preupload.py should pre-create hdfs directories (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/df2dbe15
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/df2dbe15
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/df2dbe15

Branch: refs/heads/branch-dev-patch-upgrade
Commit: df2dbe150b97ecb74ddf4652463b8062f288203a
Parents: b15c0f3
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Feb 18 00:50:21 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Feb 18 00:50:48 2016 +0200

----------------------------------------------------------------------
 .../libraries/providers/hdfs_resource.py        | 63 +++++++++++++-------
 .../libraries/resources/hdfs_resource.py        | 12 ++++
 .../1.6.1.2.2.0/package/scripts/params.py       |  1 +
 .../0.1.0/package/scripts/params.py             |  1 +
 .../0.5.0.2.1/package/scripts/params_linux.py   |  1 +
 .../0.96.0.2.0/package/scripts/params_linux.py  |  1 +
 .../2.1.0.2.0/package/scripts/params_linux.py   |  1 +
 .../0.12.0.2.0/package/scripts/params_linux.py  |  1 +
 .../KAFKA/0.8.1.2.2/package/scripts/params.py   |  1 +
 .../0.5.0.2.2/package/scripts/params_linux.py   |  1 +
 .../MAHOUT/1.0.0.2.3/package/scripts/params.py  |  1 +
 .../4.0.0.2.0/package/scripts/params_linux.py   |  1 +
 .../0.12.0.2.0/package/scripts/params_linux.py  |  1 +
 .../0.60.0.2.2/package/scripts/params_linux.py  |  1 +
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |  1 +
 .../0.9.1.2.1/package/scripts/params_linux.py   |  1 +
 .../0.4.0.2.1/package/scripts/params_linux.py   |  1 +
 .../2.1.0.2.0/package/scripts/params_linux.py   |  1 +
 .../main/resources/scripts/Ambaripreupload.py   | 54 ++++++++++++++---
 .../2.0.6/hooks/before-START/scripts/params.py  |  1 +
 .../AMBARI_METRICS/test_metrics_collector.py    |  6 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     | 18 +++---
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 54 ++++++++---------
 .../stacks/2.0.6/HDFS/test_service_check.py     |  8 +--
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 28 ++++-----
 .../2.0.6/HIVE/test_hive_service_check.py       | 12 ++--
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     | 32 +++++-----
 .../stacks/2.0.6/OOZIE/test_service_check.py    | 10 ++--
 .../stacks/2.0.6/PIG/test_pig_service_check.py  | 12 ++--
 .../stacks/2.0.6/YARN/test_historyserver.py     | 29 +++++----
 .../2.0.6/YARN/test_mapreduce2_service_check.py | 12 ++--
 .../stacks/2.1/FALCON/test_falcon_server.py     | 12 ++--
 .../python/stacks/2.1/TEZ/test_service_check.py | 16 ++---
 .../stacks/2.1/YARN/test_apptimelineserver.py   |  2 +-
 .../stacks/2.2/PIG/test_pig_service_check.py    | 12 ++--
 .../stacks/2.2/SPARK/test_job_history_server.py | 11 ++--
 .../2.3/MAHOUT/test_mahout_service_check.py     |  8 +--
 .../2.3/SPARK/test_spark_thrift_server.py       |  4 +-
 .../test/python/stacks/2.3/YARN/test_ats_1_5.py | 10 ++--
 39 files changed, 267 insertions(+), 175 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
index ebcf1a4..b73ae56 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
@@ -151,32 +151,13 @@ class WebHDFSUtil:
     # only hdfs seems to support webHDFS
     return (is_webhdfs_enabled and default_fs.startswith("hdfs"))
     
-  def parse_path(self, path):
-    """
-    hdfs://nn_url:1234/a/b/c -> /a/b/c
-    hdfs://nn_ha_name/a/b/c -> /a/b/c
-    hdfs:///a/b/c -> /a/b/c
-    /a/b/c -> /a/b/c
-    """
-    math_with_protocol_and_nn_url = re.match("[a-zA-Z]+://[^/]+(/.+)", path)
-    math_with_protocol = re.match("[a-zA-Z]+://(/.+)", path)
-    
-    if math_with_protocol_and_nn_url:
-      path = math_with_protocol_and_nn_url.group(1)
-    elif math_with_protocol:
-      path = math_with_protocol.group(1)
-    else:
-      path = path
-      
-    return re.sub("[/]+", "/", path)
-    
   valid_status_codes = ["200", "201"]
   def run_command(self, target, operation, method='POST', assertable_result=True, file_to_put=None, ignore_status_codes=[], **kwargs):
     """
     assertable_result - some POST requests return '{"boolean":false}' or '{"boolean":true}'
     depending on if query was successful or not, we can assert this for them
     """
-    target = self.parse_path(target)
+    target = HdfsResourceProvider.parse_path(target)
     
     url = format("{address}/webhdfs/v1{target}?op={operation}&user.name={run_user}", address=self.address, run_user=self.run_user)
     for k,v in kwargs.iteritems():
@@ -394,7 +375,7 @@ class HdfsResourceWebHDFS:
     
     
   def _fill_in_parent_directories(self, target, results):
-    path_parts = self.util.parse_path(target).split("/")[1:]# [1:] remove '' from parts
+    path_parts = HdfsResourceProvider.parse_path(target).split("/")[1:]# [1:] remove '' from parts
     path = "/"
 
     for path_part in path_parts:
@@ -416,13 +397,51 @@ class HdfsResourceProvider(Provider):
   def __init__(self, resource):
     super(HdfsResourceProvider,self).__init__(resource)
     self.fsType = getattr(resource, 'dfs_type')
+    self.ignored_resources_list = self.get_ignored_resources_list()
     if self.fsType != 'HCFS':
       self.assert_parameter_is_set('hdfs_site')
       self.webhdfs_enabled = self.resource.hdfs_site['dfs.webhdfs.enabled']
+      
+  @staticmethod
+  def parse_path(path):
+    """
+    hdfs://nn_url:1234/a/b/c -> /a/b/c
+    hdfs://nn_ha_name/a/b/c -> /a/b/c
+    hdfs:///a/b/c -> /a/b/c
+    /a/b/c -> /a/b/c
+    """
+    match_with_protocol_and_nn_url = re.match("[a-zA-Z]+://[^/]+(/.+)", path)
+    match_with_protocol = re.match("[a-zA-Z]+://(/.+)", path)
+    
+    if match_with_protocol_and_nn_url:
+      path = match_with_protocol_and_nn_url.group(1)
+    elif match_with_protocol:
+      path = match_with_protocol.group(1)
+      
+    return re.sub("[/]+", "/", path)
+  
+  def get_ignored_resources_list(self):
+    if not self.resource.hdfs_resource_ignore_file or not os.path.exists(self.resource.hdfs_resource_ignore_file):
+      return []
+    
+    with open(self.resource.hdfs_resource_ignore_file, "rb") as fp:
+      content = fp.read()
+      
+    hdfs_resources_to_ignore = []
+    for hdfs_resource_to_ignore in content.split("\n"):
+      hdfs_resources_to_ignore.append(HdfsResourceProvider.parse_path(hdfs_resource_to_ignore))
+            
+    return hdfs_resources_to_ignore
     
   def action_delayed(self, action_name):
     self.assert_parameter_is_set('type')
-
+    
+    if HdfsResourceProvider.parse_path(self.resource.target) in self.ignored_resources_list:
+      Logger.info("Skipping '{0}' because it is in ignore file {1}.".format(self.resource, self.resource.hdfs_resource_ignore_file))
+      return
+    
     self.get_hdfs_resource_executor().action_delayed(action_name, self)
 
   def action_create_on_execute(self):
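
Making parse_path a @staticmethod on HdfsResourceProvider lets WebHDFSUtil.run_command,
_fill_in_parent_directories and the new ignore-list parsing all share one path
normalizer. Its behavior is easy to exercise standalone; the sketch below reproduces
the regex logic from the hunk above and checks the cases listed in its docstring:

import re

def parse_path(path):
    # Strip an optional scheme://authority prefix, then collapse repeated slashes.
    match_with_protocol_and_nn_url = re.match("[a-zA-Z]+://[^/]+(/.+)", path)
    match_with_protocol = re.match("[a-zA-Z]+://(/.+)", path)
    if match_with_protocol_and_nn_url:
        path = match_with_protocol_and_nn_url.group(1)
    elif match_with_protocol:
        path = match_with_protocol.group(1)
    return re.sub("[/]+", "/", path)

assert parse_path("hdfs://nn_url:1234/a/b/c") == "/a/b/c"
assert parse_path("hdfs://nn_ha_name/a/b/c") == "/a/b/c"
assert parse_path("hdfs:///a/b/c") == "/a/b/c"
assert parse_path("/a/b/c") == "/a/b/c"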

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
index 7c12409..c5460a0 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
@@ -76,6 +76,18 @@ class HdfsResource(Resource):
   hadoop_bin_dir = ResourceArgument()
   hadoop_conf_dir = ResourceArgument()
   
+  """
+  Path to a file containing a '\n'-separated list of HDFS resources that should not
+  be managed (any action on them is simply skipped).
+  
+  This means that if HdfsResource('/test1'..) is executed and /test1 is one of the lines
+  in the given file, the execution will be ignored.
+  
+  Example value:
+  /var/lib/ambari-agent/data/.hdfs_resource_ignore
+  """
+  hdfs_resource_ignore_file = ResourceArgument()
+  
   # WebHDFS needs these
   hdfs_site = ResourceArgument()
   default_fs = ResourceArgument()
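
The series of params.py hunks below all make the same one-line change: each service
binds the new argument once through functools.partial, so every later HdfsResource(...)
call in that service's scripts inherits it automatically. A toy stand-in for the real
Resource class illustrates the pattern (only hdfs_resource_ignore_file is shown; the
real partial also binds user, keytab, and so on):

import functools

class HdfsResource(object):  # stand-in for resource_management's Resource subclass
    def __init__(self, target, hdfs_resource_ignore_file=None, **kwargs):
        self.target = target
        self.hdfs_resource_ignore_file = hdfs_resource_ignore_file

# Bound once in params.py; call sites keep passing just the target.
HdfsResource = functools.partial(
    HdfsResource,
    hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore")

r = HdfsResource('/tmp')
assert r.hdfs_resource_ignore_file == "/var/lib/ambari-agent/data/.hdfs_resource_ignore"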

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
index 2bd2626..993d4cf 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
@@ -186,6 +186,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index f3a97fc..f1ff998 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -265,6 +265,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
index b150464..6b37451 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
@@ -119,6 +119,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index 6837bf1..03486a7 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -223,6 +223,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index 29c4784..3fb4486 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -325,6 +325,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index dc17dba..e9500d9 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -443,6 +443,7 @@ import functools
 HdfsResource = functools.partial(
  HdfsResource,
   user = hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
index 47af240..d629533 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1.2.2/package/scripts/params.py
@@ -274,6 +274,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
index 7ec85b5..c42c123 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
@@ -343,6 +343,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
index 69c03ea..b1667a8 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
@@ -82,6 +82,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index 7a2f6f6..81c894a 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -265,6 +265,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
index f923723..fc6fd81 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
@@ -83,6 +83,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
index 09b7876..132ff77 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
@@ -64,6 +64,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index 68c4f37..7bf1f1c 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -188,6 +188,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
index 25da2a1..33615ba 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
@@ -295,6 +295,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
index 25f867e..da52556 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
@@ -87,6 +87,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index f0b6927..2ef404d 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -268,6 +268,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/scripts/Ambaripreupload.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/scripts/Ambaripreupload.py b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
index 61db286..a803de4 100644
--- a/ambari-server/src/main/resources/scripts/Ambaripreupload.py
+++ b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
@@ -166,7 +166,8 @@ with Environment() as env:
       hadoop_conf_dir = hadoop_conf_dir,
       principal_name = None,
       hdfs_site = hdfs_site,
-      default_fs = fs_default
+      default_fs = fs_default,
+      hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
     )
    
   def _copy_files(source_and_dest_pairs, file_owner, group_owner, kinit_if_needed):
@@ -231,7 +232,40 @@ with Environment() as env:
    
     source_and_dest_pairs = [(component_tar_source_file, destination_file), ]
     return _copy_files(source_and_dest_pairs, file_owner, group_owner, kinit_if_needed)
+  
+  def createHdfsResources():
+    params.HdfsResource('/atshistory', user='hdfs', change_permissions_for_parents=True, owner='yarn', group='hadoop', type='directory', action= ['create_on_execute'], mode=0755)
+    params.HdfsResource('/user/hcat', owner='hcat', type='directory', action=['create_on_execute'], mode=0755)
+    params.HdfsResource('/hive/warehouse', owner='hive', type='directory', action=['create_on_execute'], mode=0777)
+    params.HdfsResource('/user/hive', owner='hive', type='directory', action=['create_on_execute'], mode=0755)
+    params.HdfsResource('/tmp', mode=0777, action=['create_on_execute'], type='directory', owner='hdfs')
+    params.HdfsResource('/user/ambari-qa', type='directory', action=['create_on_execute'], mode=0770)
+    params.HdfsResource('/user/oozie', owner='oozie', type='directory', action=['create_on_execute'], mode=0775)
+    params.HdfsResource('/app-logs', recursive_chmod=True, owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
+    params.HdfsResource('/tmp/entity-file-history/active', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'])
+    params.HdfsResource('/mapred', owner='mapred', type='directory', action=['create_on_execute'])
+    params.HdfsResource('/mapred/system', owner='hdfs', type='directory', action=['create_on_execute'])
+    params.HdfsResource('/mr-history/done', change_permissions_for_parents=True, owner='mapred', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
+    params.HdfsResource('/atshistory/done', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=0700)
+    params.HdfsResource('/atshistory/active', owner='yarn', group='hadoop', type='directory', action=['create_on_execute'], mode=01777)
+    params.HdfsResource('/ams/hbase', owner='ams', type='directory', action=['create_on_execute'], mode=0775)
+    params.HdfsResource('/amshbase/staging', owner='ams', type='directory', action=['create_on_execute'], mode=0711)
+    params.HdfsResource('/user/ams/hbase', owner='ams', type='directory', action=['create_on_execute'], mode=0775)
+
 
+  def putCreatedHdfsResourcesToIgnore(env):
+    if 'hdfs_files' not in env.config:
+      Logger.info("Not creating .hdfs_resource_ignore as no resources to use.")
+      return
+    
+    file_content = ""
+    for hdfs_file in env.config['hdfs_files']:
+      file_content += hdfs_file['target']
+      file_content += "\n"
+      
+    with open("/var/lib/ambari-agent/data/.hdfs_resource_ignore", "a+") as fp:
+      fp.write(file_content)
+      
   env.set_params(params)
   hadoop_conf_dir = params.hadoop_conf_dir
    
@@ -272,7 +306,7 @@ with Environment() as env:
   # DON'T CHANGE THE VALUE SINCE IT'S USED TO DETERMINE WHETHER TO RUN THE COMMAND OR NOT BY READING THE MARKER FILE.
   # Oozie tmp dir should be /var/tmp/oozie and is already created by a function above.
   command = format("cd {oozie_tmp_dir} && {oozie_setup_sh} prepare-war {oozie_secure} ")
-  command_to_file = format("cd {oozie_tmp_dir} && {oozie_setup_sh_current} prepare-war {oozie_secure} ")
+  command_to_file = format("cd {oozie_tmp_dir} && {oozie_setup_sh_current} prepare-war {oozie_secure} ").strip()
 
   run_prepare_war = False
   if os.path.exists(prepare_war_cmd_file):
@@ -338,7 +372,9 @@ with Environment() as env:
   copy_tarballs_to_hdfs(format("/usr/hdp/{hdp_version}/pig/pig.tar.gz"), hdfs_path_prefix+"/hdp/apps/{{ hdp_stack_version }}/pig/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
   copy_tarballs_to_hdfs(format("/usr/hdp/{hdp_version}/hadoop-mapreduce/hadoop-streaming.jar"), hdfs_path_prefix+"/hdp/apps/{{ hdp_stack_version }}/mapreduce/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
   copy_tarballs_to_hdfs(format("/usr/hdp/{hdp_version}/sqoop/sqoop.tar.gz"), hdfs_path_prefix+"/hdp/apps/{{ hdp_stack_version }}/sqoop/", 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
-
+  print "Creating hdfs directories..."
+  createHdfsResources()
+  putCreatedHdfsResourcesToIgnore(env)
   
   # jar shouldn't be used before (read comment below)
   File(format("{ambari_libs_dir}/fast-hdfs-resource.jar"),
@@ -347,8 +383,12 @@ with Environment() as env:
   )
   # Create everything in one jar call (this is fast).
   # (! Before everything should be executed with action="create_on_execute/delete_on_execute" for this time-optimization to work)
-  params.HdfsResource(None, 
-               logoutput=True,
-               action="execute"
-  )
+  try:
+    params.HdfsResource(None, 
+                 logoutput=True,
+                 action="execute"
+    )
+  except:
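+    # Remove the ignore file so a failed run is retried in full next time.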
+    os.remove("/var/lib/ambari-agent/data/.hdfs_resource_ignore")
+    raise
   print "Completed tarball copy. Ambari preupload script completed."

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index 2a9d7c5..3570e45 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -240,6 +240,7 @@ import functools
 HdfsResource = functools.partial(
   HdfsResource,
   user=hdfs_user,
+  hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
index 96e2286..64b16c6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/AMBARI_METRICS/test_metrics_collector.py
@@ -311,7 +311,7 @@ class TestMetricsCollector(RMFTestCase):
                                   mode = 0775,
                                   hadoop_conf_dir = '/etc/hadoop/conf',
                                   type = 'directory',
-                                  action = ['create_on_execute'],
+                                  action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   hdfs_site=self.getConfig()['configurations']['hdfs-site'],
                                   principal_name=UnknownConfigurationMock(),
                                   default_fs='hdfs://c6401.ambari.apache.org:8020',
@@ -327,7 +327,7 @@ class TestMetricsCollector(RMFTestCase):
                                   mode = 0711,
                                   hadoop_conf_dir = '/etc/hadoop/conf',
                                   type = 'directory',
-                                  action = ['create_on_execute'],
+                                  action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   hdfs_site=self.getConfig()['configurations']['hdfs-site'],
                                   principal_name=UnknownConfigurationMock(),
                                   default_fs='hdfs://c6401.ambari.apache.org:8020',
@@ -339,7 +339,7 @@ class TestMetricsCollector(RMFTestCase):
                                   kinit_path_local = '/usr/bin/kinit',
                                   user = 'hdfs',
                                   hadoop_conf_dir = '/etc/hadoop/conf',
-                                  action = ['execute'],
+                                  action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   hdfs_site=self.getConfig()['configurations']['hdfs-site'],
                                   principal_name=UnknownConfigurationMock(),
                                   default_fs='hdfs://c6401.ambari.apache.org:8020',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index 13b2e33..d8cecec 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -340,7 +340,7 @@ class TestHBaseMaster(RMFTestCase):
         owner = 'hbase',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
     self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
         security_enabled = False,
@@ -353,7 +353,7 @@ class TestHBaseMaster(RMFTestCase):
         owner = 'hbase',
         hadoop_bin_dir = '/usr/bin',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0711,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -364,7 +364,7 @@ class TestHBaseMaster(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
 
@@ -478,7 +478,7 @@ class TestHBaseMaster(RMFTestCase):
         owner = 'hbase',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
     self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
         security_enabled = True,
@@ -491,7 +491,7 @@ class TestHBaseMaster(RMFTestCase):
         owner = 'hbase',
         hadoop_bin_dir = '/usr/bin',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0711,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -502,7 +502,7 @@ class TestHBaseMaster(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
 
@@ -627,7 +627,7 @@ class TestHBaseMaster(RMFTestCase):
         owner = 'hbase',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
     self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
         security_enabled = False,
@@ -642,7 +642,7 @@ class TestHBaseMaster(RMFTestCase):
         owner = 'hbase',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0711,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -655,7 +655,7 @@ class TestHBaseMaster(RMFTestCase):
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 39244ff..fc22d08 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -108,7 +108,7 @@ class TestNamenode(RMFTestCase):
         dfs_type = '',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0777,
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -125,7 +125,7 @@ class TestNamenode(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0770,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -139,7 +139,7 @@ class TestNamenode(RMFTestCase):
         principal_name = None,
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
@@ -221,7 +221,7 @@ class TestNamenode(RMFTestCase):
         dfs_type = '',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0777,
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -238,7 +238,7 @@ class TestNamenode(RMFTestCase):
         dfs_type = '',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0770,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -252,7 +252,7 @@ class TestNamenode(RMFTestCase):
         principal_name = None,
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
@@ -346,7 +346,7 @@ class TestNamenode(RMFTestCase):
         dfs_type = '',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0777,
         only_if = True
     )
@@ -360,7 +360,7 @@ class TestNamenode(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0770,
         only_if = True
     )
@@ -372,7 +372,7 @@ class TestNamenode(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
@@ -446,7 +446,7 @@ class TestNamenode(RMFTestCase):
         owner = 'hdfs',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0777,
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -463,7 +463,7 @@ class TestNamenode(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0770,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -477,7 +477,7 @@ class TestNamenode(RMFTestCase):
         principal_name = None,
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
@@ -543,7 +543,7 @@ class TestNamenode(RMFTestCase):
         owner = 'hdfs',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0777,
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -560,7 +560,7 @@ class TestNamenode(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0770,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -574,7 +574,7 @@ class TestNamenode(RMFTestCase):
         principal_name = None,
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
@@ -646,7 +646,7 @@ class TestNamenode(RMFTestCase):
         owner = 'hdfs',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0777,
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -663,7 +663,7 @@ class TestNamenode(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0770,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -677,7 +677,7 @@ class TestNamenode(RMFTestCase):
         principal_name = 'hdfs',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
@@ -749,7 +749,7 @@ class TestNamenode(RMFTestCase):
         owner = 'hdfs',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0777,
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -766,7 +766,7 @@ class TestNamenode(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0770,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -780,7 +780,7 @@ class TestNamenode(RMFTestCase):
         principal_name = None,
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
@@ -851,7 +851,7 @@ class TestNamenode(RMFTestCase):
         owner = 'hdfs',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0777,
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -868,7 +868,7 @@ class TestNamenode(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0770,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -882,7 +882,7 @@ class TestNamenode(RMFTestCase):
         principal_name = None,
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
@@ -961,7 +961,7 @@ class TestNamenode(RMFTestCase):
                               owner = 'hdfs',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               type = 'directory',
-                              action = ['create_on_execute'],
+                              action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               mode = 0777,
                               )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
@@ -978,7 +978,7 @@ class TestNamenode(RMFTestCase):
                               owner = 'ambari-qa',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               type = 'directory',
-                              action = ['create_on_execute'],
+                              action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               mode = 0770,
                               )
     self.assertResourceCalled('HdfsResource', None,
@@ -992,7 +992,7 @@ class TestNamenode(RMFTestCase):
                               principal_name = None,
                               user = 'hdfs',
                               dfs_type = '',
-                              action = ['execute'],
+                              action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               )
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
index 0f5afa8..851aee2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
@@ -69,7 +69,7 @@ class TestServiceCheck(RMFTestCase):
         principal_name = None,
         user = 'hdfs',
         dfs_type = '',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         mode = 0777,
@@ -84,7 +84,7 @@ class TestServiceCheck(RMFTestCase):
         principal_name = None,
         user = 'hdfs',
         dfs_type = '',
-        action = ['delete_on_execute'],
+        action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
     )
@@ -99,7 +99,7 @@ class TestServiceCheck(RMFTestCase):
         principal_name = None,
         user = 'hdfs',
         dfs_type = '',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
     )
@@ -113,7 +113,7 @@ class TestServiceCheck(RMFTestCase):
         principal_name = None,
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 494d16c..9929d00 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -340,7 +340,7 @@ class TestHiveServer(RMFTestCase):
         owner = 'hcat',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
         mode = 0755,
     )
     self.assertResourceCalled('HdfsResource', '/user/hcat',
@@ -353,7 +353,7 @@ class TestHiveServer(RMFTestCase):
         owner = 'hcat',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
         mode = 0755,
     )
 
@@ -371,7 +371,7 @@ class TestHiveServer(RMFTestCase):
         owner = 'hive',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
         mode = 0777,
     )
     self.assertResourceCalled('HdfsResource', '/user/hive',
@@ -384,7 +384,7 @@ class TestHiveServer(RMFTestCase):
         owner = 'hive',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
         mode = 0755,
     )
     if not no_tmp:
@@ -399,7 +399,7 @@ class TestHiveServer(RMFTestCase):
           group = 'hdfs',
           hadoop_bin_dir = '/usr/bin',
           type = 'directory',
-          action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+          action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
           mode = 0777,
       )
     self.assertResourceCalled('HdfsResource', None,
@@ -409,7 +409,7 @@ class TestHiveServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/etc/hive',
@@ -528,7 +528,7 @@ class TestHiveServer(RMFTestCase):
         owner = 'hcat',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0755,
     )
     self.assertResourceCalled('HdfsResource', '/user/hcat',
@@ -541,7 +541,7 @@ class TestHiveServer(RMFTestCase):
         owner = 'hcat',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0755,
     )
 
@@ -555,7 +555,7 @@ class TestHiveServer(RMFTestCase):
         owner = 'hive',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0777,
     )
     self.assertResourceCalled('HdfsResource', '/user/hive',
@@ -568,7 +568,7 @@ class TestHiveServer(RMFTestCase):
         owner = 'hive',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0755,
     )
     self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
@@ -582,7 +582,7 @@ class TestHiveServer(RMFTestCase):
         group = 'hdfs',
         hadoop_bin_dir = '/usr/bin',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0777,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -592,7 +592,7 @@ class TestHiveServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/etc/hive',
@@ -926,7 +926,7 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertNoMoreResources()
@@ -968,7 +968,7 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
index ea17c27..589b437 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
@@ -100,7 +100,7 @@ class TestServiceCheck(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
     self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.in',
         security_enabled = False,
@@ -116,7 +116,7 @@ class TestServiceCheck(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = False,
@@ -128,7 +128,7 @@ class TestServiceCheck(RMFTestCase):
         principal_name = 'missing_principal',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 idtest.ambari-qa.1431110511.43.pig no_keytab false /usr/bin/kinit no_principal',
@@ -212,7 +212,7 @@ class TestServiceCheck(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
     self.assertResourceCalled('HdfsResource', '/tmp/idtest.ambari-qa.1431110511.43.in',
         security_enabled = True,
@@ -228,7 +228,7 @@ class TestServiceCheck(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = True,
@@ -240,7 +240,7 @@ class TestServiceCheck(RMFTestCase):
         principal_name = 'hdfs',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Execute', '/tmp/templetonSmoke.sh c6402.ambari.apache.org ambari-qa 50111 idtest.ambari-qa.1431110511.43.pig /etc/security/keytabs/smokeuser.headless.keytab true /usr/bin/kinit ambari-qa@EXAMPLE.COM',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index ba1b84a..0683551 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -73,7 +73,7 @@ class TestOozieServer(RMFTestCase):
         owner = 'oozie',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0775,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -83,7 +83,7 @@ class TestOozieServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
@@ -291,7 +291,7 @@ class TestOozieServer(RMFTestCase):
                               owner = 'oozie',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               type = 'directory',
-                              action = ['create_on_execute'],
+                              action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               mode = 0775,
                               )
     self.assertResourceCalled('HdfsResource', None,
@@ -304,7 +304,7 @@ class TestOozieServer(RMFTestCase):
                               principal_name = UnknownConfigurationMock(),
                               user = 'hdfs',
                               dfs_type = '',
-                              action = ['execute'],
+                              action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hadoop_conf_dir = '/etc/hadoop/conf',
                               )
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
@@ -521,7 +521,7 @@ class TestOozieServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         principal_name = UnknownConfigurationMock(),
         recursive_chmod = True,
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         mode = 0755,
@@ -536,7 +536,7 @@ class TestOozieServer(RMFTestCase):
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-start.sh',
@@ -616,7 +616,7 @@ class TestOozieServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         principal_name = 'hdfs',
         recursive_chmod = True,
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         mode = 0755,
@@ -631,7 +631,7 @@ class TestOozieServer(RMFTestCase):
         principal_name = 'hdfs',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Execute', 'cd /var/tmp/oozie && /usr/lib/oozie/bin/oozie-start.sh',
@@ -670,7 +670,7 @@ class TestOozieServer(RMFTestCase):
         owner = 'oozie',
         hadoop_bin_dir = '/usr/bin',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0775,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -680,7 +680,7 @@ class TestOozieServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
@@ -858,7 +858,7 @@ class TestOozieServer(RMFTestCase):
         owner = 'oozie',
         hadoop_bin_dir = '/usr/bin',
         type = 'directory',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0775,
     )
     self.assertResourceCalled('HdfsResource', None,
@@ -869,7 +869,7 @@ class TestOozieServer(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
@@ -1406,7 +1406,7 @@ class TestOozieServer(RMFTestCase):
       group = 'hadoop',
       hadoop_conf_dir = '/usr/hdp/2.3.0.0-1234/hadoop/conf',
       type = 'directory',
-      action = ['create_on_execute'],
+      action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
       mode = 0755 )
 
     self.assertResourceCalled('HdfsResource', None,
@@ -1419,7 +1419,7 @@ class TestOozieServer(RMFTestCase):
       principal_name = UnknownConfigurationMock(),
       user = 'hdfs', 
       dfs_type = '',
-      action = ['execute'],
+      action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
       hadoop_conf_dir = '/usr/hdp/2.3.0.0-1234/hadoop/conf' )
 
     self.assertResourceCalled('Execute', '/usr/hdp/2.3.0.0-1234/oozie/bin/oozie-setup.sh sharelib create -fs hdfs://c6401.ambari.apache.org:8020',
@@ -1479,7 +1479,7 @@ class TestOozieServer(RMFTestCase):
       group = 'hadoop',
       hadoop_conf_dir = '/usr/hdp/2.3.0.0-1234/hadoop/conf',
       type = 'directory',
-      action = ['create_on_execute'],
+      action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
       mode = 0755 )
 
     self.assertResourceCalled('HdfsResource', None,
@@ -1492,7 +1492,7 @@ class TestOozieServer(RMFTestCase):
       principal_name = UnknownConfigurationMock(),
       user = 'hdfs',
       dfs_type = '',
-      action = ['execute'],
+      action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
       hadoop_conf_dir = '/usr/hdp/2.3.0.0-1234/hadoop/conf' )
 
     self.assertResourceCalled('Execute', '/usr/hdp/2.3.0.0-1234/oozie/bin/oozie-setup.sh sharelib create -fs hdfs://c6401.ambari.apache.org:8020',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
index a6d0145..dc35612 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_service_check.py
@@ -89,7 +89,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
     )
@@ -101,7 +101,7 @@ class TestServiceCheck(RMFTestCase):
         source = '//examples',
         user = 'hdfs',
         dfs_type = '',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         owner = 'ambari-qa',
@@ -114,7 +114,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['delete_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
     )
@@ -126,7 +126,7 @@ class TestServiceCheck(RMFTestCase):
         source = '//examples/input-data',
         user = 'hdfs',
         dfs_type = '',
-        action = ['create_on_execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         owner = 'ambari-qa',
@@ -139,7 +139,7 @@ class TestServiceCheck(RMFTestCase):
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'], hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Execute', '/tmp/oozieSmoke2.sh suse /var/lib/oozie /etc/oozie/conf /usr/bin http://c6402.ambari.apache.org:11000/oozie / /etc/hadoop/conf /usr/bin ambari-qa False',

http://git-wip-us.apache.org/repos/asf/ambari/blob/df2dbe15/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
index c5de4c3..9c0b035 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_service_check.py
@@ -45,7 +45,7 @@ class TestPigServiceCheck(RMFTestCase):
         hadoop_conf_dir = '/etc/hadoop/conf',
         dfs_type = '',
         type = 'directory',
-        action = ['delete_on_execute'],
+        action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
         security_enabled = False,
@@ -61,7 +61,7 @@ class TestPigServiceCheck(RMFTestCase):
         hadoop_conf_dir = '/etc/hadoop/conf',
         dfs_type = '',
         type = 'file',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = False,
@@ -73,7 +73,7 @@ class TestPigServiceCheck(RMFTestCase):
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
        
@@ -119,7 +119,7 @@ class TestPigServiceCheck(RMFTestCase):
         owner = 'ambari-qa',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
-        action = ['delete_on_execute'],
+        action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/passwd',
         security_enabled = True,
@@ -135,7 +135,7 @@ class TestPigServiceCheck(RMFTestCase):
         dfs_type = '',
         hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'file',
-        action = ['create_on_execute'],
+        action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = True,
@@ -147,7 +147,7 @@ class TestPigServiceCheck(RMFTestCase):
         principal_name = 'hdfs',
         user = 'hdfs',
         dfs_type = '',
-        action = ['execute'],
+        action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM;',
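
Every hunk in this commit threads the same new keyword argument, hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', through the expected HdfsResource calls. As a rough sketch of what such an ignore file enables on the agent side -- the helper names below are illustrative, not the actual resource_management API, and assume one HDFS path per line:

    import os

    IGNORE_FILE = '/var/lib/ambari-agent/data/.hdfs_resource_ignore'

    def load_ignored_paths(ignore_file=IGNORE_FILE):
        # Hypothetical helper: read the ignore file, one HDFS path per line.
        if not os.path.isfile(ignore_file):
            return set()
        with open(ignore_file) as fh:
            return set(line.strip() for line in fh if line.strip())

    def should_skip(hdfs_path, ignore_file=IGNORE_FILE):
        # A resource action would be skipped for any path listed in the file.
        return hdfs_path in load_ignored_paths(ignore_file)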


[12/33] ambari git commit: AMBARI-15054 Changing Yarn queues does not update hive.server2.tez.default.queues. (ababiichuk)

Posted by nc...@apache.org.
AMBARI-15054 Changing Yarn queues does not update hive.server2.tez.default.queues. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8cfb2db6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8cfb2db6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8cfb2db6

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 8cfb2db63021fe5d1ed7f44f1d4412e397810090
Parents: e4800e1
Author: ababiichuk <ab...@hortonworks.com>
Authored: Wed Feb 17 11:55:27 2016 +0200
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Wed Feb 17 11:55:27 2016 +0200

----------------------------------------------------------------------
 .../controllers/main/service/info/configs.js    |  23 +-
 ambari-web/app/controllers/wizard.js            |   3 -
 .../app/controllers/wizard/step7_controller.js  |   6 +-
 .../app/mixins/common/configs/configs_saver.js  |   4 -
 .../configs/objects/service_config_property.js  |   2 +
 ambari-web/app/utils/config.js                  | 151 +++++-------
 ambari-web/app/views/common/controls_view.js    |  75 +++++-
 .../test/controllers/wizard/step7_test.js       |  26 --
 ambari-web/test/utils/config_test.js            | 242 -------------------
 9 files changed, 140 insertions(+), 392 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8cfb2db6/ambari-web/app/controllers/main/service/info/configs.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/info/configs.js b/ambari-web/app/controllers/main/service/info/configs.js
index b6a434c..3cd652a 100644
--- a/ambari-web/app/controllers/main/service/info/configs.js
+++ b/ambari-web/app/controllers/main/service/info/configs.js
@@ -181,24 +181,6 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ConfigsLoader, A
   ],
 
   /**
-   * get array of config properties that are shown in settings tab
-   * @type {String[]}
-   */
-  settingsTabProperties: function () {
-    var properties = [];
-    App.Tab.find().forEach(function (t) {
-      if (!t.get('isAdvanced') && t.get('serviceName') === this.get('content.serviceName')) {
-        t.get('sections').forEach(function (s) {
-          s.get('subSections').forEach(function (ss) {
-            properties = properties.concat(ss.get('configProperties'));
-          });
-        });
-      }
-    }, this);
-    return properties;
-  }.property('content.serviceName', 'App.router.clusterController.isStackConfigsLoaded'),
-
-  /**
    * Dropdown menu items in filter combobox
    * @type {{attributeName: string, attributeValue: string, name: string, selected: boolean}[]}
    */
@@ -379,8 +361,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ConfigsLoader, A
 
     //put properties from capacity-scheduler.xml into one config with textarea view
     if (this.get('content.serviceName') === 'YARN') {
-      var configsToSkip = this.get('settingsTabProperties').filterProperty('filename', 'capacity-scheduler.xml');
-      configs = App.config.fileConfigsIntoTextarea(configs, 'capacity-scheduler.xml', configsToSkip);
+      configs = App.config.addYarnCapacityScheduler(configs);
     }
 
     if (this.get('content.serviceName') === 'KERBEROS') {
@@ -407,7 +388,7 @@ App.MainServiceInfoConfigsController = Em.Controller.extend(App.ConfigsLoader, A
    * @method mergeWithStackProperties
    */
   mergeWithStackProperties: function (configs) {
-    this.get('settingsTabProperties').forEach(function (advanced_id) {
+    App.config.getPropertiesFromTheme(this.get('content.serviceName')).forEach(function (advanced_id) {
       if (!configs.someProperty('id', advanced_id)) {
         var advanced = App.configsCollection.getConfig(advanced_id);
         if (advanced) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cfb2db6/ambari-web/app/controllers/wizard.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard.js b/ambari-web/app/controllers/wizard.js
index 05ef68e..329d246 100644
--- a/ambari-web/app/controllers/wizard.js
+++ b/ambari-web/app/controllers/wizard.js
@@ -889,9 +889,6 @@ App.WizardController = Em.Controller.extend(App.LocalStorage, App.ThemesMappingM
     var installedServiceNames = stepController.get('installedServiceNames') || [];
     var installedServiceNamesMap = installedServiceNames.toWickMap();
     stepController.get('stepConfigs').forEach(function (_content) {
-      if (_content.serviceName === 'YARN') {
-        _content.set('configs', App.config.textareaIntoFileConfigs(_content.get('configs'), 'capacity-scheduler.xml'));
-      }
       _content.get('configs').forEach(function (_configProperties) {
         if (!Em.isNone(_configProperties.get('group'))) {
           return false;

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cfb2db6/ambari-web/app/controllers/wizard/step7_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step7_controller.js b/ambari-web/app/controllers/wizard/step7_controller.js
index 634c86b..0b29fe5 100644
--- a/ambari-web/app/controllers/wizard/step7_controller.js
+++ b/ambari-web/app/controllers/wizard/step7_controller.js
@@ -718,9 +718,6 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, App.E
   },
 
   applyServicesConfigs: function (configs) {
-    if (this.get('allSelectedServiceNames').contains('YARN')) {
-      configs = App.config.fileConfigsIntoTextarea(configs, 'capacity-scheduler.xml', []);
-    }
     if (!this.get('installedServiceNames').contains('HAWQ') && this.get('allSelectedServiceNames').contains('HAWQ')) {
       this.updateHawqConfigs(configs);
     }
@@ -941,6 +938,9 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, App.E
     }, this);
 
     stepConfigs.forEach(function (service) {
+      if (service.get('serviceName') === 'YARN') {
+        configsByService[service.get('serviceName')] = App.config.addYarnCapacityScheduler(configsByService[service.get('serviceName')]);
+      }
       service.set('configs', configsByService[service.get('serviceName')]);
       if (['addServiceController', 'installerController'].contains(this.get('wizardController.name'))) {
         this.addHostNamesToConfigs(service, localDB.masterComponentHosts, localDB.slaveComponentHosts);

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cfb2db6/ambari-web/app/mixins/common/configs/configs_saver.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/configs/configs_saver.js b/ambari-web/app/mixins/common/configs/configs_saver.js
index d2b2623..c2da486 100644
--- a/ambari-web/app/mixins/common/configs/configs_saver.js
+++ b/ambari-web/app/mixins/common/configs/configs_saver.js
@@ -293,10 +293,6 @@ App.ConfigsSaverMixin = Em.Mixin.create({
    */
   getServiceConfigToSave: function(serviceName, configs) {
 
-    if (serviceName === 'YARN') {
-      configs = App.config.textareaIntoFileConfigs(configs, 'capacity-scheduler.xml');
-    }
-
     //generates list of properties that was changed
     var modifiedConfigs = this.getModifiedConfigs(configs);
     var serviceFilenames = Object.keys(App.StackService.find(serviceName).get('configTypes')).map(function (type) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cfb2db6/ambari-web/app/models/configs/objects/service_config_property.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/configs/objects/service_config_property.js b/ambari-web/app/models/configs/objects/service_config_property.js
index 21e3dd3..a437c98 100644
--- a/ambari-web/app/models/configs/objects/service_config_property.js
+++ b/ambari-web/app/models/configs/objects/service_config_property.js
@@ -329,6 +329,8 @@ App.ServiceConfigProperty = Em.Object.extend({
         return App.ServiceConfigComponentHostsView;
       case 'supportTextConnection':
         return App.checkConnectionView;
+      case 'capacityScheduler':
+        return App.CapacitySceduler;
       default:
         if (this.get('unit')) {
           return App.ServiceConfigTextFieldWithUnit;

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cfb2db6/ambari-web/app/utils/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/config.js b/ambari-web/app/utils/config.js
index d18c937..40d4699 100644
--- a/ambari-web/app/utils/config.js
+++ b/ambari-web/app/utils/config.js
@@ -438,7 +438,13 @@ App.config = Em.Object.create({
    * @returns {string}
    */
   getDefaultCategory: function(stackConfigProperty, fileName) {
-    return (stackConfigProperty ? 'Advanced ' : 'Custom ') + this.getConfigTagFromFileName(fileName);
+    var tag = this.getConfigTagFromFileName(fileName);
+    switch (tag) {
+      case 'capacity-scheduler':
+        return 'CapacityScheduler';
+      default :
+        return (stackConfigProperty ? 'Advanced ' : 'Custom ') + tag;
+    }
   },
 
   /**
@@ -693,106 +699,67 @@ App.config = Em.Object.create({
     return App.ServiceConfigProperty.create(propertyObject);
   },
 
-  complexConfigsTemplate: [
-    {
-      "name": "capacity-scheduler",
-      "displayName": "Capacity Scheduler",
-      "value": "",
-      "description": "Capacity Scheduler properties",
-      "displayType": "custom",
-      "isOverridable": true,
-      "isRequired": true,
-      "isVisible": true,
-      "isReconfigurable": true,
-      "supportsFinal": false,
-      "serviceName": "YARN",
-      "filename": "capacity-scheduler.xml",
-      "category": "CapacityScheduler"
-    }
-  ],
-
   /**
-   * transform set of configs from file
-   * into one config with textarea content:
-   * name=value
-   * @param {App.ServiceConfigProperty[]} configs
-   * @param {String} filename
-   * @param {App.ServiceConfigProperty[]} [configsToSkip=[]]
-   * @return {*}
+   * Fold capacity-scheduler.xml properties into a single textarea config
+   * @param configs
    */
-  fileConfigsIntoTextarea: function (configs, filename, configsToSkip) {
-    var fileConfigs = configs.filterProperty('filename', filename);
-    var value = '', savedValue = '', recommendedValue = '';
-    var template = this.get('complexConfigsTemplate').findProperty('filename', filename);
-    var complexConfig = $.extend({}, template);
-    if (complexConfig) {
-      fileConfigs.forEach(function (_config) {
-        if (!(configsToSkip && configsToSkip.someProperty('name', _config.name))) {
-          value += _config.name + '=' + _config.value + '\n';
-          if (!Em.isNone(_config.savedValue)) {
-            savedValue += _config.name + '=' + _config.savedValue + '\n';
-          }
-          if (!Em.isNone(_config.recommendedValue)) {
-            recommendedValue += _config.name + '=' + _config.recommendedValue + '\n';
-          }
-        }
-      }, this);
-      var isFinal = fileConfigs.someProperty('isFinal', true);
-      var savedIsFinal = fileConfigs.someProperty('savedIsFinal', true);
-      var recommendedIsFinal = fileConfigs.someProperty('recommendedIsFinal', true);
-      complexConfig.value = value;
-      if (savedValue) {
-        complexConfig.savedValue = savedValue;
+  addYarnCapacityScheduler: function(configs) {
+    var value = '', savedValue = '', recommendedValue = '',
+      excludedConfigs = App.config.getPropertiesFromTheme('YARN');
+
+    var connectedConfigs = configs.filter(function(config) {
+      return !excludedConfigs.contains(App.config.configId(config.get('name'), config.get('filename'))) && (config.get('filename') === 'capacity-scheduler.xml');
+    });
+    connectedConfigs.setEach('isVisible', false);
+
+    connectedConfigs.forEach(function (config) {
+      value += config.get('name') + '=' + config.get('value') + '\n';
+      if (!Em.isNone(config.get('savedValue'))) {
+        savedValue += config.get('name') + '=' + config.get('savedValue') + '\n';
       }
-      if (recommendedValue) {
-        complexConfig.recommendedValue = recommendedValue;
+      if (!Em.isNone(config.get('recommendedValue'))) {
+        recommendedValue += config.get('name') + '=' + config.get('recommendedValue') + '\n';
       }
-      complexConfig.isFinal = isFinal;
-      complexConfig.savedIsFinal = savedIsFinal;
-      complexConfig.recommendedIsFinal = recommendedIsFinal;
-      configs = configs.filter(function (_config) {
-        return _config.filename !== filename || (configsToSkip && configsToSkip.someProperty('name', _config.name));
-      });
-      configs.push(App.ServiceConfigProperty.create(complexConfig));
-    }
+    }, this);
+
+    var isFinal = connectedConfigs.someProperty('isFinal', true);
+    var savedIsFinal = connectedConfigs.someProperty('savedIsFinal', true);
+    var recommendedIsFinal = connectedConfigs.someProperty('recommendedIsFinal', true);
+
+    var cs = App.config.createDefaultConfig('capacity-scheduler', 'YARN', 'capacity-scheduler.xml', true, {
+      'value': value,
+      'savedValue': savedValue || null,
+      'recommendedValue': recommendedValue || null,
+      'isFinal': isFinal,
+      'savedIsFinal': savedIsFinal,
+      'recommendedIsFinal': recommendedIsFinal,
+      'displayName': 'Capacity Scheduler',
+      'description': 'Capacity Scheduler properties',
+      'displayType': 'capacityScheduler',
+      'isRequiredByAgent': false
+    });
+
+    configs.push(App.ServiceConfigProperty.create(cs));
     return configs;
   },
 
   /**
-   * transform one config with textarea content
-   * into set of configs of file
-   * @param configs
-   * @param filename
-   * @return {*}
+   * Collect config property ids referenced by a service's non-advanced theme tabs
+   * @param serviceName
+   * @returns {Array}
    */
-  textareaIntoFileConfigs: function (configs, filename) {
-    var complexConfigName = this.get('complexConfigsTemplate').findProperty('filename', filename).name;
-    var configsTextarea = configs.findProperty('name', complexConfigName);
-    if (configsTextarea && !App.get('testMode')) {
-      var properties = configsTextarea.get('value').split('\n');
-
-      properties.forEach(function (_property) {
-        var name, value;
-        if (_property) {
-          _property = _property.split('=');
-          name = _property[0];
-          value = (_property[1]) ? _property[1] : "";
-          configs.push(Em.Object.create({
-            name: name,
-            value: value,
-            savedValue: value,
-            serviceName: configsTextarea.get('serviceName'),
-            filename: filename,
-            isFinal: configsTextarea.get('isFinal'),
-            isNotDefaultValue: configsTextarea.get('isNotDefaultValue'),
-            isRequiredByAgent: configsTextarea.get('isRequiredByAgent'),
-            group: null
-          }));
-        }
-      });
-      return configs.without(configsTextarea);
-    }
-    return configs;
+  getPropertiesFromTheme: function (serviceName) {
+    var properties = [];
+    App.Tab.find().forEach(function (t) {
+      if (!t.get('isAdvanced') && t.get('serviceName') === serviceName) {
+        t.get('sections').forEach(function (s) {
+          s.get('subSections').forEach(function (ss) {
+            properties = properties.concat(ss.get('configProperties'));
+          });
+        });
+      }
+    }, this);
+    return properties;
   },
 
   /**

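addYarnCapacityScheduler above replaces the old fileConfigsIntoTextarea path: every capacity-scheduler.xml property not claimed by the YARN theme is folded into a single name=value-per-line string. A minimal Python mirror of that aggregation step (the Ember original also carries savedValue, recommendedValue and the final flags; serialize_scheduler_configs is an illustrative name, not real Ambari code):

    def serialize_scheduler_configs(configs, excluded_names=()):
        # configs: list of dicts standing in for the Ember config objects.
        lines = ['%s=%s' % (c['name'], c['value'])
                 for c in configs
                 if c['filename'] == 'capacity-scheduler.xml'
                 and c['name'] not in excluded_names]
        return '\n'.join(lines) + '\n' if lines else ''

    # Two scheduler properties collapse into one textarea value:
    # 'yarn.scheduler.capacity.root.queues=default\n
    #  yarn.scheduler.capacity.maximum-applications=10000\n'
    print(serialize_scheduler_configs([
        {'name': 'yarn.scheduler.capacity.root.queues',
         'value': 'default', 'filename': 'capacity-scheduler.xml'},
        {'name': 'yarn.scheduler.capacity.maximum-applications',
         'value': '10000', 'filename': 'capacity-scheduler.xml'},
    ]))
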
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cfb2db6/ambari-web/app/views/common/controls_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/common/controls_view.js b/ambari-web/app/views/common/controls_view.js
index 9226b15..2290f58 100644
--- a/ambari-web/app/views/common/controls_view.js
+++ b/ambari-web/app/views/common/controls_view.js
@@ -104,7 +104,7 @@ App.SupportsDependentConfigs = Ember.Mixin.create({
       var type = App.config.getConfigTagFromFileName(config.get('filename'));
       var p = App.configsCollection.getConfig(App.config.configId(name, type));
       controller.removeCurrentFromDependentList(config, saveRecommended);
-       if ((p && Em.get(p, 'propertyDependedBy.length') > 0 || Em.get(p, 'displayType') === 'user') && config.get('oldValue') !== config.get('value')) {
+       if ((p && Em.get(p, 'propertyDependedBy.length') > 0 || config.get('displayType') === 'user') && config.get('oldValue') !== config.get('value')) {
          var old = config.get('oldValue');
          config.set('oldValue', config.get('value'));
          return controller.loadConfigRecommendations([{
@@ -295,6 +295,79 @@ App.ServiceConfigTextArea = Ember.TextArea.extend(App.ServiceConfigPopoverSuppor
   widthClass: 'span9'
 });
 
+
+/**
+ * Special config type for Capacity Scheduler
+ */
+App.CapacitySceduler = Ember.TextArea.extend(App.ServiceConfigPopoverSupport, App.ServiceConfigCalculateId, App.SupportsDependentConfigs, {
+
+  configs: function() {
+    return this.get('controller.stepConfigs').findProperty('serviceName', 'YARN').get('configs');
+  }.property('controller.stepConfigs'),
+
+  valueBinding: 'serviceConfig.value',
+  excludedConfigs: function() {
+    return App.config.getPropertiesFromTheme('YARN');
+  }.property(),
+  rows: 16,
+  classNames: ['directories'],
+  classNameBindings: ['widthClass'],
+  widthClass: 'span9',
+
+  connectedConfigs: function() {
+    return this.get('categoryConfigsAll').filter(function(config) {
+      return !this.get('excludedConfigs').contains(App.config.configId(config.get('name'), config.get('filename')))
+        && (config.get('name') !== this.get('serviceConfig.name'))
+        && (config.get('filename') === 'capacity-scheduler.xml');
+    }, this);
+  }.property('categoryConfigsAll.length'),
+
+  valueObserver: function () {
+    var self = this, controller = this.get('controller'),
+      names = [];
+    delay(function () {
+      self.get('serviceConfig.value').split('\n').forEach(function (_property) {
+        if (_property) {
+          _property = _property.split('=');
+          var name = _property[0];
+          var value = (_property[1]) ? _property[1] : "";
+
+          names.push(name);
+
+          var cfg = self.get('connectedConfigs').findProperty('name', name);
+          if (cfg) {
+            /** update configs **/
+            if (cfg.get('value') !== value) {
+              cfg.set('value', value);
+              self.sendRequestRorDependentConfigs(cfg, controller);
+            }
+          } else {
+            /** add configs **/
+            var newCfg = App.config.getDefaultConfig(name, 'YARN', 'capacity-scheduler', {
+              'value': value
+            });
+            self.get('configs').pushObject(App.ServiceConfigProperty.create(newCfg));
+          }
+        }
+      });
+
+      /** remove configs **/
+      self.get('connectedConfigs').filter(function(c) {
+        return !names.contains(c.get('name'));
+      }).forEach(function(c) {
+        self.get('configs').removeObject(c);
+      });
+    }, 500);
+  }.observes('serviceConfig.value'),
+
+  /**
+   * update final value for connected configs
+   */
+  isFinalObserver: function () {
+    this.get('connectedConfigs').setEach('isFinal', this.get('serviceConfig.isFinal'));
+  }.observes('serviceConfig.isFinal')
+});
+
 /**
  * Textarea control for content type
  * @type {*}

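valueObserver above performs the inverse transformation on every debounced edit: the textarea value is re-split into name/value pairs, and the hidden underlying configs are updated, added, or removed to match. The parsing rule, mirrored as a Python sketch -- like the original split('='), anything past a second '=' on a line is dropped:

    def parse_scheduler_text(text):
        pairs = []
        for line in text.split('\n'):
            if not line:
                continue
            parts = line.split('=')
            # parts[1] may be absent or empty; extra '=' pieces are
            # discarded, matching the JS _property.split('=') indexing.
            pairs.append((parts[0], parts[1] if len(parts) > 1 and parts[1] else ''))
        return pairs

    # parse_scheduler_text('a=1\nb=2\n') -> [('a', '1'), ('b', '2')]
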
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cfb2db6/ambari-web/test/controllers/wizard/step7_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/wizard/step7_test.js b/ambari-web/test/controllers/wizard/step7_test.js
index 8a4a96f..a089150 100644
--- a/ambari-web/test/controllers/wizard/step7_test.js
+++ b/ambari-web/test/controllers/wizard/step7_test.js
@@ -1071,7 +1071,6 @@ describe('App.InstallerStep7Controller', function () {
           stackConfigsLoaded: true
         })
       });
-      sinon.stub(App.config, 'fileConfigsIntoTextarea', Em.K);
       sinon.stub(installerStep7Controller, 'clearStep', Em.K);
       sinon.stub(installerStep7Controller, 'getConfigTags', Em.K);
       sinon.stub(installerStep7Controller, 'setInstalledServiceConfigs', Em.K);
@@ -1081,7 +1080,6 @@ describe('App.InstallerStep7Controller', function () {
       sinon.stub(App.router, 'send', Em.K);
     });
     afterEach(function () {
-      App.config.fileConfigsIntoTextarea.restore();
       installerStep7Controller.clearStep.restore();
       installerStep7Controller.getConfigTags.restore();
       installerStep7Controller.setInstalledServiceConfigs.restore();
@@ -1111,9 +1109,6 @@ describe('App.InstallerStep7Controller', function () {
       installerStep7Controller.reopen({
         allSelectedServiceNames: []
       });
-      sinon.stub(App.config, 'fileConfigsIntoTextarea', function(configs) {
-        return configs;
-      });
       sinon.stub(installerStep7Controller, 'loadConfigRecommendations', function(c, callback) {
         return callback();
       });
@@ -1137,7 +1132,6 @@ describe('App.InstallerStep7Controller', function () {
     });
 
     afterEach(function () {
-      App.config.fileConfigsIntoTextarea.restore();
       installerStep7Controller.loadConfigRecommendations.restore();
       installerStep7Controller.checkHostOverrideInstaller.restore();
       installerStep7Controller.selectProperService.restore();
@@ -1158,26 +1152,6 @@ describe('App.InstallerStep7Controller', function () {
      expect(installerStep7Controller.selectProperService.calledOnce).to.equal(true);
     });
 
-    Em.A([
-      {
-        allSelectedServiceNames: ['YARN'],
-        fileConfigsIntoTextarea: true,
-        m: 'should run fileConfigsIntoTextarea'
-      }
-    ]).forEach(function(t) {
-      it(t.m, function () {
-        installerStep7Controller.reopen({
-          allSelectedServiceNames: t.allSelectedServiceNames
-        });
-        installerStep7Controller.applyServicesConfigs([{name: 'configs'}]);
-        if (t.fileConfigsIntoTextarea) {
-          expect(App.config.fileConfigsIntoTextarea.calledWith([{name: 'configs'}], 'capacity-scheduler.xml')).to.equal(true);
-        } else {
-          expect(App.config.fileConfigsIntoTextarea.calledOnce).to.equal(false);
-        }
-      });
-    });
-
   });
 
   describe('#removeHawqStandbyHostAddressConfig', function() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cfb2db6/ambari-web/test/utils/config_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/utils/config_test.js b/ambari-web/test/utils/config_test.js
index 0df828e..18471a2 100644
--- a/ambari-web/test/utils/config_test.js
+++ b/ambari-web/test/utils/config_test.js
@@ -29,248 +29,6 @@ function dummyCopy(val) {
 
 describe('App.config', function () {
 
-  describe('#fileConfigsIntoTextarea', function () {
-
-    var filename = 'capacity-scheduler.xml';
-
-    var configs = [
-      {
-        name: 'config1',
-        value: 'value1',
-        recommendedValue: 'value1',
-        filename: 'capacity-scheduler.xml'
-      },
-      {
-        name: 'config2',
-        value: 'value2',
-        recommendedValue: 'value2',
-        filename: 'capacity-scheduler.xml'
-      }
-    ];
-
-    var c3 = {
-      name: 'config3',
-      value: 'value3',
-      recommendedValue: 'value3',
-      filename: 'capacity-scheduler.xml'
-    };
-
-    describe('two configs into textarea', function () {
-      var result;
-      beforeEach(function () {
-        result = App.config.fileConfigsIntoTextarea(configs, filename);
-      });
-      it('One config is returned', function () {
-        expect(result.length).to.equal(1);
-      });
-      it('value is valid', function () {
-        expect(result[0].value).to.equal('config1=value1\nconfig2=value2\n');
-      });
-      it('recommendedValue is valid', function () {
-        expect(result[0].recommendedValue).to.equal('config1=value1\nconfig2=value2\n');
-      });
-    });
-
-    describe('three config into textarea', function () {
-      var newConfigs = dummyCopy(configs);
-      newConfigs.push(dummyCopy(c3));
-      var result;
-      beforeEach(function () {
-        result = App.config.fileConfigsIntoTextarea(newConfigs, filename);
-      });
-      it('One config is returned', function () {
-        expect(result.length).to.equal(1);
-      });
-      it('valid is valid', function () {
-        expect(result[0].value).to.equal('config1=value1\nconfig2=value2\nconfig3=value3\n');
-      });
-      it('recommendedValue is valid', function () {
-        expect(result[0].recommendedValue).to.equal('config1=value1\nconfig2=value2\nconfig3=value3\n');
-      });
-    });
-
-    describe('one of three configs has different filename', function () {
-      var newConfigs = dummyCopy(configs);
-      newConfigs.push(dummyCopy(c3));
-      newConfigs[1].filename = 'another filename';
-      var result;
-
-      beforeEach(function () {
-        result = App.config.fileConfigsIntoTextarea(newConfigs, filename);
-      });
-
-      it('Two configs are returned', function () {
-        //result contains two configs: one with different filename and one textarea config
-        expect(result.length).to.equal(2);
-      });
-      it('Value is valid', function () {
-        expect(result[1].value).to.equal('config1=value1\nconfig3=value3\n');
-      });
-      it('RecommendedValue is valid', function () {
-        expect(result[1].recommendedValue).to.equal('config1=value1\nconfig3=value3\n');
-      });
-    });
-
-    describe('none configs into empty textarea', function () {
-      var result;
-      beforeEach(function () {
-        result = App.config.fileConfigsIntoTextarea([], 'capacity-scheduler.xml');
-      });
-      it('One config is returned', function () {
-        expect(result.length).to.equal(1);
-      });
-      it('value is empty', function () {
-        expect(result[0].value).to.equal('');
-      });
-      it('recommendedValue is none', function () {
-        expect(Em.isNone(result[0].recommendedValue)).to.be.true;
-      });
-      it('savedValue is none', function () {
-        expect(Em.isNone(result[0].savedValue)).to.be.true;
-      });
-    });
-
-    describe("filename has configs that shouldn't be included in textarea", function () {
-      var newConfigs = dummyCopy(configs);
-      newConfigs.push(dummyCopy(c3));
-      var result;
-      beforeEach(function () {
-        result = App.config.fileConfigsIntoTextarea(newConfigs, 'capacity-scheduler.xml', [c3]);
-      });
-      it('Two configs are returned', function () {
-        expect(result.length).to.equal(2);
-      });
-      it('value is correct', function () {
-        expect(result[1].value).to.equal('config1=value1\nconfig2=value2\n');
-      });
-      it('recommendedValue is correct', function () {
-        expect(result[1].recommendedValue).to.equal('config1=value1\nconfig2=value2\n');
-      });
-      it('skipped config is correct', function () {
-        expect(newConfigs.findProperty('name', 'config3')).to.eql(c3);
-      });
-    });
-
-  });
-
-  describe('#textareaIntoFileConfigs', function () {
-    var filename = 'capacity-scheduler.xml';
-    var testData = [
-      {
-        configs: [Em.Object.create({
-          "name": "capacity-scheduler",
-          "value": "config1=value1",
-          "filename": "capacity-scheduler.xml",
-          "isRequiredByAgent": true
-        })]
-      },
-      {
-        configs: [Em.Object.create({
-          "name": "capacity-scheduler",
-          "value": "config1=value1\nconfig2=value2\n",
-          "filename": "capacity-scheduler.xml",
-          "isRequiredByAgent": false
-        })]
-      },
-      {
-        configs: [Em.Object.create({
-          "name": "capacity-scheduler",
-          "value": "config1=value1,value2\n",
-          "filename": "capacity-scheduler.xml",
-          "isRequiredByAgent": true
-        })]
-      },
-      {
-        configs: [Em.Object.create({
-          "name": "capacity-scheduler",
-          "value": "config1=value1 config2=value2\n",
-          "filename": "capacity-scheduler.xml",
-          "isRequiredByAgent": false
-        })]
-      }
-    ];
-
-    describe('config1=value1 to one config', function () {
-      var result;
-      beforeEach(function () {
-        result = App.config.textareaIntoFileConfigs(testData[0].configs, filename);
-      });
-      it('One config is returned', function () {
-        expect(result.length).to.equal(1);
-      });
-      it('value is correct', function () {
-        expect(result[0].value).to.equal('value1');
-      });
-      it('name is correct', function () {
-        expect(result[0].name).to.equal('config1');
-      });
-      it('isRequiredByAgent is true', function () {
-        expect(result[0].isRequiredByAgent).to.be.true;
-      });
-    });
-
-    describe('config1=value1\\nconfig2=value2\\n to two configs', function () {
-      var result;
-      beforeEach(function () {
-        result = App.config.textareaIntoFileConfigs(testData[1].configs, filename);
-      });
-      it('Two configs are returned', function (){
-        expect(result.length).to.equal(2);
-      });
-      it('1st value is valid', function (){
-        expect(result[0].value).to.equal('value1');
-      });
-      it('1st name is valid', function (){
-        expect(result[0].name).to.equal('config1');
-      });
-      it('2nd value is valid', function (){
-        expect(result[1].value).to.equal('value2');
-      });
-      it('2nd name is valid', function (){
-        expect(result[1].name).to.equal('config2');
-      });
-      it('1st isRequiredByAgent is false', function (){
-        expect(result[0].isRequiredByAgent).to.be.false;
-      });
-      it('2nd isRequiredByAgent is false', function (){
-        expect(result[1].isRequiredByAgent).to.be.false;
-      });
-    });
-
-    describe('config1=value1,value2\n to one config', function () {
-      var result;
-      beforeEach(function () {
-        result = App.config.textareaIntoFileConfigs(testData[2].configs, filename);
-      });
-      it('One config is returned', function () {
-        expect(result.length).to.equal(1);
-      });
-      it('value is correct', function () {
-        expect(result[0].value).to.equal('value1,value2');
-      });
-      it('name is correct', function () {
-        expect(result[0].name).to.equal('config1');
-      });
-      it('isRequiredByAgent is true', function () {
-        expect(result[0].isRequiredByAgent).to.be.true;
-      });
-    });
-
-    describe('config1=value1 config2=value2 to two configs', function () {
-      var result;
-      beforeEach(function () {
-        result = App.config.textareaIntoFileConfigs(testData[3].configs, filename);
-      });
-      it('One config is returned', function () {
-        expect(result.length).to.equal(1);
-      });
-      it('isRequiredByAgent is false', function () {
-        expect(result[0].isRequiredByAgent).to.be.false;
-      });
-    });
-
-  });
-
   describe('#trimProperty',function() {
     var testMessage = 'displayType `{0}`, value `{1}`{3} should return `{2}`';
     var tests = [


[23/33] ambari git commit: AMBARI-15074 Combo Search: Implement "host" related filters (Joe Wang via rzang)

Posted by nc...@apache.org.
AMBARI-15074 Combo Search: Implement "host" related filters (Joe Wang via rzang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f11d76db
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f11d76db
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f11d76db

Branch: refs/heads/branch-dev-patch-upgrade
Commit: f11d76db25714e0bea2b52f907051664ee160aaa
Parents: 4761fe7
Author: Richard Zang <rz...@apache.org>
Authored: Wed Feb 17 13:51:07 2016 -0800
Committer: Richard Zang <rz...@apache.org>
Committed: Wed Feb 17 13:51:07 2016 -0800

----------------------------------------------------------------------
 ambari-web/app/controllers/main/host.js         | 18 +++++++
 .../controllers/main/host/combo_search_box.js   | 28 +---------
 .../app/views/main/host/combo_search_box.js     | 55 +++++++++++++++++++-
 3 files changed, 74 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f11d76db/ambari-web/app/controllers/main/host.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/host.js b/ambari-web/app/controllers/main/host.js
index a38b2db..d3574b2 100644
--- a/ambari-web/app/controllers/main/host.js
+++ b/ambari-web/app/controllers/main/host.js
@@ -100,6 +100,21 @@ App.MainHostController = Em.ArrayController.extend(App.TableServerMixin, {
       type: 'MULTIPLE'
     },
     {
+      name: 'hostComponents2',
+      key: 'host_components/HostRoles/component_name',
+      type: 'MATCH'
+    },
+    {
+      name: 'services',
+      key: 'host_components/HostRoles/service_name',
+      type: 'MATCH'
+    },
+    {
+      name: 'state',
+      key: 'host_components/HostRoles/state',
+      type: 'MATCH'
+    },
+    {
       name: 'healthClass',
       key: 'Hosts/host_status',
       type: 'EQUAL'
@@ -544,6 +559,9 @@ App.MainHostController = Em.ArrayController.extend(App.TableServerMixin, {
     associations[10] = 'selected';
     associations[11] = 'hostStackVersion';
     associations[12] = 'rack';
+    associations[13] = 'services';
+    associations[14] = 'state';
+    associations[15] = 'hostComponents2';
     return associations;
   }.property()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f11d76db/ambari-web/app/controllers/main/host/combo_search_box.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/host/combo_search_box.js b/ambari-web/app/controllers/main/host/combo_search_box.js
index f4cc50f..0aeb31b 100644
--- a/ambari-web/app/controllers/main/host/combo_search_box.js
+++ b/ambari-web/app/controllers/main/host/combo_search_box.js
@@ -24,23 +24,6 @@ App.MainHostComboSearchBoxController = Em.Controller.extend({
   page_size: 10,
 
   VSCallbacks : {
-    search: function (query, searchCollection) {
-      var $query = $('#search_query');
-      var count = searchCollection.size();
-      $query.stop().animate({opacity: 1}, {duration: 300, queue: false});
-      $query.html('<span class="raquo">&raquo;</span> You searched for: ' +
-      '<b>' + (query || '<i>nothing</i>') + '</b>. ' +
-      '(' + count + ' facet' + (count == 1 ? '' : 's') + ')');
-      clearTimeout(window.queryHideDelay);
-      window.queryHideDelay = setTimeout(function () {
-        $query.animate({
-          opacity: 0
-        }, {
-          duration: 1000,
-          queue: false
-        });
-      }, 2000);
-    },
 
     facetMatches: function (callback) {
       callback([
@@ -57,6 +40,7 @@ App.MainHostComboSearchBoxController = Em.Controller.extend({
 
     valueMatches: function (facet, searchTerm, callback) {
       var controller = App.router.get('mainHostComboSearchBoxController');
+      var category_mocks = require('data/host/categories');
       switch (facet) {
         case 'host_name':
         case 'ip':
@@ -71,15 +55,7 @@ App.MainHostComboSearchBoxController = Em.Controller.extend({
           callback(App.StackVersion.find().toArray().mapProperty('name'));
           break;
         case 'health':
-          callback([
-            Em.I18n.t('hosts.host.healthStatusCategory.green'),
-            Em.I18n.t('hosts.host.healthStatusCategory.red'),
-            Em.I18n.t('hosts.host.healthStatusCategory.orange'),
-            Em.I18n.t('hosts.host.healthStatusCategory.yellow'),
-            Em.I18n.t('hosts.host.alerts.label'),
-            Em.I18n.t('common.restart'),
-            Em.I18n.t('common.passive_state')
-          ]);
+          callback(category_mocks.slice(1).mapProperty('healthStatus'), {preserveOrder: true});
           break;
         case 'service':
           callback(App.Service.find().toArray().mapProperty('serviceName'), {preserveOrder: true});

http://git-wip-us.apache.org/repos/asf/ambari/blob/f11d76db/ambari-web/app/views/main/host/combo_search_box.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/host/combo_search_box.js b/ambari-web/app/views/main/host/combo_search_box.js
index 0ab6029..93c2b9b 100644
--- a/ambari-web/app/views/main/host/combo_search_box.js
+++ b/ambari-web/app/views/main/host/combo_search_box.js
@@ -25,6 +25,59 @@ App.MainHostComboSearchBoxView = Em.View.extend({
   },
 
   initVS: function() {
+    var self = this;
+
+    var callbacks = this.get('controller').VSCallbacks;
+    callbacks.search = function (query, searchCollection) {
+
+      searchCollection.models.forEach(function (data) {
+        var query = data.attributes;
+
+        switch (query.category) {
+          case 'health':
+            self.get('parentView').get('parentView').updateFilter(0, query.value, 'string');
+            break;
+          case 'host_name':
+            self.get('parentView').get('parentView').updateFilter(1, query.value, 'string');
+            break;
+          case 'ip':
+            self.get('parentView').get('parentView').updateFilter(2, query.value, 'string');
+            break;
+          case 'rack':
+            self.get('parentView').get('parentView').updateFilter(12, query.value, 'string');
+            break;
+          case 'version':
+            self.get('parentView').get('parentView').updateFilter(11, query.value, 'string');
+            break;
+          case 'component':
+            self.get('parentView').get('parentView').updateFilter(15, query.value, 'string');
+            break;
+          case 'service':
+            self.get('parentView').get('parentView').updateFilter(13, query.value, 'string');
+            break;
+          case 'state':
+            self.get('parentView').get('parentView').updateFilter(14, query.value, 'string');
+            break;
+        }
+      });
+
+      var $query = $('#search_query');
+      var count = searchCollection.size();
+      $query.stop().animate({opacity: 1}, {duration: 300, queue: false});
+      $query.html('<span class="raquo">&raquo;</span> You searched for: ' +
+          '<b>' + (query || '<i>nothing</i>') + '</b>. ' +
+          '(' + count + ' facet' + (count == 1 ? '' : 's') + ')');
+      clearTimeout(window.queryHideDelay);
+      window.queryHideDelay = setTimeout(function () {
+        $query.animate({
+          opacity: 0
+        }, {
+          duration: 1000,
+          queue: false
+        });
+      }, 2000);
+    };
+
     window.visualSearch = VS.init({
       container: $('#combo_search_box'),
       query: '',
@@ -33,7 +86,7 @@ App.MainHostComboSearchBoxView = Em.View.extend({
       unquotable: [
         'text'
       ],
-      callbacks: this.get('controller').VSCallbacks
+      callbacks: callbacks
     });
   }
 });
\ No newline at end of file
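
For readers skimming the diff above: the search callback moved into initVS simply maps each VisualSearch facet onto a filter column of the hosts table. A minimal sketch of that wiring, assuming a searchCollection whose models carry {category, value} attributes; the names FACET_TO_COLUMN and applyFacets are illustrative and not part of the commit, while the column indices mirror the associations map added to host.js:

  // Facet categories mapped to the filter column indices registered in
  // host.js (health=0, host_name=1, ip=2, version=11, rack=12,
  // services=13, state=14, hostComponents2=15).
  var FACET_TO_COLUMN = {
    health: 0, host_name: 1, ip: 2, version: 11, rack: 12,
    service: 13, state: 14, component: 15
  };

  // Apply every facet in the VisualSearch collection as a table filter.
  function applyFacets(searchCollection, tableView) {
    searchCollection.models.forEach(function (model) {
      var facet = model.attributes;
      var column = FACET_TO_COLUMN[facet.category];
      if (column !== undefined) {
        tableView.updateFilter(column, facet.value, 'string');
      }
    });
  }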


[04/33] ambari git commit: AMBARI-15037. When saving a SCRIPT edit, it loses the script path (Joe Wang via rzang)

Posted by nc...@apache.org.
AMBARI-15037. When saving a SCRIPT edit, it loses the script path (Joe Wang via rzang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d036fc01
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d036fc01
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d036fc01

Branch: refs/heads/branch-dev-patch-upgrade
Commit: d036fc0170521a46b4b8977ef5d2d71f3daacfb7
Parents: b900d03
Author: Richard Zang <rz...@apache.org>
Authored: Tue Feb 16 11:09:09 2016 -0800
Committer: Richard Zang <rz...@apache.org>
Committed: Tue Feb 16 11:09:09 2016 -0800

----------------------------------------------------------------------
 .../app/controllers/main/alerts/definition_configs_controller.js | 4 ++--
 .../main/alerts/definitions_configs_controller_test.js           | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d036fc01/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/alerts/definition_configs_controller.js b/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
index ec5680c..130434f 100644
--- a/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
+++ b/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
@@ -542,10 +542,10 @@ App.MainAlertDefinitionConfigsController = Em.Controller.extend({
 
     // `source.parameters` is an array and should be updated separately from other configs
     if (this.get('content.parameters.length')) {
-      propertiesToUpdate['AlertDefinition/source/parameters'] = this.get('content.rawSourceData.parameters');
+      propertiesToUpdate['AlertDefinition/source'] = this.get('content.rawSourceData');
       var parameterConfigs = this.get('configs').filterProperty('name', 'parameter');
       parameterConfigs.forEach(function (parameter) {
-        propertiesToUpdate['AlertDefinition/source/parameters'].findProperty('name', parameter.get('apiProperty')).value = parameter.get('apiFormattedValue');
+        propertiesToUpdate['AlertDefinition/source'].parameters.findProperty('name', parameter.get('apiProperty')).value = parameter.get('apiFormattedValue');
       });
     }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d036fc01/ambari-web/test/controllers/main/alerts/definitions_configs_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/alerts/definitions_configs_controller_test.js b/ambari-web/test/controllers/main/alerts/definitions_configs_controller_test.js
index c957efd..f3cf28c 100644
--- a/ambari-web/test/controllers/main/alerts/definitions_configs_controller_test.js
+++ b/ambari-web/test/controllers/main/alerts/definitions_configs_controller_test.js
@@ -537,8 +537,8 @@ describe('App.MainAlertDefinitionConfigsController', function () {
       });
 
       it('should update parameters', function () {
-        expect(this.result['AlertDefinition/source/parameters']).to.have.property('length').equal(4);
-        expect(this.result['AlertDefinition/source/parameters'].mapProperty('value')).to.be.eql(['v11', 'v21', 'v31', 'v41']);
+        expect(this.result['AlertDefinition/source'].parameters).to.have.property('length').equal(4);
+        expect(this.result['AlertDefinition/source'].parameters.mapProperty('value')).to.be.eql(['v11', 'v21', 'v31', 'v41']);
       });
 
     });
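
The one-line root cause is visible in the hunk above: PUTting only AlertDefinition/source/parameters drops the sibling keys of the source object, including the script path of SCRIPT-type definitions. A hedged illustration of the before/after payloads; the rawSourceData shape here is assumed for the example, not taken from the codebase:

  // Assumed shape of a SCRIPT alert's raw source data, for illustration only.
  var rawSourceData = {
    type: 'SCRIPT',
    path: '/example/scripts/my_alert.py',          // hypothetical path
    parameters: [{ name: 'p1', value: 'v1' }]
  };

  var propertiesToUpdate = {};
  // Before the fix: only the parameters array was sent, so "path" was lost.
  // propertiesToUpdate['AlertDefinition/source/parameters'] = rawSourceData.parameters;
  // After the fix: the whole source object is sent, so "path" survives.
  propertiesToUpdate['AlertDefinition/source'] = rawSourceData;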


[09/33] ambari git commit: AMBARI-15063 : Metrics monitor fails on restart (avijayan)

Posted by nc...@apache.org.
AMBARI-15063 : Metrics monitor fails on restart (avijayan)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d412ca11
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d412ca11
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d412ca11

Branch: refs/heads/branch-dev-patch-upgrade
Commit: d412ca11c8347fb89e7a5318c72a7dc24d0c2ff9
Parents: 21fd70c
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Tue Feb 16 14:41:00 2016 -0800
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Tue Feb 16 14:41:00 2016 -0800

----------------------------------------------------------------------
 .../conf/unix/ambari-metrics-monitor                           | 6 +++---
 .../conf/unix/ambari-metrics-collector                         | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d412ca11/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor b/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
index 815b3e4..aa4ae02 100644
--- a/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
+++ b/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
@@ -131,7 +131,7 @@ case "$1" in
     echo "Checking for previously running Metric Monitor..."
     if [ -f ${PIDFILE} ]; then
       PID=`cat ${PIDFILE}`
-      if [ -z "`ps ax -o pid | grep ${PID}`" ]; then
+      if [ -z "`ps ax -o pid | grep -w ${PID} | grep resource_monitoring`" ]; then
         echo "${PIDFILE} found with no process. Removing ${PID}..."
         rm -f ${PIDFILE}
       else
@@ -152,7 +152,7 @@ case "$1" in
     sleep 2
 
     echo "Verifying ${METRIC_MONITOR} process status..."
-    if [ -z "`ps ax -o pid | grep ${PID}`" ]; then
+    if [ -z "`ps ax -o pid | grep -w ${PID} | grep resource_monitoring`" ]; then
       if [ -s ${OUTFILE} ]; then
         echo "ERROR: ${METRIC_MONITOR} start failed. For more details, see ${OUTFILE}:"
         echo "===================="
@@ -173,7 +173,7 @@ case "$1" in
     if [ -f ${PIDFILE} ]; then
       PID=`cat ${PIDFILE}`
       echo "Found ${METRIC_MONITOR} PID: $PID"
-      if [ -z "`ps ax -o pid | grep ${PID}`" ]; then
+      if [ -z "`ps ax -o pid | grep -w ${PID} | grep resource_monitoring`" ]; then
         echo "${METRIC_MONITOR} not running. Stale PID File at: $PIDFILE"
         retcode=2
       else

http://git-wip-us.apache.org/repos/asf/ambari/blob/d412ca11/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
index f83af50..e319d73 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
+++ b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
@@ -267,7 +267,7 @@ function start()
   sleep 2
 
   echo "Verifying ${METRIC_COLLECTOR} process status..." | tee -a $STARTUPFILE
-  if [ -z "`ps ax -o pid | grep ${PID}`" ]; then
+  if [ -z "`ps ax | grep -w ${PID} | grep ApplicationHistoryServer`" ]; then
     if [ -s ${OUTFILE} ]; then
       echo "ERROR: ${METRIC_COLLECTOR} start failed. For more details, see ${OUTFILE}:" | tee -a $STARTUPFILE
       echo "===================="


[11/33] ambari git commit: AMBARI-14987: assign_master_component.js does not show recommendations while adding master component for a service which is already installed (mithmatt via jaoki)

Posted by nc...@apache.org.
AMBARI-14987: assign_master_component.js does not show recommendations while adding master component for a service which is already installed (mithmatt via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e4800e1c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e4800e1c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e4800e1c

Branch: refs/heads/branch-dev-patch-upgrade
Commit: e4800e1c97709e8218989080fceb0df406e744ea
Parents: 529c588
Author: Jun Aoki <ja...@apache.org>
Authored: Tue Feb 16 16:43:39 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Tue Feb 16 16:43:39 2016 -0800

----------------------------------------------------------------------
 ambari-web/app/assets/test/tests.js             |   1 +
 .../mixins/wizard/assign_master_components.js   |  58 ++++++-
 .../wizard/assign_master_components_test.js     | 154 +++++++++++++++++++
 3 files changed, 212 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e4800e1c/ambari-web/app/assets/test/tests.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/assets/test/tests.js b/ambari-web/app/assets/test/tests.js
index 44fb4f4..0695d06 100644
--- a/ambari-web/app/assets/test/tests.js
+++ b/ambari-web/app/assets/test/tests.js
@@ -157,6 +157,7 @@ var files = [
   'test/mixins/main/service/configs/config_overridable_test',
   'test/mixins/routers/redirections_test',
   'test/mixins/wizard/addSeccurityConfigs_test',
+  'test/mixins/wizard/assign_master_components_test',
   'test/mixins/wizard/wizard_menu_view_test',
   'test/mixins/wizard/wizardProgressPageController_test',
   'test/mixins/wizard/wizardEnableDone_test',

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4800e1c/ambari-web/app/mixins/wizard/assign_master_components.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/wizard/assign_master_components.js b/ambari-web/app/mixins/wizard/assign_master_components.js
index a2440a4..645c996 100644
--- a/ambari-web/app/mixins/wizard/assign_master_components.js
+++ b/ambari-web/app/mixins/wizard/assign_master_components.js
@@ -95,6 +95,22 @@ App.AssignMasterComponents = Em.Mixin.create({
   showInstalledMastersFirst: false,
 
   /**
+   * Map of component name to list of hostnames for that component
+   * format:
+   * {
+   *   NAMENODE: [
+   *     'c6401.ambari.apache.org'
+   *   ],
+   *   DATANODE: [
+   *     'c6402.ambari.apache.org',
+   *     'c6403.ambari.apache.org',
+   *   ]
+   * }
+   * @type {Object}
+   */
+  recommendedHostsForComponents: {},
+
+  /**
    * Array of <code>servicesMasters</code> objects, that will be shown on the page
    * Are filtered using <code>mastersToShow</code>
    * @type {Array}
@@ -731,7 +747,31 @@ App.AssignMasterComponents = Em.Mixin.create({
    * @method loadRecommendationsSuccessCallback
    */
   loadRecommendationsSuccessCallback: function (data) {
-    this.set('content.recommendations', data.resources[0].recommendations);
+    var recommendations = data.resources[0].recommendations;
+    this.set('content.recommendations', recommendations);
+
+    var recommendedHostsForComponent = {};
+    var hostsForHostGroup = {};
+
+    recommendations.blueprint_cluster_binding.host_groups.forEach(function(hostGroup) {
+      hostsForHostGroup[hostGroup.name] = hostGroup.hosts.map(function(host) {
+        return host.fqdn;
+      });
+    });
+
+    recommendations.blueprint.host_groups.forEach(function (hostGroup) {
+      var components = hostGroup.components.map(function (component) {
+        return component.name;
+      });
+      components.forEach(function (componentName) {
+        var hostList = recommendedHostsForComponent[componentName] || [];
+        var hostNames = hostsForHostGroup[hostGroup.name] || [];
+        Array.prototype.push.apply(hostList, hostNames);
+        recommendedHostsForComponent[componentName] = hostList;
+      });
+    });
+
+    this.set('content.recommendedHostsForComponents', recommendedHostsForComponent);
   },
 
   /**
@@ -817,6 +857,21 @@ App.AssignMasterComponents = Em.Mixin.create({
    * @returns {*}
    */
   getHostForMaster: function (master, allMasters) {
+    var masterHostList = [];
+
+    allMasters.forEach(function (component) {
+      if (component.component_name === master) {
+        masterHostList.push(component.selectedHost);
+      }
+    });
+
+    var recommendedHostsForMaster = this.get('content.recommendedHostsForComponents')[master] || [];
+    for (var k = 0; k < recommendedHostsForMaster.length; k++) {
+      if(!masterHostList.contains(recommendedHostsForMaster[k])) {
+        return recommendedHostsForMaster[k];
+      }
+    }
+
     var usedHosts = allMasters.filterProperty('component_name', master).mapProperty('selectedHost');
     var allHosts = this.get('hosts');
     for (var i = 0; i < allHosts.length; i++) {
@@ -824,6 +879,7 @@ App.AssignMasterComponents = Em.Mixin.create({
         return allHosts[i].get('host_name');
       }
     }
+
     return false;
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e4800e1c/ambari-web/test/mixins/wizard/assign_master_components_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mixins/wizard/assign_master_components_test.js b/ambari-web/test/mixins/wizard/assign_master_components_test.js
new file mode 100644
index 0000000..a3ffb22
--- /dev/null
+++ b/ambari-web/test/mixins/wizard/assign_master_components_test.js
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+var App = require('app');
+require('mixins/wizard/assign_master_components');
+var c;
+
+describe('App.AssignMasterComponents', function () {
+  var baseObject = Em.Object.extend(App.AssignMasterComponents);
+  var data;
+
+  beforeEach(function () {
+    c = baseObject.create();
+    c.set('content', {});
+
+    var hosts = [];
+    for(var i = 1; i <= 4; ++i) {
+      hosts.push(App.Host.createRecord({
+        'host_name': 'h' + i
+      }));
+    }
+    c.set('hosts', hosts);
+
+    data = {
+      "resources": [
+        {
+          "recommendations": {
+            "blueprint": {
+              "host_groups": [
+                {
+                  "name": "host-group-1",
+                  "components": [{"name": "c1"}, {"name": "c3"}, {"name": "c2"}]
+                },
+                {
+                  "name": "host-group-2",
+                  "components": [{"name": "c1"}, {"name": "c2"}]
+                },
+                {
+                  "name": "host-group-3",
+                  "components": [{"name": "c1"}]
+                }
+              ]
+            },
+            "blueprint_cluster_binding": {
+              "host_groups": [
+                {
+                  "name": "host-group-1",
+                  "hosts": [{"fqdn": "h1"}]
+                },
+                {
+                  "name": "host-group-3",
+                  "hosts": [{"fqdn": "h3"}]
+                },
+                {
+                  "name": "host-group-2",
+                  "hosts": [{"fqdn": "h2"}, {"fqdn": "h4"}]
+                }
+              ]
+            }
+          }
+        }
+      ]
+    };
+  });
+
+  describe('#loadRecommendationsSuccessCallback', function () {
+
+    it('should set recommendations', function() {
+      c.loadRecommendationsSuccessCallback(data);
+      expect(c.get('content.recommendations')).to.eq(data.resources[0].recommendations);
+    });
+
+    it('should set recommendedHostsForComponents', function() {
+      c.loadRecommendationsSuccessCallback(data);
+      var expected = {
+        "c1": ["h1", "h2", "h4", "h3"],
+        "c3": ["h1"],
+        "c2": ["h1", "h2", "h4"]
+      };
+
+      expect(c.get('content.recommendedHostsForComponents')).to.deep.equal(expected);
+    });
+  });
+
+  describe('#getHostForMaster', function () {
+
+    var allMasters;
+
+    beforeEach(function () {
+      allMasters = [
+        {
+          "component_name": "c1",
+          "selectedHost": "h1"
+        },
+        {
+          "component_name": "c1",
+          "selectedHost": "h2"
+        },
+        {
+          "component_name": "c1",
+          "selectedHost": "h3"
+        },
+        {
+          "component_name": "c1",
+          "selectedHost": "h4"
+        },
+        {
+          "component_name": "c2",
+          "selectedHost": "h1"
+        },
+        {
+          "component_name": "c5",
+          "selectedHost": "h1"
+        }
+      ];
+    });
+
+    it('should return the recommended host', function() {
+      c.loadRecommendationsSuccessCallback(data);
+      expect(c.getHostForMaster('c2', allMasters)).to.eq('h2');
+    });
+
+    it('should return the first available host from the list of existing hosts', function() {
+      c.loadRecommendationsSuccessCallback(data);
+      expect(c.getHostForMaster('c6', allMasters)).to.eq('h1');
+    });
+
+    it('should return the next available host from the list of existing hosts', function() {
+      c.loadRecommendationsSuccessCallback(data);
+      expect(c.getHostForMaster('c5', allMasters)).to.eq('h2');
+    });
+
+    it('should return false if the component is already on all hosts', function() {
+      c.loadRecommendationsSuccessCallback(data);
+      expect(c.getHostForMaster('c1', allMasters)).to.eq(false);
+    });
+
+  });
+});
\ No newline at end of file
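
Restated compactly, the selection order getHostForMaster now implements is: an unused recommended host first, then any unused host, otherwise false. A condensed sketch under simplified inputs (plain arrays of host names; pickHost is illustrative, not the mixin's actual method):

  // Pick a host for a master component: prefer a recommended host that is
  // not already used, fall back to the first unused host, else return false.
  function pickHost(usedHosts, recommendedHosts, allHosts) {
    for (var k = 0; k < recommendedHosts.length; k++) {
      if (usedHosts.indexOf(recommendedHosts[k]) === -1) {
        return recommendedHosts[k];
      }
    }
    for (var i = 0; i < allHosts.length; i++) {
      if (usedHosts.indexOf(allHosts[i]) === -1) {
        return allHosts[i];
      }
    }
    return false;
  }

  // With the test data above: pickHost(['h1'], ['h1', 'h2', 'h4'],
  // ['h1', 'h2', 'h3', 'h4']) === 'h2', matching the "recommended host" case.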


[13/33] ambari git commit: AMBARI-15071. Functional tests hang on BAO forever (aonishuk)

Posted by nc...@apache.org.
AMBARI-15071. Functional tests hang on BAO forever (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/03f804c2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/03f804c2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/03f804c2

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 03f804c24772bec7e7ccfd31d5d2f594ac0f0262
Parents: 8cfb2db
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Wed Feb 17 14:28:05 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Wed Feb 17 14:28:05 2016 +0200

----------------------------------------------------------------------
 ambari-funtest/pom.xml | 3 +++
 1 file changed, 3 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/03f804c2/ambari-funtest/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-funtest/pom.xml b/ambari-funtest/pom.xml
index 4fa342f..8d19631 100644
--- a/ambari-funtest/pom.xml
+++ b/ambari-funtest/pom.xml
@@ -22,6 +22,9 @@
   <packaging>${packagingFormat}</packaging>
   <name>Ambari Functional Tests</name>
   <description>Ambari Functional Tests</description>
+  <properties>
+    <maven.test.skip>true</maven.test.skip> <!-- Don't run tests by default. Run with -Dmaven.test.skip=false to run tests -->
+  </properties>
   <build>
     <plugins>
       <plugin>


[18/33] ambari git commit: AMBARI-15072. Failures in Express Upgrade from Dal M20 to Derg due to missing modules (aonishuk)

Posted by nc...@apache.org.
AMBARI-15072. Failures in Express Upgrade from Dal M20 to Derg due to missing modules (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d4b67b1f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d4b67b1f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d4b67b1f

Branch: refs/heads/branch-dev-patch-upgrade
Commit: d4b67b1fd39fdbdc05c8f0d9449d0c0156befa6c
Parents: 7d06e8b
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Wed Feb 17 19:58:17 2016 +0200
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Wed Feb 17 19:58:17 2016 +0200

----------------------------------------------------------------------
 .../main/resources/custom_actions/scripts/ru_execute_tasks.py    | 2 +-
 .../src/test/python/custom_actions/test_ru_execute_tasks.py      | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d4b67b1f/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py b/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py
index 928fef8..8e526c5 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/ru_execute_tasks.py
@@ -144,7 +144,7 @@ class ExecuteUpgradeTasks(Script):
                             self.logging_level,
                             Script.get_tmp_dir()]
 
-          task.command = " ".join(command_params)
+          task.command = "source /var/lib/ambari-agent/ambari-env.sh ; " + " ".join(command_params)
           # Replace redundant whitespace to make the unit tests easier to validate
           task.command = re.sub("\s+", " ", task.command).strip()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d4b67b1f/ambari-server/src/test/python/custom_actions/test_ru_execute_tasks.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/custom_actions/test_ru_execute_tasks.py b/ambari-server/src/test/python/custom_actions/test_ru_execute_tasks.py
index 31c1030..6147b87 100644
--- a/ambari-server/src/test/python/custom_actions/test_ru_execute_tasks.py
+++ b/ambari-server/src/test/python/custom_actions/test_ru_execute_tasks.py
@@ -129,7 +129,7 @@ class TestRUExecuteTasks(RMFTestCase):
     ru_execute.actionexecute(None)
 
     call_mock.assert_called_with(
-        "/usr/bin/ambari-python-wrap /var/lib/ambari-agent/cache/common-services/HDFS/2.1.0.2.0/package" + os.sep +
+        "source /var/lib/ambari-agent/ambari-env.sh ; /usr/bin/ambari-python-wrap /var/lib/ambari-agent/cache/common-services/HDFS/2.1.0.2.0/package" + os.sep +
         "scripts/namenode.py prepare_rolling_upgrade /tmp", logoutput=True, quiet=True)
     pass
 
@@ -176,6 +176,6 @@ class TestRUExecuteTasks(RMFTestCase):
     ru_execute = ExecuteUpgradeTasks()
     ru_execute.actionexecute(None)
 
-    call_mock.assert_called_with("/usr/bin/ambari-python-wrap /var/lib/ambari-agent/cache/custom_actions" + os.sep +
+    call_mock.assert_called_with("source /var/lib/ambari-agent/ambari-env.sh ; /usr/bin/ambari-python-wrap /var/lib/ambari-agent/cache/custom_actions" + os.sep +
                                  "scripts/namenode.py prepare_rolling_upgrade /tmp", logoutput=True, quiet=True)
     pass


[28/33] ambari git commit: AMBARI-15033. RU/EU from HDP 2.2 to 2.3 or 2.4 to remove deprecated kafka-broker port property (alejandro)

Posted by nc...@apache.org.
AMBARI-15033. RU/EU from HDP 2.2 to 2.3 or 2.4 to remove deprecated kafka-broker port property (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b53a13fe
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b53a13fe
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b53a13fe

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b53a13fe6a96eb8e620f0ee86643c8b96ed19c77
Parents: 8999aee
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Wed Feb 17 15:30:56 2016 -0800
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Wed Feb 17 16:24:02 2016 -0800

----------------------------------------------------------------------
 .../stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml          | 3 +--
 .../stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml          | 3 +--
 .../src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml  | 2 ++
 .../src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.4.xml  | 2 ++
 .../HDP/2.3/services/KAFKA/configuration/kafka-broker.xml       | 2 +-
 .../main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml   | 5 +++--
 .../stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml          | 3 +--
 .../src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml  | 4 ++++
 .../main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml   | 5 +++--
 9 files changed, 18 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b53a13fe/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml
index 84da86c..5c6c456 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml
@@ -526,9 +526,8 @@
       </execute-stage>
 
       <!-- KAFKA  -->
-
       <execute-stage service="KAFKA" component="KAFKA_BROKER" title="Apply config changes for Kafka">
-        <task xsi:type="configure" id ="hdp_2_3_0_0_kafka_broker_listeners"/>
+        <task xsi:type="configure" id ="hdp_2_3_0_0_kafka_broker_deprecate_port"/>
       </execute-stage>
 
     </group>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b53a13fe/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml
index c0eae48..eca1789 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml
@@ -564,9 +564,8 @@
       </execute-stage>
 
       <!-- KAFKA  -->
-
       <execute-stage service="KAFKA" component="KAFKA_BROKER" title="Apply config changes for Kafka">
-        <task xsi:type="configure" id ="hdp_2_4_0_0_kafka_broker_listeners"/>
+        <task xsi:type="configure" id ="hdp_2_4_0_0_kafka_broker_deprecate_port"/>
       </execute-stage>
 
     </group>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b53a13fe/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
index 353dc86..61540b7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
@@ -811,6 +811,8 @@
             <script>scripts/kafka_broker.py</script>
             <function>stop</function>
           </task>
+
+          <task xsi:type="configure" id="hdp_2_3_0_0_kafka_broker_deprecate_port"/>
         </pre-upgrade>
 
         <upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b53a13fe/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.4.xml
index cfd2904..5c91764 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.4.xml
@@ -824,6 +824,8 @@
             <script>scripts/kafka_broker.py</script>
             <function>stop</function>
           </task>
+
+          <task xsi:type="configure" id="hdp_2_4_0_0_kafka_broker_deprecate_port"/>
         </pre-upgrade>
 
         <upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b53a13fe/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/configuration/kafka-broker.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/configuration/kafka-broker.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/configuration/kafka-broker.xml
index 6e924e2..8802f13 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/configuration/kafka-broker.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/KAFKA/configuration/kafka-broker.xml
@@ -23,7 +23,7 @@
     <name>listeners</name>
     <value>PLAINTEXT://localhost:6667</value>
     <property-type>DONT_ADD_ON_UPGRADE</property-type>
-    <description>host and port where kafka broker will be accepting connnections. localhost will be subsituted with hostname.</description>
+    <description>host and port where kafka broker will be accepting connections. localhost will be substituted with hostname.</description>
   </property>
   <property>
     <name>controlled.shutdown.enable</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b53a13fe/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
index b8963bc..2f042b1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/config-upgrade.xml
@@ -893,9 +893,10 @@
     <service name="KAFKA">
       <component name="KAFKA_BROKER">
         <changes>
-          <definition xsi:type="configure" id="hdp_2_3_0_0_kafka_broker_listeners">
+          <definition xsi:type="configure" id="hdp_2_3_0_0_kafka_broker_deprecate_port">
             <type>kafka-broker</type>
-            <set key="listeners" value="PLAINTEXT://localhost:6667"/>
+            <!-- Deprecate "port" property since "listeners" will be added. -->
+            <transfer operation="delete" delete-key="port"/>
           </definition>
         </changes>
       </component>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b53a13fe/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
index 4e148d5..67bc0e7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml
@@ -316,9 +316,8 @@
       </execute-stage>
 
       <!-- KAFKA  -->
-      <!--TODO: remove? Used for non-rolling upgrade only-->
       <execute-stage service="KAFKA" component="KAFKA_BROKER" title="Apply config changes for Kafka">
-        <task xsi:type="configure" id ="hdp_2_4_0_0_kafka_broker_listeners"/>
+        <task xsi:type="configure" id ="hdp_2_4_0_0_kafka_broker_deprecate_port"/>
       </execute-stage>
 
     </group>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b53a13fe/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
index 59f9389..42148b0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.4.xml
@@ -740,6 +740,10 @@
 
     <service name="KAFKA">
       <component name="KAFKA_BROKER">
+        <pre-upgrade>
+          <task xsi:type="configure" id ="hdp_2_4_0_0_kafka_broker_deprecate_port"/>
+        </pre-upgrade>
+        
         <upgrade>
           <task xsi:type="restart-task" />
         </upgrade>

http://git-wip-us.apache.org/repos/asf/ambari/blob/b53a13fe/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
index ee06577..1cc2668 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/config-upgrade.xml
@@ -157,9 +157,10 @@
     <service name="KAFKA">
       <component name="KAFKA_BROKER">
         <changes>
-          <definition xsi:type="configure" id="hdp_2_4_0_0_kafka_broker_listeners">
+          <definition xsi:type="configure" id="hdp_2_4_0_0_kafka_broker_deprecate_port">
             <type>kafka-broker</type>
-            <set key="listeners" value="PLAINTEXT://localhost:6667"/>
+            <!-- Deprecate "port" property since "listeners" will be added. -->
+            <transfer operation="delete" delete-key="port"/>
           </definition>
         </changes>
       </component>


[15/33] ambari git commit: AMBARI-14875. Ranger Smart configs : Need to hide Audit DB properties if Audit to DB is off. (addendum patch) (jaimin)

Posted by nc...@apache.org.
AMBARI-14875. Ranger Smart configs : Need to hide Audit DB properties if Audit to DB is off. (addendum patch) (jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/506bb8d1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/506bb8d1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/506bb8d1

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 506bb8d186500ba700bc3f58f1520f105876e0c4
Parents: b339542
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Wed Feb 17 20:16:40 2016 +0530
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Wed Feb 17 20:18:15 2016 +0530

----------------------------------------------------------------------
 .../services/RANGER/themes/theme_version_2.json | 20 +++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/506bb8d1/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/themes/theme_version_2.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/themes/theme_version_2.json b/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/themes/theme_version_2.json
index 59e58a4..cbd27e4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/themes/theme_version_2.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/RANGER/themes/theme_version_2.json
@@ -940,7 +940,25 @@
         },
         {
           "config": "admin-properties/audit_db_password",
-          "subsection-name": "subsection-ranger-audit-db-row2-col2"
+          "subsection-name": "subsection-ranger-audit-db-row2-col2",
+          "depends-on": [
+            {
+              "configs":[
+                "ranger-env/xasecure.audit.destination.db"
+              ],
+              "if": "${ranger-env/xasecure.audit.destination.db}",
+              "then": {
+                "property_value_attributes": {
+                  "visible": true
+                }
+              },
+              "else": {
+                "property_value_attributes": {
+                  "visible": false
+                }
+              }
+            }
+          ]
         },
         {
           "config": "ranger-env/xasecure.audit.destination.solr",


[30/33] ambari git commit: AMBARI-15081: Ambari functional tests: Build issues - tests should not run during regular build; JAR file not required.

Posted by nc...@apache.org.
AMBARI-15081: Ambari functional tests: Build issues - tests should not run during regular build; JAR file not required.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/84ca6a88
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/84ca6a88
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/84ca6a88

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 84ca6a88d0ba80011394a0ab767a80c7e342aa3c
Parents: dcebcca
Author: Nahappan Somasundaram <ns...@hortonworks.com>
Authored: Wed Feb 17 18:00:07 2016 -0800
Committer: Nahappan Somasundaram <ns...@hortonworks.com>
Committed: Wed Feb 17 22:39:49 2016 -0800

----------------------------------------------------------------------
 ambari-funtest/pom.xml                         | 16 +----
 ambari-funtest/src/main/assemblies/empty.xml   | 22 ++++++
 ambari-funtest/src/main/assemblies/funtest.xml | 79 ---------------------
 3 files changed, 24 insertions(+), 93 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/84ca6a88/ambari-funtest/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-funtest/pom.xml b/ambari-funtest/pom.xml
index 8d19631..28225c2 100644
--- a/ambari-funtest/pom.xml
+++ b/ambari-funtest/pom.xml
@@ -22,9 +22,6 @@
   <packaging>${packagingFormat}</packaging>
   <name>Ambari Functional Tests</name>
   <description>Ambari Functional Tests</description>
-  <properties>
-    <maven.test.skip>true</maven.test.skip> <!-- Don't run tests by default. Run with -Dmaven.test.skip=false to run tests -->
-  </properties>
   <build>
     <plugins>
       <plugin>
@@ -62,15 +59,6 @@
           <forkMode>once</forkMode>
         </configuration>
         <executions>
-          <!-- Will display BUILD SUCCESSFUL if build is successful.
-               Does not matter if the tests fail -->
-          <execution>
-            <id>run-integration-tests</id>
-            <phase>test</phase>
-            <goals>
-              <goal>integration-test</goal>
-            </goals>
-          </execution>
           <!-- Will display BUILD FAILURE if build fails or any test fails -->
           <execution>
             <id>run-verify</id>
@@ -128,7 +116,7 @@
         <executable.shell>sh</executable.shell>
         <fileextension.shell>sh</fileextension.shell>
         <fileextension.dot.shell-default></fileextension.dot.shell-default>
-        <assemblydescriptor>src/main/assemblies/funtest.xml</assemblydescriptor>
+        <assemblydescriptor>src/main/assemblies/empty.xml</assemblydescriptor>
         <packagingFormat>jar</packagingFormat>
       </properties>
     </profile>
@@ -146,7 +134,7 @@
         <executable.shell>cmd</executable.shell>
         <fileextension.shell>cmd</fileextension.shell>
         <fileextension.dot.shell-default></fileextension.dot.shell-default>
-        <assemblydescriptor>src/main/assemblies/funtest.xml</assemblydescriptor>
+        <assemblydescriptor>src/main/assemblies/empty.xml</assemblydescriptor>
         <packagingFormat>jar</packagingFormat>
         </properties>
     </profile>

http://git-wip-us.apache.org/repos/asf/ambari/blob/84ca6a88/ambari-funtest/src/main/assemblies/empty.xml
----------------------------------------------------------------------
diff --git a/ambari-funtest/src/main/assemblies/empty.xml b/ambari-funtest/src/main/assemblies/empty.xml
new file mode 100644
index 0000000..9226f2c
--- /dev/null
+++ b/ambari-funtest/src/main/assemblies/empty.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+  
+       http://www.apache.org/licenses/LICENSE-2.0
+  
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<assembly>
+  <id>empty</id>
+  <formats/>
+</assembly>

http://git-wip-us.apache.org/repos/asf/ambari/blob/84ca6a88/ambari-funtest/src/main/assemblies/funtest.xml
----------------------------------------------------------------------
diff --git a/ambari-funtest/src/main/assemblies/funtest.xml b/ambari-funtest/src/main/assemblies/funtest.xml
deleted file mode 100644
index 93d2e17..0000000
--- a/ambari-funtest/src/main/assemblies/funtest.xml
+++ /dev/null
@@ -1,79 +0,0 @@
-<?xml version="1.0"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-  
-       http://www.apache.org/licenses/LICENSE-2.0
-  
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<assembly>
-  <id>dist</id>
-  <formats>
-    <format>dir</format>
-    <format>tar.gz</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <files>
-    <file>
-      <source>${project.build.directory}/${artifact.artifactId}-${artifact.version}.jar</source>
-      <outputDirectory>ambari-funtest-${project.version}/lib/ambari-funtest</outputDirectory>
-    </file>
-  </files>
-  <fileSets>
-    <!-- Distro files, readme, licenses, etc -->
-    <fileSet>
-      <directory>${basedir}/../</directory>
-      <outputDirectory>ambari-funtest-${project.version}/</outputDirectory>
-      <includes>
-        <include>*.txt</include>
-      </includes>
-    </fileSet>
-    <!--
-    <fileSet>
-      <directory>${basedir}/src/main/bin</directory>
-      <outputDirectory>ambari-funtest-${project.version}/bin</outputDirectory>
-      <includes>
-        <include>*</include>
-      </includes>
-      <fileMode>0755</fileMode>
-    </fileSet>
-    -->
-    <fileSet>
-      <directory>${basedir}/src/main/resources/</directory>
-      <outputDirectory>/ambari-funtest-${project.version}/keystore</outputDirectory>
-      <includes>
-        <include>db/*</include>
-        <include>ca.config</include>
-        <include>pass.txt</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>${basedir}/../ambari-web/public</directory>
-      <outputDirectory>ambari-funtest-${project.version}/web</outputDirectory>
-      <includes>
-        <include>**</include>
-      </includes>
-    </fileSet>
-    <fileSet>
-      <directory>src/main/conf</directory>
-      <outputDirectory>/ambari-funtest-${project.version}/etc/ambari-funtest/conf</outputDirectory>
-    </fileSet>
-  </fileSets>
-  <dependencySets>
-    <dependencySet>
-      <outputDirectory>ambari-funtest-${project.version}/lib/ambari-funtest</outputDirectory>
-      <unpack>false</unpack>
-      <scope>compile</scope>
-    </dependencySet>
-  </dependencySets>
-</assembly>


[29/33] ambari git commit: AMBARI-14881: Enable/Disable Custom Commands on Host Component page (goutamtadi via jaoki)

Posted by nc...@apache.org.
AMBARI-14881: Enable/Disable Custom Commands on Host Component page (goutamtadi via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/dcebcca6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/dcebcca6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/dcebcca6

Branch: refs/heads/branch-dev-patch-upgrade
Commit: dcebcca60d47743176519b7758b86505263e39a7
Parents: b53a13f
Author: Jun Aoki <ja...@apache.org>
Authored: Wed Feb 17 18:52:38 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Wed Feb 17 18:52:38 2016 -0800

----------------------------------------------------------------------
 ambari-web/app/templates/main/host/details/host_component.hbs | 2 +-
 ambari-web/app/views/main/host/details/host_component_view.js | 7 ++++---
 2 files changed, 5 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/dcebcca6/ambari-web/app/templates/main/host/details/host_component.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/host/details/host_component.hbs b/ambari-web/app/templates/main/host/details/host_component.hbs
index 00db8d1..60031de 100644
--- a/ambari-web/app/templates/main/host/details/host_component.hbs
+++ b/ambari-web/app/templates/main/host/details/host_component.hbs
@@ -143,7 +143,7 @@
         {{/if}}
 
       {{#each command in view.customCommands}}
-        <li>
+        <li {{bindAttr class="command.disabled:disabled"}}>
           <a href="javascript:void(null)" {{action "executeCustomCommand" command target="controller" href=true}}>{{command.label}}</a>
         </li>
       {{/each}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/dcebcca6/ambari-web/app/views/main/host/details/host_component_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/host/details/host_component_view.js b/ambari-web/app/views/main/host/details/host_component_view.js
index 6205604..d059498 100644
--- a/ambari-web/app/views/main/host/details/host_component_view.js
+++ b/ambari-web/app/views/main/host/details/host_component_view.js
@@ -311,14 +311,15 @@ App.HostComponentView = Em.View.extend({
         return;
       }
 
-      var isContextPresent = command in App.HostComponentActionMap.getMap(self) && App.HostComponentActionMap.getMap(self)[command].context;
+      var commandMap = App.HostComponentActionMap.getMap(self)[command];
       customCommands.push({
         label: self.getCustomCommandLabel(command),
         service: component.get('serviceName'),
         hosts: hostComponent.get('hostName'),
-        context: isContextPresent ? App.HostComponentActionMap.getMap(self)[command].context : null,
+        context: (!!commandMap && !!commandMap.context) ? commandMap.context : null,
         component: component.get('componentName'),
-        command: command
+        command: command,
+        disabled: !!commandMap ? !!commandMap.disabled : false
       });
     });
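
The view change above replaces the repeated App.HostComponentActionMap.getMap(self)[command] lookups with a single cached lookup plus explicit defaults, and threads a new disabled flag through to the template. A minimal Python sketch of that look-up-once, default-safely pattern (all names here are illustrative, not Ambari APIs):

    def build_custom_command(command, action_map):
        """Fetch the map entry once; derive optional fields with defaults."""
        command_map = action_map.get(command)  # single lookup; may be None
        return {
            "command": command,
            # mirrors (!!commandMap && !!commandMap.context) ? commandMap.context : null
            "context": command_map.get("context") if command_map else None,
            # a missing or falsy "disabled" flag leaves the menu item enabled
            "disabled": bool(command_map.get("disabled")) if command_map else False,
        }

    assert build_custom_command("RESTART", {})["disabled"] is False
    assert build_custom_command("X", {"X": {"disabled": True}})["disabled"] is True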
 


[05/33] ambari git commit: AMBARI-15038 Alert definitions: Percentage params get validation errors when they shouldn't (Joe Wang via rzang)

Posted by nc...@apache.org.
AMBARI-15038 Alert definitions: Percentage params get validation errors when they shouldn't (Joe Wang via rzang)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f18601c7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f18601c7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f18601c7

Branch: refs/heads/branch-dev-patch-upgrade
Commit: f18601c7c786945e0da8883e59b65686338ee2c9
Parents: d036fc0
Author: Richard Zang <rz...@apache.org>
Authored: Tue Feb 16 11:11:54 2016 -0800
Committer: Richard Zang <rz...@apache.org>
Committed: Tue Feb 16 11:11:54 2016 -0800

----------------------------------------------------------------------
 ambari-web/app/models/alerts/alert_config.js       | 4 ++--
 ambari-web/test/models/alerts/alert_config_test.js | 8 ++++----
 2 files changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f18601c7/ambari-web/app/models/alerts/alert_config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/alerts/alert_config.js b/ambari-web/app/models/alerts/alert_config.js
index c86b19a..867b45a 100644
--- a/ambari-web/app/models/alerts/alert_config.js
+++ b/ambari-web/app/models/alerts/alert_config.js
@@ -532,7 +532,7 @@ App.AlertConfigProperties.Parameters = {
       value = String(value).trim();
       value = parseFloat(value);
 
-      return !isNaN(value) && value > 0 && value <= 100;
+      return !isNaN(value) && value > 0;
     }.property('value')
   })
 
@@ -622,7 +622,7 @@ App.AlertConfigProperties.Thresholds = {
         return false;
       }
 
-      return this.get('showInputForValue') ? !isNaN(value) && value > 0 && value <= 100 : true;
+      return this.get('showInputForValue') ? !isNaN(value) && value > 0 : true;
     }.property('displayValue', 'showInputForValue'),
 
     /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/f18601c7/ambari-web/test/models/alerts/alert_config_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/models/alerts/alert_config_test.js b/ambari-web/test/models/alerts/alert_config_test.js
index 4b788f8..56f1577 100644
--- a/ambari-web/test/models/alerts/alert_config_test.js
+++ b/ambari-web/test/models/alerts/alert_config_test.js
@@ -104,13 +104,13 @@ describe('App.AlertConfigProperties', function () {
           {value: 'abc', expected: false},
           {value: 'g1', expected: false},
           {value: '1g', expected: false},
-          {value: '123', expected: false},
+          {value: '123', expected: true},
           {value: '23', expected: true},
-          {value: '123.8', expected: false},
+          {value: '123.8', expected: true},
           {value: '5.8', expected: true},
-          {value: 123, expected: false},
+          {value: 123, expected: true},
           {value: 23, expected: true},
-          {value: 123.8, expected: false},
+          {value: 123.8, expected: true},
           {value: 5.8, expected: true}
         ]).forEach(function (test) {
           it('value: ' + JSON.stringify(test.value) + ' ;result - ' + test.expected, function () {
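
The fix drops the upper bound of 100, so percentage-style parameters now accept any positive number. A minimal Python sketch of the relaxed rule, written to reproduce the expected values in the test table above (the strict numeric parsing below is an assumption; the Ember code uses parseFloat, which is more permissive):

    import math

    def is_valid(value):
        try:
            number = float(str(value).strip())
        except ValueError:
            return False
        return not math.isnan(number) and number > 0

    assert is_valid("123")     # rejected before the fix (123 > 100), valid now
    assert is_valid(123.8)
    assert not is_valid("abc")
    assert not is_valid("1g")  # float() is stricter than parseFloat here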


[24/33] ambari git commit: AMBARI-15019: Refactor HAWQ common.py after moving config substitution to UI (lavjain via jaoki)

Posted by nc...@apache.org.
AMBARI-15019: Refactor HAWQ common.py after moving config substitution to UI (lavjain via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b15c0f3d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b15c0f3d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b15c0f3d

Branch: refs/heads/branch-dev-patch-upgrade
Commit: b15c0f3d7432dbd6b6ce6c678b6cc2884a73c68b
Parents: f11d76d
Author: Jun Aoki <ja...@apache.org>
Authored: Wed Feb 17 14:46:58 2016 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Wed Feb 17 14:46:58 2016 -0800

----------------------------------------------------------------------
 .../HAWQ/2.0.0/package/scripts/common.py        | 63 ++++----------------
 .../HAWQ/2.0.0/package/scripts/params.py        | 19 ++++--
 2 files changed, 26 insertions(+), 56 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b15c0f3d/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/common.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/common.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/common.py
index 23342f5..b5353e8 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/common.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/common.py
@@ -20,9 +20,7 @@ import os
 import time
 import crypt
 import filecmp
-from resource_management.libraries.resources.xml_config import XmlConfig
 from resource_management.core.resources.system import Execute, Directory, File
-from resource_management.libraries.script.config_dictionary import ConfigDictionary
 from resource_management.core.logger import Logger
 from resource_management.core.system import System
 from resource_management.core.exceptions import Fail
@@ -67,58 +65,23 @@ def setup_common_configurations():
   """
   Sets up the config files common to master, standby and segment nodes.
   """
-  __update_hdfs_client()
-  __update_yarn_client()
-  __update_hawq_site()
-  __set_osparams()
-
-def __update_hdfs_client():
-  """
-  Writes hdfs-client.xml on the local filesystem on hawq nodes.
-  If hdfs ha is enabled, appends related parameters to hdfs-client.xml
-  """
-  import params
-
-  hdfs_client_dict = params.hdfs_client.copy()
-  
-  XmlConfig("hdfs-client.xml",
-            conf_dir=hawq_constants.hawq_config_dir,
-            configurations=ConfigDictionary(hdfs_client_dict),
-            configuration_attributes=params.config['configuration_attributes']['hdfs-client'],
-            owner=hawq_constants.hawq_user,
-            group=hawq_constants.hawq_group,
-            mode=0644)
-
-
-def __update_yarn_client():
-  """
-  Writes yarn-client.xml on the local filesystem on hawq nodes.
-  If yarn ha is enabled, appends related parameters to yarn-client.xml
-  """
   import params
 
-  XmlConfig("yarn-client.xml",
-            conf_dir=hawq_constants.hawq_config_dir,
-            configurations=params.yarn_client,
-            configuration_attributes=params.config['configuration_attributes']['yarn-client'],
-            owner=hawq_constants.hawq_user,
-            group=hawq_constants.hawq_group,
-            mode=0644)
+  # Write hdfs-client.xml on the local filesystem. If hdfs HA is enabled, append related parameters
+  params.XmlConfig(filename="hdfs-client.xml",
+                   configurations=params.hdfs_client,
+                   configuration_attributes=params.config_attrs['hdfs-client'])
 
+  # Write yarn-client.xml on the local filesystem. If yarn HA is enabled, append related parameters
+  params.XmlConfig(filename="yarn-client.xml",
+                   configurations=params.yarn_client,
+                   configuration_attributes=params.config_attrs['yarn-client'])
 
-def __update_hawq_site():
-  """
-  Sets up hawq-site.xml
-  """
-  import params
-  
-  XmlConfig("hawq-site.xml",
-            conf_dir=hawq_constants.hawq_config_dir,
-            configurations=ConfigDictionary(params.hawq_site),
-            configuration_attributes=params.config['configuration_attributes']['hawq-site'],
-            owner=hawq_constants.hawq_user,
-            group=hawq_constants.hawq_group,
-            mode=0644)
+  # Write hawq-site.xml on the local filesystem.
+  params.XmlConfig(filename="hawq-site.xml",
+                   configurations=params.hawq_site,
+                   configuration_attributes=params.config_attrs['hawq-site'])
+  __set_osparams()
 
 
 def __set_osparams():

http://git-wip-us.apache.org/repos/asf/ambari/blob/b15c0f3d/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
index 48b933e..74c9813 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
@@ -16,16 +16,16 @@ See the License for the specific language governing permissions and
 limitations under the License.
 """
 
-import os
 import functools
-from hawq_constants import PXF_PORT, pxf_hdfs_test_dir
+import hawq_constants
 from resource_management import Script
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.resources.xml_config import XmlConfig
 from resource_management.libraries.functions import get_kinit_path
 
 config = Script.get_config()
-
+config_attrs = config['configuration_attributes']
 
 def __get_component_host(component):
   """
@@ -76,9 +76,16 @@ HdfsResource = functools.partial(HdfsResource,
                                  default_fs=default_fs)
 
 
+# XMLConfig partial function
+XmlConfig = functools.partial(XmlConfig,
+                              conf_dir=hawq_constants.hawq_config_dir,
+                              owner=hawq_constants.hawq_user,
+                              group=hawq_constants.hawq_group,
+                              mode=0644)
+
 # For service Check
 is_pxf_installed = __get_component_host("pxf_hosts") is not None
-namenode_path =  "{0}:{1}".format(__get_component_host("namenode_host"), PXF_PORT) if dfs_nameservice is None else dfs_nameservice
+namenode_path =  "{0}:{1}".format(__get_component_host("namenode_host"), hawq_constants.PXF_PORT) if dfs_nameservice is None else dfs_nameservice
 table_definition = {
   "HAWQ": {
     "name": "ambari_hawq_test",
@@ -90,13 +97,13 @@ table_definition = {
     "name": "ambari_hawq_pxf_hdfs_readable_test",
     "create_type": "READABLE EXTERNAL",
     "drop_type": "EXTERNAL",
-    "description": "(col1 int) LOCATION ('pxf://{0}{1}?PROFILE=HdfsTextSimple') FORMAT 'TEXT'".format(namenode_path, pxf_hdfs_test_dir)
+    "description": "(col1 int) LOCATION ('pxf://{0}{1}?PROFILE=HdfsTextSimple') FORMAT 'TEXT'".format(namenode_path, hawq_constants.pxf_hdfs_test_dir)
   },
   "EXTERNAL_HDFS_WRITABLE": {
     "name": "ambari_hawq_pxf_hdfs_writable_test",
     "create_type": "WRITABLE EXTERNAL",
     "drop_type": "EXTERNAL",
-    "description": "(col1 int) LOCATION ('pxf://{0}{1}?PROFILE=HdfsTextSimple') FORMAT 'TEXT'".format(namenode_path, pxf_hdfs_test_dir)
+    "description": "(col1 int) LOCATION ('pxf://{0}{1}?PROFILE=HdfsTextSimple') FORMAT 'TEXT'".format(namenode_path, hawq_constants.pxf_hdfs_test_dir)
   }
 }
 


[33/33] ambari git commit: Merge branch 'trunk' into branch-dev-patch-upgrade

Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/418745d1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/418745d1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/418745d1

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 418745d1297e41d08ba522544ffb17d3bbb22f77
Parents: e4d1475 f927149
Author: Nate Cole <nc...@hortonworks.com>
Authored: Thu Feb 18 08:33:10 2016 -0500
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Thu Feb 18 08:33:10 2016 -0500

----------------------------------------------------------------------
 .../libraries/providers/hdfs_resource.py        |  63 +-
 .../libraries/resources/hdfs_resource.py        |  12 +
 ambari-funtest/pom.xml                          |  13 +-
 ambari-funtest/src/main/assemblies/empty.xml    |  22 +
 ambari-funtest/src/main/assemblies/funtest.xml  |  79 --
 ambari-metrics/ambari-metrics-grafana/README.md |  22 +-
 .../ambari-metrics/datasource.js                |  49 +-
 .../screenshots/21-multi-templating.png         | Bin 0 -> 92034 bytes
 .../conf/unix/ambari-metrics-monitor            |   6 +-
 .../conf/unix/ambari-metrics-collector          |   2 +-
 ambari-server/conf/unix/log4j.properties        |   9 +
 ambari-server/conf/windows/log4j.properties     |   9 +
 ambari-server/src/main/conf/log4j.properties    |   9 +
 .../api/resources/GroupResourceDefinition.java  |   1 +
 .../resources/ResourceInstanceFactoryImpl.java  |   4 +
 .../api/services/GroupPrivilegeService.java     |  76 ++
 .../server/api/services/GroupService.java       |  11 +
 .../server/checks/CheckDatabaseHelper.java      | 155 ++--
 .../internal/DefaultProviderModule.java         |   2 +
 .../GroupPrivilegeResourceProvider.java         | 237 ++++++
 .../ambari/server/controller/spi/Resource.java  |   2 +
 .../server/upgrade/UpgradeCatalog240.java       |  76 +-
 .../main/python/ambari_server/checkDatabase.py  |   7 +-
 .../1.6.1.2.2.0/package/scripts/params.py       |   1 +
 .../0.1.0/package/scripts/params.py             |   1 +
 .../0.5.0.2.1/package/scripts/params_linux.py   |   1 +
 .../HAWQ/2.0.0/configuration/gpcheck-env.xml    |  89 --
 .../HAWQ/2.0.0/configuration/hawq-check-env.xml |  89 ++
 .../common-services/HAWQ/2.0.0/kerberos.json    | 125 ++-
 .../common-services/HAWQ/2.0.0/metainfo.xml     |  16 +-
 .../HAWQ/2.0.0/package/scripts/common.py        |  63 +-
 .../2.0.0/package/scripts/hawq_constants.py     |   2 +-
 .../HAWQ/2.0.0/package/scripts/hawqmaster.py    |   2 +-
 .../HAWQ/2.0.0/package/scripts/hawqsegment.py   |   2 +-
 .../HAWQ/2.0.0/package/scripts/hawqstandby.py   |  10 +-
 .../HAWQ/2.0.0/package/scripts/master_helper.py |   2 +-
 .../HAWQ/2.0.0/package/scripts/params.py        |  21 +-
 .../HAWQ/2.0.0/package/scripts/utils.py         |  10 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |   1 +
 .../2.1.0.2.0/package/scripts/params_linux.py   |   1 +
 .../0.12.0.2.0/package/scripts/params_linux.py  |   1 +
 .../KAFKA/0.8.1.2.2/package/scripts/params.py   |   1 +
 .../0.5.0.2.2/package/scripts/params_linux.py   |   1 +
 .../MAHOUT/1.0.0.2.3/package/scripts/params.py  |   1 +
 .../4.0.0.2.0/package/scripts/params_linux.py   |   1 +
 .../0.12.0.2.0/package/scripts/params_linux.py  |   1 +
 .../0.60.0.2.2/package/scripts/params_linux.py  |   1 +
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |   1 +
 .../0.9.1.2.1/package/scripts/params_linux.py   |   1 +
 .../0.4.0.2.1/package/scripts/params_linux.py   |   1 +
 .../2.1.0.2.0/package/scripts/params_linux.py   |   1 +
 .../custom_actions/scripts/ru_execute_tasks.py  |   2 +-
 .../main/resources/scripts/Ambaripreupload.py   |  54 +-
 .../2.0.6/hooks/before-START/scripts/params.py  |   1 +
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.3.xml |   3 +-
 .../HDP/2.2/upgrades/nonrolling-upgrade-2.4.xml |   3 +-
 .../stacks/HDP/2.2/upgrades/upgrade-2.3.xml     |   2 +
 .../stacks/HDP/2.2/upgrades/upgrade-2.4.xml     |   2 +
 .../KAFKA/configuration/kafka-broker.xml        |   2 +-
 .../services/RANGER/themes/theme_version_2.json |  20 +-
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  |   5 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |   3 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |   4 +
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |   5 +-
 .../main/resources/stacks/HDP/2.5/metainfo.xml  |  25 +
 .../resources/stacks/HDP/2.5/repos/repoinfo.xml |  92 ++
 .../HDP/2.5/services/ACCUMULO/metainfo.xml      |  26 +
 .../stacks/HDP/2.5/services/ATLAS/metainfo.xml  |  26 +
 .../stacks/HDP/2.5/services/FALCON/metainfo.xml |  26 +
 .../stacks/HDP/2.5/services/FLUME/metainfo.xml  |  26 +
 .../stacks/HDP/2.5/services/HBASE/metainfo.xml  |  26 +
 .../stacks/HDP/2.5/services/HDFS/metainfo.xml   |  26 +
 .../stacks/HDP/2.5/services/HIVE/metainfo.xml   |  26 +
 .../stacks/HDP/2.5/services/KAFKA/metainfo.xml  |  26 +
 .../HDP/2.5/services/KERBEROS/metainfo.xml      |  25 +
 .../stacks/HDP/2.5/services/KNOX/metainfo.xml   |  26 +
 .../stacks/HDP/2.5/services/MAHOUT/metainfo.xml |  26 +
 .../stacks/HDP/2.5/services/OOZIE/metainfo.xml  |  26 +
 .../stacks/HDP/2.5/services/PIG/metainfo.xml    |  26 +
 .../stacks/HDP/2.5/services/RANGER/metainfo.xml |  29 +
 .../HDP/2.5/services/RANGER_KMS/metainfo.xml    |  29 +
 .../stacks/HDP/2.5/services/SLIDER/metainfo.xml |  26 +
 .../stacks/HDP/2.5/services/SPARK/metainfo.xml  |  29 +
 .../stacks/HDP/2.5/services/SQOOP/metainfo.xml  |  26 +
 .../stacks/HDP/2.5/services/STORM/metainfo.xml  |  27 +
 .../stacks/HDP/2.5/services/TEZ/metainfo.xml    |  26 +
 .../stacks/HDP/2.5/services/YARN/metainfo.xml   |  27 +
 .../HDP/2.5/services/ZOOKEEPER/metainfo.xml     |  26 +
 .../stacks/HDP/2.5/services/stack_advisor.py    |  22 +
 .../api/services/GroupPrivilegeServiceTest.java | 109 +++
 .../server/checks/CheckDatabaseHelperTest.java  |  29 +-
 .../GroupPrivilegeResourceProviderTest.java     | 362 ++++++++
 .../server/upgrade/UpgradeCatalog240Test.java   |  92 +-
 .../src/test/python/TestAmbariServer.py         |   3 +-
 ambari-server/src/test/python/TestSetupAgent.py |   2 +-
 .../custom_actions/test_ru_execute_tasks.py     |   4 +-
 .../AMBARI_METRICS/test_metrics_collector.py    |   6 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |  18 +-
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |  54 +-
 .../stacks/2.0.6/HDFS/test_service_check.py     |   8 +-
 .../stacks/2.0.6/HIVE/test_hive_server.py       |  28 +-
 .../2.0.6/HIVE/test_hive_service_check.py       |  12 +-
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |  32 +-
 .../stacks/2.0.6/OOZIE/test_service_check.py    |  10 +-
 .../stacks/2.0.6/PIG/test_pig_service_check.py  |  12 +-
 .../stacks/2.0.6/YARN/test_historyserver.py     |  29 +-
 .../2.0.6/YARN/test_mapreduce2_service_check.py |  12 +-
 .../stacks/2.0.6/common/test_stack_advisor.py   |  12 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |  12 +-
 .../python/stacks/2.1/TEZ/test_service_check.py |  16 +-
 .../stacks/2.1/YARN/test_apptimelineserver.py   |   2 +-
 .../stacks/2.2/PIG/test_pig_service_check.py    |  12 +-
 .../stacks/2.2/SPARK/test_job_history_server.py |  11 +-
 .../2.3/MAHOUT/test_mahout_service_check.py     |   8 +-
 .../2.3/SPARK/test_spark_thrift_server.py       |   4 +-
 .../test/python/stacks/2.3/YARN/test_ats_1_5.py |  10 +-
 .../stacks/2.3/common/services-hawq-1-host.json |   2 +-
 .../2.3/common/services-hawq-3-hosts.json       |   2 +-
 .../2.3/common/services-hawq-pxf-hdfs.json      |   4 +-
 .../services-master_ambari_colo-3-hosts.json    |   2 +-
 .../services-master_standby_colo-3-hosts.json   |   2 +-
 .../common/services-normal-hawq-3-hosts.json    |   2 +-
 .../services-standby_ambari_colo-3-hosts.json   |   2 +-
 ambari-web/app/assets/test/tests.js             |   3 +
 .../hawq/activateStandby/step3_controller.js    |   2 +-
 .../alerts/definition_configs_controller.js     |   4 +-
 ambari-web/app/controllers/main/host.js         |  18 +
 .../controllers/main/host/combo_search_box.js   |  43 +-
 .../controllers/main/service/info/configs.js    |  23 +-
 ambari-web/app/controllers/main/service/item.js |  46 +-
 ambari-web/app/controllers/wizard.js            |   3 -
 .../app/controllers/wizard/step7_controller.js  |   6 +-
 ambari-web/app/data/HDP2.3/site_properties.js   |   4 +-
 ambari-web/app/messages.js                      |   5 +-
 .../app/mixins/common/configs/configs_saver.js  |   4 -
 .../app/mixins/common/widgets/widget_mixin.js   |  70 +-
 .../mixins/wizard/assign_master_components.js   |  58 +-
 ambari-web/app/models/alerts/alert_config.js    |   4 +-
 .../configs/objects/service_config_property.js  |   2 +
 ambari-web/app/models/host_component.js         |  27 +-
 ambari-web/app/models/service.js                |   2 +-
 ambari-web/app/models/stack_service.js          |   4 +-
 .../main/host/details/host_component.hbs        |   2 +-
 ambari-web/app/utils/ajax/ajax.js               |  12 +
 ambari-web/app/utils/config.js                  | 151 ++--
 ambari-web/app/utils/helper.js                  |  14 +
 .../app/views/common/chart/linear_time.js       |  88 +-
 ambari-web/app/views/common/controls_view.js    |  75 +-
 .../main/admin/stack_upgrade/versions_view.js   |   6 +-
 .../app/views/main/host/combo_search_box.js     |  55 +-
 .../main/host/details/host_component_view.js    |   7 +-
 ambari-web/app/views/main/service/info/menu.js  |  45 +-
 .../app/views/main/service/info/summary.js      | 154 ++--
 ambari-web/app/views/main/service/item.js       |  41 +-
 .../definitions_configs_controller_test.js      |   4 +-
 .../service/reassign/step4_controller_test.js   |   5 +-
 .../test/controllers/wizard/step7_test.js       |  26 -
 .../test/mixins/common/widget_mixin_test.js     |  51 +-
 .../wizard/assign_master_components_test.js     | 154 ++++
 .../test/models/alerts/alert_config_test.js     |   8 +-
 ambari-web/test/utils/ajax/ajax_test.js         |  32 +
 ambari-web/test/utils/config_test.js            | 242 ------
 .../test/views/common/chart/linear_time_test.js |  13 +-
 .../stack_upgrade/upgrade_wizard_view_test.js   |   5 -
 .../service/info/component_list_view_test.js    | 153 ++++
 .../test/views/main/service/info/config_test.js |  99 ++-
 .../test/views/main/service/info/menu_test.js   |  93 ++
 .../views/main/service/info/summary_test.js     | 852 +++++++++++++------
 docs/pom.xml                                    |   6 +-
 169 files changed, 4203 insertions(+), 1583 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/418745d1/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/418745d1/ambari-server/src/main/java/org/apache/ambari/server/controller/spi/Resource.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/418745d1/ambari-web/app/messages.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/418745d1/ambari-web/app/views/main/admin/stack_upgrade/versions_view.js
----------------------------------------------------------------------